first commit
This commit is contained in:
0
.gitignore
vendored
Normal file
0
.gitignore
vendored
Normal file
196
README.md
Normal file
196
README.md
Normal file
@@ -0,0 +1,196 @@
|
||||
# SatMaster — F4BPO Satellite Tracker
|
||||
|
||||
Amateur satellite tracking application with Doppler correction for FlexRadio 8600 and rotor control via PstRotator.
|
||||
|
||||
**Stack:** Go + Wails v2 + Svelte
|
||||
**Platform:** Windows (primary), Linux/macOS (compilable)
|
||||
|
||||
---
|
||||
|
||||
## Features
|
||||
|
||||
- 🗺️ **World map** — real-time satellite positions on Leaflet dark map, footprint overlay
|
||||
- 📡 **Polar plot** — az/el pass track with AOS/LOS markers, current position animation
|
||||
- 🕐 **Pass prediction** — 24h upcoming passes, Max El, duration, quality rating
|
||||
- 📻 **Doppler correction** — automatic TX/RX frequency correction via FlexRadio 8600 TCP API
|
||||
- 🔄 **Rotor control** — Az/El commands to PstRotator via UDP XML protocol
|
||||
- 🛰️ **TLE management** — auto-fetch from tle.oscarwatch.org with local cache fallback
|
||||
|
||||
---
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
SatMaster/
|
||||
├── main.go # Wails entry point
|
||||
├── app.go # App struct, Wails bindings, position loop
|
||||
├── wails.json # Wails project config
|
||||
│
|
||||
├── backend/
|
||||
│ ├── tle/
|
||||
│ │ └── manager.go # TLE fetch (oscarwatch.org), cache, parse, lookup
|
||||
│ │
|
||||
│ ├── propagator/
|
||||
│ │ └── engine.go # SGP4 propagation, az/el, pass prediction
|
||||
│ │
|
||||
│ ├── doppler/
|
||||
│ │ └── calculator.go # Doppler shift computation (range rate → Hz correction)
|
||||
│ │
|
||||
│ ├── flexradio/
|
||||
│ │ └── client.go # FlexRadio SmartSDR TCP API (port 4992)
|
||||
│ │
|
||||
│ └── rotor/
|
||||
│ └── pstrotator.go # PstRotator UDP XML protocol (port 12000)
|
||||
│
|
||||
└── frontend/src/
|
||||
├── App.svelte # Root layout, Wails event subscriptions
|
||||
├── stores/satstore.js # Svelte stores for all app state
|
||||
├── lib/
|
||||
│ ├── wails.js # Wails Go call bridge + dev stubs
|
||||
│ └── utils.js # Formatting helpers (freq, az, el, etc.)
|
||||
└── components/
|
||||
├── StatusBar.svelte # Top bar: clock, sat data, Doppler, connections
|
||||
├── SatSelector.svelte # Left sidebar: search + quick-select + full list
|
||||
├── WorldMap.svelte # Leaflet map with sat markers + footprints
|
||||
├── PolarPlot.svelte # SVG polar az/el plot
|
||||
├── PassesPanel.svelte # Pass list + detail + embedded polar plot
|
||||
└── SettingsPanel.svelte # QTH, frequencies, FlexRadio, rotor, TLE
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Prerequisites
|
||||
|
||||
```bash
|
||||
# Go 1.21+
|
||||
# Node.js 18+
|
||||
# Wails v2
|
||||
go install github.com/wailsapp/wails/v2/cmd/wails@latest
|
||||
|
||||
# Verify
|
||||
wails doctor
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Setup & Build
|
||||
|
||||
```bash
|
||||
# 1. Clone / copy project
|
||||
cd SatMaster
|
||||
|
||||
# 2. Download Go dependencies
|
||||
go mod tidy
|
||||
|
||||
# 3. Install frontend deps
|
||||
cd frontend && npm install && cd ..
|
||||
|
||||
# 4. Development mode (hot reload)
|
||||
wails dev
|
||||
|
||||
# 5. Production build (single .exe)
|
||||
wails build
|
||||
# → build/bin/SatMaster.exe
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## FlexRadio 8600 — Protocol Notes
|
||||
|
||||
SatMaster uses the SmartSDR **TCP API** on port **4992**.
|
||||
|
||||
Commands sent:
|
||||
```
|
||||
C1|slice t 0 145.800000      # Tune RX slice 0 frequency (MHz)
C2|slice t 1 145.200000      # Tune TX slice 1 frequency (MHz)
|
||||
```
|
||||
|
||||
- Connect: Settings → FlexRadio 8600 → enter IP → Connect
|
||||
- Default slices: RX = Slice A (index 0), TX = Slice B (index 1); configurable in Settings
|
||||
- Doppler correction fires every second during active tracking
|
||||
- Dead-band: frequency updates are only sent when the corrected value moves by at least 1 Hz
|
||||
|
||||
---
|
||||
|
||||
## PstRotator — Protocol Notes
|
||||
|
||||
SatMaster sends **UDP XML** to PstRotator on port **12000** (default).
|
||||
|
||||
```xml
|
||||
<PST><CMD>SET</CMD><AZ>180.0</AZ><EL>45.0</EL></PST>
|
||||
```
|
||||
|
||||
- Configure in PstRotator: Setup → UDP Server → Port 12000
|
||||
- Dead-band: 0.5° Az and El to prevent rotor hunting
|
||||
- Elevation is clamped to [0°, 90°] — no negative elevation commands sent
|
||||
|
||||
---
|
||||
|
||||
## TLE Data
|
||||
|
||||
- **Primary source:** `https://tle.oscarwatch.org/nasabare.txt`
|
||||
- **Fallback:** Celestrak SOCRATES
|
||||
- **Cache:** `%LOCALAPPDATA%\SatMaster\satmaster_tle_cache.txt` (Windows)
|
||||
- **Bundled fallback:** ISS, AO-7, AO-27, SO-50 (always available)
|
||||
- TLE age shown in status bar; highlighted orange if > 48h
|
||||
|
||||
---
|
||||
|
||||
## Pass Prediction
|
||||
|
||||
- Step: 10s (30s when satellite below -15° elevation)
|
||||
- AOS/LOS bisection precision: 1 second
|
||||
- Horizon: passes below 5° Max El are filtered from the list
|
||||
- 24-hour prediction window
|
||||
|
||||
---
|
||||
|
||||
## Doppler Formula
|
||||
|
||||
```
|
||||
f_corrected_down = f_nominal × (1 - v_range / c)
|
||||
f_corrected_up = f_nominal / (1 - v_range / c)
|
||||
|
||||
where:
|
||||
v_range = range rate in km/s (+ = receding, - = approaching)
|
||||
c = 299792.458 km/s
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Keyboard Shortcuts
|
||||
|
||||
| Key | Action |
|
||||
|-----|--------|
|
||||
| `M` | Switch to Map tab |
|
||||
| `P` | Switch to Passes tab |
|
||||
| `S` | Switch to Settings tab |
|
||||
|
||||
---
|
||||
|
||||
## Adding More Satellites
|
||||
|
||||
The app loads all satellites from the TLE feed (~2000+ amateur sats from OscarWatch).
|
||||
Use the search box in the satellite selector to find any satellite by name or NORAD catalog number.
|
||||
|
||||
Common amateur satellites available:
|
||||
- ISS (ZARYA) — 145.800 MHz FM voice
|
||||
- AO-7 — 145.975 MHz / 29.502 MHz linear transponder
|
||||
- AO-27 — 145.850 MHz FM
|
||||
- SO-50 — 436.795 MHz FM
|
||||
- RS-44 — 435.640 MHz / 145.935 MHz linear
|
||||
- PO-101 (Diwata-2) — 437.500 MHz FM
|
||||
- XW-2A/B/C/D — CW/linear transponders
|
||||
- CAS-4A/B — linear transponders
|
||||
|
||||
---
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
- [ ] Ground track on world map (lat/lon per pass point)
|
||||
- [ ] Multiple satellite tracking (split-screen polar)
|
||||
- [ ] Audio squelch mute between passes
|
||||
- [ ] Log4OM integration for contact logging
|
||||
- [ ] Satellite database with nominal frequencies auto-loaded
|
||||
- [ ] AOS/LOS audio alert
|
||||
- [ ] Export pass schedule to CSV/PDF
|
||||
379
app.go
Normal file
379
app.go
Normal file
@@ -0,0 +1,379 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"SatMaster/backend/doppler"
|
||||
"SatMaster/backend/flexradio"
|
||||
"SatMaster/backend/propagator"
|
||||
"SatMaster/backend/rotor"
|
||||
"SatMaster/backend/tle"
|
||||
|
||||
"github.com/wailsapp/wails/v2/pkg/runtime"
|
||||
)
|
||||
|
||||
// App holds all subsystems and is the Wails binding target.
// Every exported method on App is callable from the Svelte frontend
// through the Wails bridge.
type App struct {
	dopplerEnabled bool // global Doppler on/off (SetDopplerEnabled)
	rotorEnabled   bool // global rotor tracking on/off (SetRotorEnabled)
	rotorAzOnly    bool // true = send azimuth-only rotor commands (SetRotorAzOnly)
	trackFreqMode  bool // Track freq+mode for current sat
	trackAzimuth   bool // Track azimuth for current sat
	savedRxSlice   int  // FlexRadio RX slice index, re-applied after reconnect
	savedTxSlice   int  // FlexRadio TX slice index, re-applied after reconnect
	ctx            context.Context // Wails runtime context; nil until startup() runs

	tleManager  *tle.Manager
	propagator  *propagator.Engine
	flexRadio   *flexradio.Client
	rotorClient *rotor.PstRotatorClient
	dopplerCalc *doppler.Calculator

	// Tracking state
	trackedSat  string             // name of the currently tracked satellite ("" = none)
	trackingOn  bool               // true between StartTracking and StopTracking
	cancelTrack context.CancelFunc // NOTE(review): never assigned in this file — appears dead; confirm before removing
	watchlist   []string           // sat names shown on the map; written by SetWatchlist and read by the positionLoop goroutine without synchronization — TODO confirm thread-safety
}
|
||||
|
||||
// NewApp constructs the App with all subsystems created and defaults set:
// Doppler and rotor enabled, azimuth-only rotor mode, RX slice 0 / TX slice 1,
// and a starter watchlist of popular amateur satellites.
func NewApp() *App {
	return &App{
		dopplerEnabled: true,
		rotorEnabled:   true,
		rotorAzOnly:    true,
		trackFreqMode:  false,
		trackAzimuth:   false,
		savedRxSlice:   0, // defaults — overridden by SetSliceConfig from Settings on connect
		savedTxSlice:   1,
		tleManager:     tle.NewManager(),
		propagator:     propagator.NewEngine(),
		flexRadio:      flexradio.NewClient(),
		rotorClient:    rotor.NewPstRotatorClient(),
		dopplerCalc:    doppler.NewCalculator(),
		watchlist:      []string{"ISS (ZARYA)", "AO-7", "AO-27", "SO-50", "RS-44", "AO-91", "FO-29"},
	}
}
|
||||
|
||||
// startup is the Wails lifecycle hook: it stores the runtime context, kicks
// off an asynchronous TLE load (remote fetch with local-cache fallback),
// emits the initial satellite positions, and starts the 1 Hz position loop.
func (a *App) startup(ctx context.Context) {
	a.ctx = ctx
	log.Println("[SatMaster] Startup")

	// Load TLEs on startup (try remote, fallback local)
	go func() {
		if err := a.tleManager.FetchAndCache(); err != nil {
			log.Printf("[TLE] Remote fetch failed: %v — loading local cache", err)
			a.tleManager.LoadLocal()
		}
		runtime.EventsEmit(ctx, "tle:loaded", a.tleManager.SatelliteNames())
		// Immediately emit positions after TLE load
		allSats := a.tleManager.All()
		// NOTE(review): a.watchlist is read here from a goroutine while
		// SetWatchlist may write it — confirm thread-safety.
		log.Printf("[TLE] Loaded %d satellites, watchlist=%v", len(allSats), a.watchlist)
		var initSats []*tle.Satellite
		for _, name := range a.watchlist {
			if s := a.tleManager.Get(name); s != nil {
				initSats = append(initSats, s)
			}
		}
		// Fall back to every loaded satellite when no watchlist entry resolves.
		if len(initSats) == 0 {
			log.Printf("[TLE] Watchlist resolved 0 sats, using all %d", len(allSats))
			initSats = allSats
		} else {
			log.Printf("[TLE] Watchlist resolved %d sats", len(initSats))
		}
		if positions := a.propagator.AllPositions(initSats, time.Now()); len(positions) > 0 {
			log.Printf("[TLE] Emitting %d initial positions", len(positions))
			runtime.EventsEmit(ctx, "sat:positions", positions)
		}
	}()

	// Emit position updates every second (filtered by watchlist)
	go a.positionLoop(ctx)
}

// shutdown is the Wails lifecycle hook: stop tracking and drop the radio
// connection. The rotor client is not explicitly disconnected here.
func (a *App) shutdown(ctx context.Context) {
	a.StopTracking()
	a.flexRadio.Disconnect()
}
|
||||
|
||||
// positionLoop emits sat positions to the frontend every second until ctx is
// cancelled, and drives Doppler/rotor updates while tracking is active.
func (a *App) positionLoop(ctx context.Context) {
	ticker := time.NewTicker(1 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			// Build sat list from watchlist; fall back to all if empty/unresolved
			// NOTE(review): a.watchlist is written by SetWatchlist from the
			// frontend without synchronization — confirm thread-safety.
			allSats := a.tleManager.All()
			var sats []*tle.Satellite
			for _, name := range a.watchlist {
				if s := a.tleManager.Get(name); s != nil {
					sats = append(sats, s)
				}
			}
			if len(sats) == 0 {
				sats = allSats
			}
			positions := a.propagator.AllPositions(sats, time.Now())
			// NOTE(review): this logs once per second — consider demoting or
			// removing for production builds.
			log.Printf("[Loop] watchlist=%d resolved=%d positions=%d", len(a.watchlist), len(sats), len(positions))
			runtime.EventsEmit(ctx, "sat:positions", positions)

			// If tracking active, update doppler + rotor
			if a.trackingOn && a.trackedSat != "" {
				a.updateTracking()
			}
		}
	}
}
|
||||
|
||||
func (a *App) updateTracking() {
|
||||
sat := a.tleManager.Get(a.trackedSat)
|
||||
if sat == nil {
|
||||
return
|
||||
}
|
||||
now := time.Now()
|
||||
pos := a.propagator.Position(sat, now)
|
||||
if pos == nil {
|
||||
return
|
||||
}
|
||||
obs := a.propagator.ObserverPosition()
|
||||
|
||||
// Doppler: only when globally enabled AND tracking enabled for this sat AND el >= 0
|
||||
if a.flexRadio.IsConnected() && pos.Elevation >= 0 && a.dopplerEnabled && a.trackFreqMode {
|
||||
upFreq, downFreq := a.dopplerCalc.Correct(pos, obs, now)
|
||||
if err := a.flexRadio.SetFrequency(downFreq, upFreq); err != nil {
|
||||
log.Printf("[Doppler] SetFrequency error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Rotor: only when globally enabled AND tracking enabled for this sat AND el >= 0
|
||||
if a.rotorClient.IsConnected() && a.rotorEnabled && a.trackAzimuth && pos.Elevation >= 0 {
|
||||
var rotErr error
|
||||
if a.rotorAzOnly {
|
||||
rotErr = a.rotorClient.SetAzOnly(pos.Azimuth)
|
||||
} else {
|
||||
rotErr = a.rotorClient.SetAzEl(pos.Azimuth, pos.Elevation)
|
||||
}
|
||||
if rotErr != nil {
|
||||
log.Printf("[Rotor] error: %v", rotErr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Wails-exposed methods ──────────────────────────────────────────────────
|
||||
|
||||
// GetSatelliteList returns all satellite names from loaded TLEs.
// Empty until the startup TLE fetch / cache load has completed.
func (a *App) GetSatelliteList() []string {
	return a.tleManager.SatelliteNames()
}

// GetPasses returns upcoming passes for a satellite over the next `hours`
// hours. Returns nil when the satellite name is unknown.
func (a *App) GetPasses(satName string, hours float64) []propagator.Pass {
	sat := a.tleManager.Get(satName)
	if sat == nil {
		return nil
	}
	return a.propagator.ComputePasses(sat, time.Now(), hours)
}

// GetCurrentPosition returns az/el/lat/lon for a satellite right now.
// Returns nil when the satellite name is unknown.
func (a *App) GetCurrentPosition(satName string) *propagator.SatPosition {
	sat := a.tleManager.Get(satName)
	if sat == nil {
		return nil
	}
	return a.propagator.Position(sat, time.Now())
}

// SetObserverLocation sets the QTH for pass prediction and Doppler.
// lat/lon in degrees, altM in meters above sea level.
func (a *App) SetObserverLocation(lat, lon, altM float64) {
	a.propagator.SetObserver(lat, lon, altM)
	a.dopplerCalc.SetObserver(lat, lon, altM)
}

// SetSatelliteFrequencies configures the nominal uplink/downlink (in Hz)
// used by the Doppler calculator.
func (a *App) SetSatelliteFrequencies(downHz, upHz float64) {
	a.dopplerCalc.SetNominal(downHz, upHz)
}
|
||||
|
||||
// SetSatelliteMode sets the FlexRadio slice mode for satellite operation.
// Called automatically when a frequency is selected in the frontend.
// No-op while the radio is disconnected; errors are logged, not returned.
func (a *App) SetSatelliteMode(mode string) {
	if !a.flexRadio.IsConnected() {
		return
	}
	// Translate DB mode strings ("FM", "LSB", …) to SmartSDR mode names.
	flexMode := flexradio.SatModeToFlex(mode)
	if err := a.flexRadio.SetMode(flexMode); err != nil {
		log.Printf("[FlexRadio] SetMode error: %v", err)
	}
}
|
||||
|
||||
// StartTracking begins active tracking of a satellite.
// Returns "OK" or a human-readable error string for display in the frontend.
func (a *App) StartTracking(satName string) string {
	sat := a.tleManager.Get(satName)
	if sat == nil {
		return "Satellite not found: " + satName
	}
	// Reset per-satellite tracking toggles on satellite change so the
	// previous satellite's freq/rotor toggles do not carry over.
	if a.trackedSat != satName {
		a.trackFreqMode = false
		a.trackAzimuth = false
	}
	a.trackedSat = satName
	a.trackingOn = true
	log.Printf("[Tracking] Started: %s", satName)
	return "OK"
}

// StopTracking stops active tracking and clears the tracked satellite.
func (a *App) StopTracking() {
	a.trackingOn = false
	a.trackedSat = ""
	// NOTE(review): cancelTrack is never assigned anywhere in this file, so
	// this branch appears dead — confirm before removing the field.
	if a.cancelTrack != nil {
		a.cancelTrack()
	}
}
|
||||
|
||||
// ConnectFlexRadio connects to the FlexRadio 8600 SmartSDR TCP API.
// Returns "OK" or an "Error: …" string for display in the UI.
func (a *App) ConnectFlexRadio(host string, port int) string {
	if err := a.flexRadio.Connect(host, port); err != nil {
		return "Error: " + err.Error()
	}
	// Restore saved slice config and query slices.
	// The 800 ms delay presumably gives the radio time to finish its
	// connection handshake before configuration is pushed — TODO confirm
	// against SmartSDR API timing requirements.
	go func() {
		time.Sleep(800 * time.Millisecond)
		a.flexRadio.SetSlices(a.savedRxSlice, a.savedTxSlice)
		a.flexRadio.QuerySlices()
	}()
	return "OK"
}

// DisconnectFlexRadio closes the FlexRadio connection.
func (a *App) DisconnectFlexRadio() {
	a.flexRadio.Disconnect()
}

// ConnectRotor connects to PstRotator over UDP.
// Returns "OK" or an "Error: …" string for display in the UI.
func (a *App) ConnectRotor(host string, port int) string {
	if err := a.rotorClient.Connect(host, port); err != nil {
		return "Error: " + err.Error()
	}
	return "OK"
}

// DisconnectRotor closes the PstRotator connection.
func (a *App) DisconnectRotor() {
	a.rotorClient.Disconnect()
}
|
||||
|
||||
// RefreshTLE forces a re-fetch of TLE data.
|
||||
func (a *App) RefreshTLE() string {
|
||||
if err := a.tleManager.FetchAndCache(); err != nil {
|
||||
return "Error: " + err.Error()
|
||||
}
|
||||
runtime.EventsEmit(a.ctx, "tle:loaded", a.tleManager.SatelliteNames())
|
||||
return "OK"
|
||||
}
|
||||
|
||||
// SetRotorAzOnly sets azimuth-only mode (true) or az+el mode (false).
|
||||
func (a *App) SetRotorAzOnly(azOnly bool) {
|
||||
a.rotorAzOnly = azOnly
|
||||
mode := "Az+El"
|
||||
if azOnly {
|
||||
mode = "Az only"
|
||||
}
|
||||
log.Printf("[Rotor] mode: %s", mode)
|
||||
}
|
||||
|
||||
// SetRotorEnabled enables or disables rotator tracking.
|
||||
func (a *App) SetRotorEnabled(enabled bool) {
|
||||
a.rotorEnabled = enabled
|
||||
log.Printf("[Rotor] tracking %s", map[bool]string{true: "enabled", false: "disabled"}[enabled])
|
||||
}
|
||||
|
||||
// GetRotorEnabled returns current rotator tracking state.
|
||||
func (a *App) GetRotorEnabled() bool {
|
||||
return a.rotorEnabled
|
||||
}
|
||||
|
||||
// SetTrackFreqMode enables/disables frequency+mode tracking for current satellite.
|
||||
func (a *App) SetTrackFreqMode(enabled bool) {
|
||||
a.trackFreqMode = enabled
|
||||
// Reset dead-band so freq is sent immediately when tracking starts
|
||||
if enabled {
|
||||
a.flexRadio.ResetDeadband()
|
||||
}
|
||||
log.Printf("[Doppler] Freq/Mode tracking: %v", enabled)
|
||||
}
|
||||
|
||||
// SetTrackAzimuth enables/disables azimuth tracking for current satellite.
|
||||
func (a *App) SetTrackAzimuth(enabled bool) {
|
||||
a.trackAzimuth = enabled
|
||||
// Reset rotor dead-band so it moves immediately
|
||||
if enabled {
|
||||
a.rotorClient.ResetDeadband()
|
||||
}
|
||||
log.Printf("[Rotor] Azimuth tracking: %v", enabled)
|
||||
}
|
||||
|
||||
// SetDopplerEnabled enables or disables Doppler correction.
|
||||
func (a *App) SetDopplerEnabled(enabled bool) {
|
||||
a.dopplerEnabled = enabled
|
||||
log.Printf("[Doppler] %s", map[bool]string{true: "enabled", false: "disabled"}[enabled])
|
||||
}
|
||||
|
||||
// GetDopplerEnabled returns current Doppler state.
|
||||
func (a *App) GetDopplerEnabled() bool {
|
||||
return a.dopplerEnabled
|
||||
}
|
||||
|
||||
// GetFlexRadioStatus returns the FlexRadio connection status.
func (a *App) GetFlexRadioStatus() bool {
	return a.flexRadio.IsConnected()
}

// GetSliceConfig returns current RX/TX slice indices as {"rx": n, "tx": n}.
func (a *App) GetSliceConfig() map[string]int {
	rx, tx := a.flexRadio.GetSlices()
	return map[string]int{"rx": rx, "tx": tx}
}

// SetSliceConfig sets RX and TX slice indices (0=A, 1=B, ...).
// Values are saved on the App so they can be re-applied after a reconnect.
func (a *App) SetSliceConfig(rxIdx, txIdx int) {
	a.savedRxSlice = rxIdx
	a.savedTxSlice = txIdx
	a.flexRadio.SetSlices(rxIdx, txIdx)
	log.Printf("[FlexRadio] Slice config saved: RX=%d TX=%d", rxIdx, txIdx)
}

// GetRotorStatus returns the PstRotator connection status.
func (a *App) GetRotorStatus() bool {
	return a.rotorClient.IsConnected()
}

// GetTLEAge returns the age of the TLE cache in hours.
func (a *App) GetTLEAge() float64 {
	return a.tleManager.AgeHours()
}

// GetGroundtrack returns lat/lon points for the next `minutes` minutes of
// orbit. Returns nil when the satellite name is unknown.
func (a *App) GetGroundtrack(satName string, minutes float64) []propagator.GroundtrackPoint {
	sat := a.tleManager.Get(satName)
	if sat == nil {
		return nil
	}
	return a.propagator.ComputeGroundtrack(sat, time.Now(), minutes)
}

// GetWatchlist returns the current list of satellites to display on the map.
func (a *App) GetWatchlist() []string {
	return a.watchlist
}

// SetWatchlist sets which satellites to display on the map.
// NOTE(review): written here and read every second by the positionLoop
// goroutine without synchronization — confirm whether a mutex is needed.
func (a *App) SetWatchlist(names []string) {
	a.watchlist = names
}
|
||||
92
backend/doppler/calculator.go
Normal file
92
backend/doppler/calculator.go
Normal file
@@ -0,0 +1,92 @@
|
||||
package doppler
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"SatMaster/backend/propagator"
|
||||
)
|
||||
|
||||
const (
	// SpeedOfLight in km/s — matches the km/s range-rate units used by Correct.
	SpeedOfLight = 299792.458 // km/s
)

// Calculator computes Doppler-shifted frequencies from the configured
// nominal uplink/downlink. Guarded by an RWMutex for concurrent use.
type Calculator struct {
	mu          sync.RWMutex
	nominalDown float64 // Hz
	nominalUp   float64 // Hz
	obsLat      float64 // degrees — not read by Correct in this file
	obsLon      float64 // degrees
	obsAlt      float64 // meters
}

// NewCalculator returns an unconfigured Calculator; callers must invoke
// SetNominal before Correct yields useful output.
func NewCalculator() *Calculator {
	return &Calculator{}
}
|
||||
|
||||
// SetObserver records the observer location (lat/lon in degrees, altM in
// meters). NOTE(review): these fields are not read by Correct in this file —
// confirm whether any other caller uses them.
func (c *Calculator) SetObserver(lat, lon, altM float64) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.obsLat = lat
	c.obsLon = lon
	c.obsAlt = altM
}

// SetNominal configures the nominal downlink/uplink frequencies in Hz.
func (c *Calculator) SetNominal(downHz, upHz float64) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.nominalDown = downHz
	c.nominalUp = upHz
}
|
||||
|
||||
// Correct computes Doppler-corrected downlink and uplink frequencies.
|
||||
// Returns (downlinkHz, uplinkHz).
|
||||
func (c *Calculator) Correct(pos *propagator.SatPosition, obs propagator.Observer, _ time.Time) (float64, float64) {
|
||||
c.mu.RLock()
|
||||
nomDown := c.nominalDown
|
||||
nomUp := c.nominalUp
|
||||
c.mu.RUnlock()
|
||||
|
||||
if nomDown == 0 && nomUp == 0 {
|
||||
return 0, 0
|
||||
}
|
||||
if pos == nil {
|
||||
return nomDown, nomUp
|
||||
}
|
||||
|
||||
// Range rate in km/s (positive = receding, negative = approaching)
|
||||
rr := pos.RangeRate
|
||||
|
||||
// Doppler factor: f_received = f_nominal * (1 - v/c)
|
||||
// For downlink: satellite is the transmitter
|
||||
dopplerFactor := 1.0 - rr/SpeedOfLight
|
||||
|
||||
correctedDown := nomDown * dopplerFactor
|
||||
// For uplink: we pre-correct in reverse so the satellite receives nominal
|
||||
correctedUp := nomUp / dopplerFactor
|
||||
|
||||
return correctedDown, correctedUp
|
||||
}
|
||||
|
||||
// ShiftHz returns the Doppler shift in Hz for a given nominal frequency and
// range rate (km/s). A receding satellite (positive rate) yields a negative
// shift; an approaching one a positive shift.
func ShiftHz(nominalHz, rangeRateKmS float64) float64 {
	const c = 299792.458 // km/s — same value as SpeedOfLight
	return nominalHz * (-rangeRateKmS / c)
}
|
||||
|
||||
// RangeRateFromPositions computes range rate from two consecutive positions.
|
||||
func RangeRateFromPositions(prev, curr *propagator.SatPosition, dt float64) float64 {
|
||||
prevRange := prev.Range
|
||||
currRange := curr.Range
|
||||
return (currRange - prevRange) / dt
|
||||
}
|
||||
|
||||
// FormatShift formats a Doppler shift in Hz for display, switching to a
// two-decimal kHz representation for magnitudes of 1 kHz and above.
func FormatShift(shiftHz float64) string {
	magnitude := math.Abs(shiftHz)
	if magnitude < 1000 {
		return fmt.Sprintf("%+.0f Hz", shiftHz)
	}
	return fmt.Sprintf("%+.2f kHz", shiftHz/1000)
}
|
||||
241
backend/flexradio/client.go
Normal file
241
backend/flexradio/client.go
Normal file
@@ -0,0 +1,241 @@
|
||||
package flexradio
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"log"
|
||||
"net"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Client manages a TCP connection to FlexRadio SmartSDR API.
// For satellite operation:
//   - Slice A (index 0) = RX downlink (436 MHz for SO-50)
//   - Slice B (index 1) = TX uplink (145 MHz for SO-50)
//
// The connected flag is an atomic so IsConnected never blocks on the mutex.
type Client struct {
	mu        sync.Mutex     // serializes connect/disconnect, command writes, slice config
	conn      net.Conn       // nil while disconnected
	scanner   *bufio.Scanner // consumed by readLoop
	connected atomic.Bool    // lock-free connection flag
	seqNum    uint32         // command sequence counter ("C<seq>|…" prefix)

	rxSlice    int // Slice index for RX (downlink) — default 0 = Slice A
	txSlice    int // Slice index for TX (uplink) — default 1 = Slice B
	lastDownHz float64 // Last sent RX frequency (dead-band)
	lastUpHz   float64 // Last sent TX frequency (dead-band)
}

// NewClient returns a Client with the conventional satellite slice layout
// (RX=Slice A, TX=Slice B); SetSlices can override after connect.
func NewClient() *Client {
	return &Client{rxSlice: 0, txSlice: 1} // defaults — always overridden by SetSliceConfig on connect
}
|
||||
|
||||
// Connect establishes a TCP connection to the FlexRadio SmartSDR API
// (conventionally port 4992), replacing any existing connection.
// Several log/error strings below are in French; they are runtime output
// and are preserved as-is.
func (c *Client) Connect(host string, port int) error {
	c.mu.Lock()
	defer c.mu.Unlock()

	// Drop a stale connection before dialing a new one.
	if c.connected.Load() {
		c.disconnectLocked()
	}

	addr := fmt.Sprintf("%s:%d", host, port)
	conn, err := net.DialTimeout("tcp", addr, 5*time.Second)
	if err != nil {
		return fmt.Errorf("connexion FlexRadio %s : %w", addr, err)
	}

	c.conn = conn
	c.scanner = bufio.NewScanner(conn)
	c.connected.Store(true)

	// NOTE(review): readLoop reads c.scanner, which a reconnect replaces
	// while an old readLoop may still be running — confirm the two loops
	// cannot interleave across connections.
	go c.readLoop()

	log.Printf("[FlexRadio] Connecté à %s (RX=Slice %s, TX=Slice %s)",
		addr, sliceLetter(c.rxSlice), sliceLetter(c.txSlice))
	return nil
}
|
||||
|
||||
// Disconnect closes the connection. Safe to call when already disconnected.
func (c *Client) Disconnect() {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.disconnectLocked()
}

// disconnectLocked closes and clears the connection; caller must hold c.mu.
// Note: logs "Déconnecté" even when there was no connection to close.
func (c *Client) disconnectLocked() {
	if c.conn != nil {
		c.conn.Close()
		c.conn = nil
	}
	c.connected.Store(false)
	log.Println("[FlexRadio] Déconnecté")
}

// IsConnected reports the connection state without taking the mutex.
func (c *Client) IsConnected() bool {
	return c.connected.Load()
}
|
||||
|
||||
// SetFrequency applies Doppler-corrected frequencies to both slices.
|
||||
// downHz = corrected RX frequency → rxSlice (default Slice A)
|
||||
// upHz = corrected TX frequency → txSlice (default Slice B)
|
||||
// Only sends command if frequency changed by more than 1 Hz (dead-band)
|
||||
func (c *Client) SetFrequency(downHz, upHz float64) error {
|
||||
if !c.connected.Load() {
|
||||
return fmt.Errorf("non connecté")
|
||||
}
|
||||
|
||||
var errs []error
|
||||
|
||||
// RX downlink — use "slice t" (tune) command
|
||||
if downHz > 0 {
|
||||
downMHz := downHz / 1e6
|
||||
if abs(downHz-c.lastDownHz) >= 1.0 { // 1 Hz dead-band
|
||||
cmd := fmt.Sprintf("slice t %d %.6f", c.rxSlice, downMHz)
|
||||
if err := c.sendCommand(cmd); err != nil {
|
||||
errs = append(errs, err)
|
||||
} else {
|
||||
c.lastDownHz = downHz
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TX uplink — use "slice t" (tune) command
|
||||
if upHz > 0 {
|
||||
upMHz := upHz / 1e6
|
||||
if abs(upHz-c.lastUpHz) >= 1.0 {
|
||||
cmd := fmt.Sprintf("slice t %d %.6f", c.txSlice, upMHz)
|
||||
if err := c.sendCommand(cmd); err != nil {
|
||||
errs = append(errs, err)
|
||||
} else {
|
||||
c.lastUpHz = upHz
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(errs) > 0 {
|
||||
return fmt.Errorf("FlexRadio SetFrequency: %v", errs)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ResetDeadband forces the next frequency command to be sent regardless of
// how little the frequency changed, by zeroing the last-sent values.
// Called when frequency tracking is (re)enabled so the radio tunes at once.
func (c *Client) ResetDeadband() {
	c.lastDownHz = 0
	c.lastUpHz = 0
}
|
||||
|
||||
// abs returns the absolute value of x. Local helper that avoids importing
// math for a single call site.
func abs(x float64) float64 {
	if x >= 0 {
		return x
	}
	return -x
}
|
||||
|
||||
// SetSlices configures which slice indices to use for RX and TX.
|
||||
// Default: rx=0 (Slice A), tx=1 (Slice B)
|
||||
func (c *Client) SetSlices(rxIdx, txIdx int) {
|
||||
c.mu.Lock()
|
||||
defer c.mu.Unlock()
|
||||
c.rxSlice = rxIdx
|
||||
c.txSlice = txIdx
|
||||
log.Printf("[FlexRadio] Slices configurées: RX=Slice %s, TX=Slice %s",
|
||||
sliceLetter(rxIdx), sliceLetter(txIdx))
|
||||
}
|
||||
|
||||
// GetSlices returns current RX and TX slice indices.
|
||||
func (c *Client) GetSlices() (rx, tx int) {
|
||||
return c.rxSlice, c.txSlice
|
||||
}
|
||||
|
||||
// QuerySlices sends a slice list request to discover available slices.
|
||||
func (c *Client) QuerySlices() error {
|
||||
return c.sendCommand("slice list")
|
||||
}
|
||||
|
||||
// SetMode sets the demodulation mode on RX and/or TX slices.
|
||||
// mode: "usb", "lsb", "cw", "am", "sam", "fm", "nfm", "dfm", "digl", "digu", "rtty"
|
||||
// For satellite: RX and TX usually have the same mode (FM for FM sats, LSB/USB for linear)
|
||||
func (c *Client) SetMode(mode string) error {
|
||||
if !c.connected.Load() {
|
||||
return fmt.Errorf("not connected")
|
||||
}
|
||||
mode = strings.ToLower(strings.TrimSpace(mode))
|
||||
var errs []error
|
||||
// Set mode on RX slice
|
||||
if err := c.sendCommand(fmt.Sprintf("slice s %d mode=%s", c.rxSlice, mode)); err != nil {
|
||||
errs = append(errs, fmt.Errorf("RX mode: %w", err))
|
||||
}
|
||||
// Set mode on TX slice (same mode for satellite split operation)
|
||||
if err := c.sendCommand(fmt.Sprintf("slice s %d mode=%s", c.txSlice, mode)); err != nil {
|
||||
errs = append(errs, fmt.Errorf("TX mode: %w", err))
|
||||
}
|
||||
if len(errs) > 0 {
|
||||
return fmt.Errorf("SetMode: %v", errs)
|
||||
}
|
||||
log.Printf("[FlexRadio] Mode set to %s on slices %s/%s",
|
||||
strings.ToUpper(mode), sliceLetter(c.rxSlice), sliceLetter(c.txSlice))
|
||||
return nil
|
||||
}
|
||||
|
||||
// SatModeToFlex converts a satellite DB mode string to a FlexRadio mode.
// FM satellites map to "fm", linear transponders to "lsb"/"usb"; anything
// unrecognized falls back to the safe default "usb".
func SatModeToFlex(satMode string) string {
	table := map[string]string{
		"FM":      "fm",
		"NFM":     "fm",
		"LSB":     "lsb",
		"USB":     "usb",
		"CW":      "cw",
		"CW/DATA": "cw",
		"APRS":    "digl",
		"DATA":    "digl",
		"DIGI":    "digl",
		"AM":      "am",
	}
	if flex, ok := table[strings.ToUpper(strings.TrimSpace(satMode))]; ok {
		return flex
	}
	return "usb" // safe default
}
|
||||
|
||||
// sendCommand writes one SmartSDR command line ("C<seq>|<cmd>\n") with a
// 2 s write deadline. On write failure the client is marked disconnected.
// Takes c.mu — callers must NOT hold it (the mutex is not reentrant).
func (c *Client) sendCommand(cmd string) error {
	c.mu.Lock()
	defer c.mu.Unlock()

	if c.conn == nil {
		return fmt.Errorf("non connecté")
	}

	// Monotonic sequence number; the atomic is belt-and-braces since the
	// mutex is already held here.
	seq := atomic.AddUint32(&c.seqNum, 1)
	line := fmt.Sprintf("C%d|%s\n", seq, cmd)

	c.conn.SetWriteDeadline(time.Now().Add(2 * time.Second))
	_, err := fmt.Fprint(c.conn, line)
	if err != nil {
		c.connected.Store(false)
		return fmt.Errorf("envoi commande: %w", err)
	}
	log.Printf("[FlexRadio] → %s", strings.TrimSpace(line))
	return nil
}
|
||||
|
||||
// readLoop drains radio responses line-by-line until the connection closes,
// then clears the connected flag. Responses are only logged — no parsing yet.
func (c *Client) readLoop() {
	for c.scanner.Scan() {
		line := c.scanner.Text()
		// Log ALL responses for debugging
		log.Printf("[FlexRadio] ← %s", line)
	}
	c.connected.Store(false)
	log.Println("[FlexRadio] Disconnected")
}
|
||||
|
||||
// sliceLetter converts a slice index to its SmartSDR letter (0→"A" … 7→"H").
// Out-of-range indices fall back to the decimal representation. The previous
// version only checked the upper bound, so a negative index panicked on the
// letters[idx] access.
func sliceLetter(idx int) string {
	letters := []string{"A", "B", "C", "D", "E", "F", "G", "H"}
	if idx >= 0 && idx < len(letters) {
		return letters[idx]
	}
	return fmt.Sprintf("%d", idx)
}
|
||||
337
backend/propagator/engine.go
Normal file
337
backend/propagator/engine.go
Normal file
@@ -0,0 +1,337 @@
|
||||
package propagator
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"sort"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"SatMaster/backend/tle"
|
||||
|
||||
"github.com/akhenakh/sgp4"
|
||||
)
|
||||
|
||||
const (
	EarthRadius = 6371.0 // km
	RadToDeg    = 180.0 / math.Pi
)

// SatPosition holds the computed position of a satellite at a given moment,
// both geodetic (lat/lon/alt) and observer-relative (az/el/range).
type SatPosition struct {
	Name      string  `json:"name"`
	Latitude  float64 `json:"lat"`
	Longitude float64 `json:"lon"`
	Altitude  float64 `json:"alt"` // km
	Azimuth   float64 `json:"az"` // degrees 0=N
	Elevation float64 `json:"el"` // degrees above horizon
	Range     float64 `json:"range"` // km from observer
	RangeRate float64 `json:"rangeRate"` // km/s positive=receding
	Footprint float64 `json:"footprint"` // km radius on ground
}

// Pass represents one overhead pass of a satellite, from AOS to LOS.
type Pass struct {
	SatName   string      `json:"satName"`
	AOS       time.Time   `json:"aos"` // acquisition of signal
	LOS       time.Time   `json:"los"` // loss of signal
	MaxEl     float64     `json:"maxEl"` // degrees, peak elevation
	MaxElTime time.Time   `json:"maxElTime"`
	AosAz     float64     `json:"aosAz"` // azimuth at AOS, degrees
	LosAz     float64     `json:"losAz"` // azimuth at LOS, degrees
	MaxElAz   float64     `json:"maxElAz"` // azimuth at peak elevation
	Duration  float64     `json:"duration"` // seconds
	Points    []PassPoint `json:"points"` // sampled az/el track
}

// PassPoint is one az/el sample in a pass track.
type PassPoint struct {
	Time time.Time `json:"t"`
	Az   float64   `json:"az"`
	El   float64   `json:"el"`
}

// Observer holds the QTH location.
type Observer struct {
	Lat float64 // degrees
	Lon float64 // degrees
	Alt float64 // meters above sea level
}

// Engine propagates satellite positions using SGP4.
// The RWMutex guards the observer, which the UI may change at any time.
type Engine struct {
	mu       sync.RWMutex
	observer Observer
}
|
||||
|
||||
func NewEngine() *Engine {
|
||||
return &Engine{
|
||||
observer: Observer{Lat: 45.75, Lon: 4.85, Alt: 200}, // Default: Lyon area
|
||||
}
|
||||
}
|
||||
|
||||
// SetObserver updates the QTH used by all subsequent look-angle and
// pass computations. lat/lon in degrees, altM in meters above sea level.
func (e *Engine) SetObserver(lat, lon, altM float64) {
	e.mu.Lock()
	defer e.mu.Unlock()
	e.observer = Observer{Lat: lat, Lon: lon, Alt: altM}
}

// ObserverPosition returns a copy of the current QTH location.
func (e *Engine) ObserverPosition() Observer {
	e.mu.RLock()
	defer e.mu.RUnlock()
	return e.observer
}
|
||||
|
||||
// Position computes the topocentric position of one satellite at time t,
// using a snapshot of the observer taken under the read lock so the
// propagation itself runs lock-free. Returns nil if the TLE fails to
// parse or propagate.
func (e *Engine) Position(sat *tle.Satellite, t time.Time) *SatPosition {
	e.mu.RLock()
	obs := e.observer
	e.mu.RUnlock()
	return computePosition(sat, obs, t)
}
|
||||
|
||||
// AllPositions computes positions for all loaded satellites.
|
||||
func (e *Engine) AllPositions(sats []*tle.Satellite, t time.Time) []SatPosition {
|
||||
e.mu.RLock()
|
||||
obs := e.observer
|
||||
e.mu.RUnlock()
|
||||
|
||||
result := make([]SatPosition, 0, len(sats))
|
||||
for _, sat := range sats {
|
||||
if pos := computePosition(sat, obs, t); pos != nil {
|
||||
result = append(result, *pos)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ComputePasses predicts upcoming passes of sat over the next `hours`
// hours starting at `start`. The horizon is scanned with an adaptive
// step (30 s when the satellite is well below the horizon, 10 s near
// and above it); AOS and LOS are then refined to 1 s precision by
// bisection. Passes peaking below 1° elevation are discarded as
// unusable.
// NOTE(review): a pass still in progress when the window ends is
// dropped rather than truncated — confirm that is intended.
func (e *Engine) ComputePasses(sat *tle.Satellite, start time.Time, hours float64) []Pass {
	e.mu.RLock()
	obs := e.observer
	e.mu.RUnlock()

	var passes []Pass
	end := start.Add(time.Duration(float64(time.Hour) * hours))
	step := 10 * time.Second
	t := start

	var inPass bool
	var cur *Pass

	for t.Before(end) {
		pos := computePosition(sat, obs, t)
		if pos == nil {
			// Propagation failed at this instant — skip the sample.
			t = t.Add(step)
			continue
		}

		// Rising edge: satellite just crossed above the horizon.
		if pos.Elevation > 0 && !inPass {
			inPass = true
			// Refine AOS inside the last step interval.
			aosT := bisectAOS(sat, obs, t.Add(-step), t)
			aosPos := computePosition(sat, obs, aosT)
			if aosPos == nil {
				aosPos = pos
			}
			cur = &Pass{
				SatName: sat.Name,
				AOS:     aosT,
				AosAz:   aosPos.Azimuth,
				Points:  []PassPoint{},
			}
		}

		// While above the horizon: record the track and track the peak.
		if inPass && cur != nil {
			cur.Points = append(cur.Points, PassPoint{Time: t, Az: pos.Azimuth, El: pos.Elevation})
			if pos.Elevation > cur.MaxEl {
				cur.MaxEl = pos.Elevation
				cur.MaxElTime = t
				cur.MaxElAz = pos.Azimuth
			}
		}

		// Falling edge: satellite just set — finalize the pass.
		if pos.Elevation <= 0 && inPass {
			inPass = false
			if cur != nil {
				losT := bisectLOS(sat, obs, t.Add(-step), t)
				losPos := computePosition(sat, obs, losT)
				if losPos == nil {
					losPos = pos
				}
				cur.LOS = losT
				cur.LosAz = losPos.Azimuth
				cur.Duration = losT.Sub(cur.AOS).Seconds()
				// Keep only passes that peak at >= 1° elevation.
				if cur.MaxEl >= 1.0 {
					passes = append(passes, *cur)
				}
				cur = nil
			}
		}

		// Adaptive step: coarse scan while far below the horizon.
		if pos.Elevation < -15 {
			step = 30 * time.Second
		} else {
			step = 10 * time.Second
		}
		t = t.Add(step)
	}

	// The forward scan already yields chronological order; the sort is
	// a defensive no-op in practice.
	sort.Slice(passes, func(i, j int) bool {
		return passes[i].AOS.Before(passes[j].AOS)
	})
	return passes
}
|
||||
|
||||
// ─── Core SGP4 computation ────────────────────────────────────────────────────
|
||||
|
||||
// parsedTLE caches the orbital elements for a satellite TLE.
|
||||
// akhenakh/sgp4 ParseTLE accepts the 3-line TLE as a single string.
|
||||
func makeTLEString(sat *tle.Satellite) string {
|
||||
return fmt.Sprintf("%s\n%s\n%s", sat.Name, sat.TLE1, sat.TLE2)
|
||||
}
|
||||
|
||||
// computePosition propagates sat to time t with SGP4 and derives both
// the geodetic sub-satellite point and the look angles from obs.
// Returns nil on any parse/propagation/look-angle failure (callers
// treat nil as "skip this sample").
// NOTE(review): the TLE is re-parsed on every call; hot loops such as
// ComputePasses pay that cost at every step — candidate for caching.
func computePosition(sat *tle.Satellite, obs Observer, t time.Time) *SatPosition {
	// Parse TLE — akhenakh/sgp4 accepts the 3-line block as one string
	tleObj, err := sgp4.ParseTLE(makeTLEString(sat))
	if err != nil {
		return nil
	}

	// Propagate to time t → ECI state with geodetic fields
	eciState, err := tleObj.FindPositionAtTime(t)
	if err != nil {
		return nil
	}

	// Convert ECI to geodetic (lat deg, lon deg, alt km)
	lat, lon, altKm := eciState.ToGeodetic()

	// Build observer location
	location := &sgp4.Location{
		Latitude:  obs.Lat, // degrees
		Longitude: obs.Lon, // degrees
		Altitude:  obs.Alt, // meters
	}

	// Build state vector for look angle calculation
	sv := &sgp4.StateVector{
		X:  eciState.Position.X,
		Y:  eciState.Position.Y,
		Z:  eciState.Position.Z,
		VX: eciState.Velocity.X,
		VY: eciState.Velocity.Y,
		VZ: eciState.Velocity.Z,
	}

	// GetLookAngle uses eciState.DateTime (the propagated time) not t
	observation, err := sv.GetLookAngle(location, eciState.DateTime)
	if err != nil {
		return nil
	}

	la := observation.LookAngles
	// Spherical-Earth visibility circle: ground radius within which the
	// satellite is above a 0° horizon.
	footprint := EarthRadius * math.Acos(EarthRadius/(EarthRadius+altKm))

	// Compute range rate by finite difference (2s) - library value is unreliable
	rangeRate := finiteRangeRate(tleObj, location, t)

	return &SatPosition{
		Name:      sat.Name,
		Latitude:  lat,
		Longitude: lon,
		Altitude:  altKm,
		Azimuth:   la.Azimuth,
		Elevation: la.Elevation,
		Range:     la.Range,
		RangeRate: rangeRate,
		Footprint: footprint,
	}
}
|
||||
|
||||
// finiteRangeRate computes the range rate (km/s) by finite difference:
// (slant range at t+2s − slant range at t) / 2. Positive = satellite
// receding, negative = approaching. Returns 0 on any propagation or
// look-angle failure (a neutral value for the Doppler corrector).
func finiteRangeRate(tleObj *sgp4.TLE, loc *sgp4.Location, t time.Time) float64 {
	dt := 2.0 // seconds between the two samples
	t2 := t.Add(time.Duration(dt * float64(time.Second)))

	e1, err1 := tleObj.FindPositionAtTime(t)
	e2, err2 := tleObj.FindPositionAtTime(t2)
	if err1 != nil || err2 != nil {
		return 0
	}

	sv1 := &sgp4.StateVector{X: e1.Position.X, Y: e1.Position.Y, Z: e1.Position.Z,
		VX: e1.Velocity.X, VY: e1.Velocity.Y, VZ: e1.Velocity.Z}
	sv2 := &sgp4.StateVector{X: e2.Position.X, Y: e2.Position.Y, Z: e2.Position.Z,
		VX: e2.Velocity.X, VY: e2.Velocity.Y, VZ: e2.Velocity.Z}

	// err1/err2 are deliberately reused for the look-angle step.
	obs1, err1 := sv1.GetLookAngle(loc, e1.DateTime)
	obs2, err2 := sv2.GetLookAngle(loc, e2.DateTime)
	if err1 != nil || err2 != nil {
		return 0
	}

	return (obs2.LookAngles.Range - obs1.LookAngles.Range) / dt
}
|
||||
|
||||
// bisectAOS finds exact AOS time (precision: 1 second).
|
||||
func bisectAOS(sat *tle.Satellite, obs Observer, lo, hi time.Time) time.Time {
|
||||
for hi.Sub(lo) > time.Second {
|
||||
mid := lo.Add(hi.Sub(lo) / 2)
|
||||
pos := computePosition(sat, obs, mid)
|
||||
if pos != nil && pos.Elevation > 0 {
|
||||
hi = mid
|
||||
} else {
|
||||
lo = mid
|
||||
}
|
||||
}
|
||||
return hi
|
||||
}
|
||||
|
||||
// bisectLOS finds exact LOS time (precision: 1 second).
|
||||
func bisectLOS(sat *tle.Satellite, obs Observer, lo, hi time.Time) time.Time {
|
||||
for hi.Sub(lo) > time.Second {
|
||||
mid := lo.Add(hi.Sub(lo) / 2)
|
||||
pos := computePosition(sat, obs, mid)
|
||||
if pos != nil && pos.Elevation > 0 {
|
||||
lo = mid
|
||||
} else {
|
||||
hi = mid
|
||||
}
|
||||
}
|
||||
return lo
|
||||
}
|
||||
|
||||
// GroundtrackPoint is one sub-satellite lat/lon sample at a given time.
type GroundtrackPoint struct {
	Lat  float64   `json:"lat"` // degrees
	Lon  float64   `json:"lon"` // degrees
	Time time.Time `json:"t"`
}
|
||||
|
||||
// ComputeGroundtrack returns the future orbit track for the next `minutes` minutes.
|
||||
// Points every 30 seconds. Handles the antimeridian by splitting segments.
|
||||
func (e *Engine) ComputeGroundtrack(sat *tle.Satellite, start time.Time, minutes float64) []GroundtrackPoint {
|
||||
e.mu.RLock()
|
||||
defer e.mu.RUnlock()
|
||||
|
||||
var points []GroundtrackPoint
|
||||
end := start.Add(time.Duration(float64(time.Minute) * minutes))
|
||||
step := 30 * time.Second
|
||||
|
||||
// Parse TLE once outside the loop for efficiency
|
||||
tleObj, err := sgp4.ParseTLE(fmt.Sprintf("%s\n%s\n%s", sat.Name, sat.TLE1, sat.TLE2))
|
||||
if err != nil {
|
||||
return points
|
||||
}
|
||||
|
||||
for t := start; t.Before(end); t = t.Add(step) {
|
||||
eciState, err := tleObj.FindPositionAtTime(t)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
lat, lon, _ := eciState.ToGeodetic()
|
||||
points = append(points, GroundtrackPoint{Lat: lat, Lon: lon, Time: t})
|
||||
}
|
||||
return points
|
||||
}
|
||||
220
backend/rotor/pstrotator.go
Normal file
220
backend/rotor/pstrotator.go
Normal file
@@ -0,0 +1,220 @@
|
||||
package rotor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"math"
|
||||
"net"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
)
|
||||
|
||||
// PstRotatorClient controls PstRotator via its UDP XML protocol.
// Protocol: send XML to port N; responses come back on port N+1 (this
// client is send-only and does not listen for them).
// Format: <AZIMUTH>180.0</AZIMUTH> or <ELEVATION>45.0</ELEVATION>,
// wrapped in <PST>...</PST>; multiple commands may be concatenated in
// one datagram.
type PstRotatorClient struct {
	mu          sync.Mutex // guards conn, addr, lastAz/lastEl, thresholds
	conn        *net.UDPConn
	addr        *net.UDPAddr
	connected   atomic.Bool // readable without the mutex (IsConnected)
	lastAz      float64     // last azimuth sent; -999 sentinel forces the next send
	lastEl      float64     // last elevation sent; -999 sentinel forces the next send
	azThreshold float64     // dead-band width in degrees, azimuth
	elThreshold float64     // dead-band width in degrees, elevation
}
|
||||
|
||||
func NewPstRotatorClient() *PstRotatorClient {
|
||||
return &PstRotatorClient{
|
||||
azThreshold: 5.0, // 5° dead-band for Az
|
||||
elThreshold: 5.0,
|
||||
}
|
||||
}
|
||||
|
||||
// Connect (re)opens the UDP socket toward PstRotator at host:port.
// Note: for UDP, Dial does not verify that a listener exists —
// "connected" only means the socket is ready to send.
func (r *PstRotatorClient) Connect(host string, port int) error {
	r.mu.Lock()
	defer r.mu.Unlock()

	// Tear down any existing socket first so Connect can re-point the
	// client at a new host/port.
	if r.connected.Load() {
		r.disconnectLocked()
	}

	addr, err := net.ResolveUDPAddr("udp", fmt.Sprintf("%s:%d", host, port))
	if err != nil {
		return fmt.Errorf("resolve UDP addr: %w", err)
	}

	// "Connected" UDP socket: subsequent Write calls always target addr.
	conn, err := net.DialUDP("udp", nil, addr)
	if err != nil {
		return fmt.Errorf("UDP connect to PstRotator: %w", err)
	}

	r.conn = conn
	r.addr = addr
	r.connected.Store(true)
	// Sentinels force the first SetAzEl/SetAzOnly through the dead-band.
	r.lastAz = -999
	r.lastEl = -999

	log.Printf("[Rotor] Connected to PstRotator at %s:%d", host, port)
	return nil
}
|
||||
|
||||
// Disconnect closes the UDP socket (public entry point; takes the lock).
func (r *PstRotatorClient) Disconnect() {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.disconnectLocked()
}

// disconnectLocked closes the socket and clears the connected flag.
// Caller must hold r.mu.
func (r *PstRotatorClient) disconnectLocked() {
	if r.conn != nil {
		r.conn.Close()
		r.conn = nil
	}
	r.connected.Store(false)
	log.Println("[Rotor] Disconnected")
}

// IsConnected reports whether a socket is currently open (lock-free read
// of the atomic flag).
func (r *PstRotatorClient) IsConnected() bool {
	return r.connected.Load()
}
|
||||
|
||||
// SetAzEl sends azimuth (and optionally elevation) to PstRotator.
|
||||
// El < 0 is treated as azimuth-only (no elevation rotor).
|
||||
func (r *PstRotatorClient) SetAzEl(az, el float64) error {
|
||||
if !r.connected.Load() {
|
||||
return fmt.Errorf("not connected to PstRotator")
|
||||
}
|
||||
|
||||
// Normalize azimuth 0-360
|
||||
az = math.Mod(az, 360.0)
|
||||
if az < 0 {
|
||||
az += 360
|
||||
}
|
||||
// Clamp elevation
|
||||
if el < 0 {
|
||||
el = 0
|
||||
}
|
||||
if el > 90 {
|
||||
el = 90
|
||||
}
|
||||
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
|
||||
if r.conn == nil {
|
||||
return fmt.Errorf("not connected")
|
||||
}
|
||||
|
||||
// Dead-band check (inside mutex — no race on lastAz/lastEl)
|
||||
azChanged := math.Abs(az-r.lastAz) >= r.azThreshold
|
||||
elChanged := math.Abs(el-r.lastEl) >= r.elThreshold
|
||||
|
||||
if !azChanged && !elChanged {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Build XML command wrapped in <PST>...</PST>
|
||||
var cmd string
|
||||
if azChanged && elChanged {
|
||||
cmd = fmt.Sprintf("<PST><AZIMUTH>%.0f</AZIMUTH><ELEVATION>%.0f</ELEVATION></PST>", az, el)
|
||||
} else if azChanged {
|
||||
cmd = fmt.Sprintf("<PST><AZIMUTH>%.0f</AZIMUTH></PST>", az)
|
||||
} else {
|
||||
cmd = fmt.Sprintf("<PST><ELEVATION>%.0f</ELEVATION></PST>", el)
|
||||
}
|
||||
|
||||
log.Printf("[Rotor] UDP → %s", cmd)
|
||||
r.conn.SetWriteDeadline(time.Now().Add(2 * time.Second))
|
||||
if _, err := r.conn.Write([]byte(cmd)); err != nil {
|
||||
r.connected.Store(false)
|
||||
return fmt.Errorf("UDP write: %w", err)
|
||||
}
|
||||
|
||||
if azChanged {
|
||||
r.lastAz = az
|
||||
}
|
||||
if elChanged {
|
||||
r.lastEl = el
|
||||
}
|
||||
log.Printf("[Rotor] → AZ=%.1f° EL=%.1f°", az, el)
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetAzOnly sends only azimuth — for stations with no elevation rotor.
|
||||
func (r *PstRotatorClient) SetAzOnly(az float64) error {
|
||||
if !r.connected.Load() {
|
||||
return fmt.Errorf("not connected")
|
||||
}
|
||||
az = math.Mod(az, 360.0)
|
||||
if az < 0 {
|
||||
az += 360
|
||||
}
|
||||
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
|
||||
if r.conn == nil {
|
||||
return fmt.Errorf("not connected")
|
||||
}
|
||||
|
||||
// Dead-band check (inside mutex — no race on lastAz)
|
||||
if math.Abs(az-r.lastAz) < r.azThreshold {
|
||||
return nil
|
||||
}
|
||||
|
||||
cmd := fmt.Sprintf("<PST><AZIMUTH>%.0f</AZIMUTH></PST>", az)
|
||||
log.Printf("[Rotor] UDP → %s", cmd)
|
||||
r.conn.SetWriteDeadline(time.Now().Add(2 * time.Second))
|
||||
if _, err := r.conn.Write([]byte(cmd)); err != nil {
|
||||
r.connected.Store(false)
|
||||
return fmt.Errorf("UDP write: %w", err)
|
||||
}
|
||||
r.lastAz = az
|
||||
log.Printf("[Rotor] → AZ=%.1f°", az)
|
||||
return nil
|
||||
}
|
||||
|
||||
// StopRotor sends the stop command to halt rotor movement.
func (r *PstRotatorClient) StopRotor() error {
	return r.sendRaw("<PST><STOP>1</STOP></PST>")
}

// QueryAzEl requests the current rotor position — PstRotator answers on
// port+1, which this client does not listen on.
// NOTE(review): unlike the movement commands, this query is not wrapped
// in <PST>…</PST> — confirm this matches the PstRotator UDP protocol.
func (r *PstRotatorClient) QueryAzEl() error {
	return r.sendRaw("<AZ?>1</AZ?><EL?>1</EL?>")
}
|
||||
|
||||
func (r *PstRotatorClient) sendRaw(cmd string) error {
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
|
||||
if r.conn == nil {
|
||||
return fmt.Errorf("not connected")
|
||||
}
|
||||
|
||||
log.Printf("[Rotor] UDP → %s", cmd)
|
||||
r.conn.SetWriteDeadline(time.Now().Add(2 * time.Second))
|
||||
n, err := r.conn.Write([]byte(cmd))
|
||||
if err != nil {
|
||||
r.connected.Store(false)
|
||||
return fmt.Errorf("UDP write: %w", err)
|
||||
}
|
||||
log.Printf("[Rotor] UDP sent %d bytes to %s", n, r.addr)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ResetDeadband forces the next azimuth/elevation command to be sent
// immediately by restoring the -999 sentinel values the dead-band check
// treats as "never sent".
func (r *PstRotatorClient) ResetDeadband() {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.lastAz = -999
	r.lastEl = -999
}

// SetThresholds updates the dead-band width (degrees) for both axes.
func (r *PstRotatorClient) SetThresholds(azDeg, elDeg float64) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.azThreshold = azDeg
	r.elThreshold = elDeg
}
|
||||
283
backend/tle/manager.go
Normal file
283
backend/tle/manager.go
Normal file
@@ -0,0 +1,283 @@
|
||||
package tle
|
||||
|
||||
import (
	"bufio"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"sync"
	"time"
)
|
||||
|
||||
const (
	// Celestrak amateur satellite TLE feed (primary)
	PrimaryURL = "https://celestrak.org/NORAD/elements/gp.php?GROUP=amateur&FORMAT=tle"
	// PE0SAT mirror, used alongside the primary and merged at lower priority
	FallbackURL = "http://tle.pe0sat.nl/kepler/amateur.txt"
	// On-disk cache file name (stored under the user cache directory)
	CacheFile = "satmaster_tle_cache.txt"
	// Intended freshness horizon for cached TLEs, in days.
	// NOTE(review): not referenced in this file — confirm a caller uses it.
	MaxAgeDays = 3
)

// Satellite holds one parsed two-line element set plus its display name.
type Satellite struct {
	Name string
	TLE1 string // TLE line 1 (starts with "1 ")
	TLE2 string // TLE line 2 (starts with "2 ")
}

// Manager handles TLE fetching, disk caching, and satellite lookup.
// Safe for concurrent use; all mutable state is guarded by mu.
type Manager struct {
	mu         sync.RWMutex
	satellites map[string]*Satellite // keyed by upper-cased name
	fetchedAt  time.Time             // when the current set was obtained
	cacheDir   string                // directory holding CacheFile
}
|
||||
|
||||
func NewManager() *Manager {
|
||||
cacheDir, _ := os.UserCacheDir()
|
||||
cacheDir = filepath.Join(cacheDir, "SatMaster")
|
||||
os.MkdirAll(cacheDir, 0755)
|
||||
return &Manager{
|
||||
satellites: make(map[string]*Satellite),
|
||||
cacheDir: cacheDir,
|
||||
}
|
||||
}
|
||||
|
||||
// fetchURLResult holds the outcome of fetching one TLE source URL:
// either the parsed satellites plus the raw body text (kept so it can
// be written to the disk cache), or a non-nil err.
type fetchURLResult struct {
	url  string
	sats map[string]*Satellite
	raw  string
	err  error
}
|
||||
|
||||
// FetchAndCache downloads TLE data from all sources in parallel, merges
// them (primary takes precedence on duplicates), installs the merged
// set, and saves the combined raw text to the disk cache. It fails only
// when every source fails; a cache-write failure is logged but not
// fatal.
func (m *Manager) FetchAndCache() error {
	urls := []string{PrimaryURL, FallbackURL}
	// Each goroutine writes only its own results[i] slot, so no mutex is
	// needed around the slice.
	results := make([]fetchURLResult, len(urls))

	// Fetch all sources concurrently
	var wg sync.WaitGroup
	for i, url := range urls {
		wg.Add(1)
		go func(i int, url string) {
			defer wg.Done()
			log.Printf("[TLE] Fetching from %s", url)
			data, err := fetchURL(url)
			if err != nil {
				log.Printf("[TLE] Failed %s: %v", url, err)
				results[i] = fetchURLResult{url: url, err: err}
				return
			}
			sats, err := parseTLE(data)
			if err != nil || len(sats) == 0 {
				log.Printf("[TLE] Parse failed or empty from %s: %v", url, err)
				results[i] = fetchURLResult{url: url, err: fmt.Errorf("parse failed")}
				return
			}
			log.Printf("[TLE] Fetched %d satellites from %s", len(sats), url)
			results[i] = fetchURLResult{url: url, sats: sats, raw: data}
		}(i, url)
	}
	wg.Wait()

	// Merge in reverse priority order: the fallback is applied first and
	// later (higher-priority) sources only add keys that are still
	// missing — so the primary always wins on duplicates.
	merged := make(map[string]*Satellite)
	var combinedRaw strings.Builder
	anySuccess := false

	for i := len(results) - 1; i >= 0; i-- {
		r := results[i]
		if r.err != nil || r.sats == nil {
			continue
		}
		anySuccess = true
		added := 0
		for k, v := range r.sats {
			if _, exists := merged[k]; !exists {
				merged[k] = v
				added++
			}
		}
		combinedRaw.WriteString(r.raw)
		log.Printf("[TLE] Merged %d new satellites from %s (total: %d)", added, r.url, len(merged))
	}

	if !anySuccess {
		return fmt.Errorf("all TLE sources failed")
	}

	// Save combined raw data to disk cache (best-effort)
	cachePath := filepath.Join(m.cacheDir, CacheFile)
	if err := os.WriteFile(cachePath, []byte(combinedRaw.String()), 0644); err != nil {
		log.Printf("[TLE] Cache write failed: %v", err)
	}

	m.mu.Lock()
	m.satellites = merged
	m.fetchedAt = time.Now()
	m.mu.Unlock()

	log.Printf("[TLE] Total: %d satellites loaded from %d sources", len(merged), len(urls))
	return nil
}
|
||||
|
||||
// LoadLocal loads TLE data from the disk cache, falling back to the
// small bundled TLE set when no cache file can be read. fetchedAt is
// taken from the cache file's mtime so AgeHours reflects real
// staleness.
func (m *Manager) LoadLocal() error {
	cachePath := filepath.Join(m.cacheDir, CacheFile)
	data, err := os.ReadFile(cachePath)
	if err != nil {
		// No (readable) cache on disk — use the built-in fallback set.
		return m.loadBundledFallback()
	}
	sats, err := parseTLE(string(data))
	if err != nil {
		return err
	}
	info, _ := os.Stat(cachePath)
	m.mu.Lock()
	m.satellites = sats
	if info != nil {
		m.fetchedAt = info.ModTime()
	}
	m.mu.Unlock()
	log.Printf("[TLE] Loaded %d satellites from local cache", len(sats))
	return nil
}
|
||||
|
||||
// loadBundledFallback loads a minimal built-in TLE set for common
// amateur sats so the app can start fully offline. fetchedAt is
// back-dated 48 h — presumably so AgeHours reports the data as stale
// and a network refresh is attempted promptly (TODO confirm intent).
func (m *Manager) loadBundledFallback() error {
	sats, err := parseTLE(defaultTLEs)
	if err != nil {
		return err
	}
	m.mu.Lock()
	m.satellites = sats
	m.fetchedAt = time.Now().Add(-48 * time.Hour)
	m.mu.Unlock()
	log.Printf("[TLE] Loaded %d bundled fallback satellites", len(sats))
	return nil
}
|
||||
|
||||
// Get returns a satellite by name (case-insensitive).
|
||||
func (m *Manager) Get(name string) *Satellite {
|
||||
m.mu.RLock()
|
||||
defer m.mu.RUnlock()
|
||||
if s, ok := m.satellites[strings.ToUpper(name)]; ok {
|
||||
return s
|
||||
}
|
||||
upper := strings.ToUpper(name)
|
||||
for k, v := range m.satellites {
|
||||
if strings.Contains(k, upper) {
|
||||
return v
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// All returns all loaded satellites.
|
||||
func (m *Manager) All() []*Satellite {
|
||||
m.mu.RLock()
|
||||
defer m.mu.RUnlock()
|
||||
result := make([]*Satellite, 0, len(m.satellites))
|
||||
for _, s := range m.satellites {
|
||||
result = append(result, s)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// SatelliteNames returns sorted list of satellite names.
|
||||
func (m *Manager) SatelliteNames() []string {
|
||||
m.mu.RLock()
|
||||
defer m.mu.RUnlock()
|
||||
names := make([]string, 0, len(m.satellites))
|
||||
for k := range m.satellites {
|
||||
names = append(names, k)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// AgeHours returns how many hours old the loaded TLE data is, or a
// large sentinel (9999) when nothing has been loaded yet so callers
// treat the data as maximally stale.
func (m *Manager) AgeHours() float64 {
	m.mu.RLock()
	defer m.mu.RUnlock()
	if m.fetchedAt.IsZero() {
		return 9999
	}
	return time.Since(m.fetchedAt).Hours()
}
|
||||
|
||||
// fetchURL downloads the given URL with a 15 s overall timeout and
// returns the response body as a string. Any non-200 status is reported
// as an error.
func fetchURL(url string) (string, error) {
	client := &http.Client{Timeout: 15 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	if resp.StatusCode != 200 {
		return "", fmt.Errorf("HTTP %d", resp.StatusCode)
	}

	data, readErr := io.ReadAll(resp.Body)
	return string(data), readErr
}
|
||||
|
||||
// parseTLE parses raw 3-line-element text (repeated name / line-1 /
// line-2 groups) into a map keyed by upper-cased satellite name. Blank
// lines are stripped first. Returns an error when no valid entry is
// found at all.
func parseTLE(data string) (map[string]*Satellite, error) {
	sats := make(map[string]*Satellite)
	scanner := bufio.NewScanner(strings.NewReader(data))
	var lines []string

	// First pass: collect non-empty, trimmed lines.
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" {
			continue
		}
		lines = append(lines, line)
	}

	// Second pass: walk the lines in (name, line1, line2) triples.
	for i := 0; i+2 < len(lines); i += 3 {
		name := strings.TrimSpace(lines[i])
		tle1 := strings.TrimSpace(lines[i+1])
		tle2 := strings.TrimSpace(lines[i+2])

		// Resync: if this triple is not aligned on a valid "1 "/"2 "
		// pair, back up by 2 so the loop's +3 nets an advance of exactly
		// one line, sliding the window until it realigns.
		if !strings.HasPrefix(tle1, "1 ") || !strings.HasPrefix(tle2, "2 ") {
			i -= 2
			continue
		}

		key := strings.ToUpper(name)
		sats[key] = &Satellite{
			Name: name,
			TLE1: tle1,
			TLE2: tle2,
		}
	}

	if len(sats) == 0 {
		return nil, fmt.Errorf("no valid TLE entries found")
	}
	return sats, nil
}
|
||||
|
||||
const defaultTLEs = `ISS (ZARYA)
|
||||
1 25544U 98067A 24001.50000000 .00016717 00000-0 10270-3 0 9999
|
||||
2 25544 51.6416 247.4627 0006703 130.5360 325.0288 15.50174753471696
|
||||
AO-7
|
||||
1 07530U 74089B 24001.50000000 -.00000023 00000-0 13694-4 0 9999
|
||||
2 07530 101.7044 116.7600 0012184 36.7810 323.4116 12.53590024534125
|
||||
AO-27
|
||||
1 22825U 93061C 24001.50000000 .00000199 00000-0 54717-4 0 9999
|
||||
2 22825 98.6398 100.2553 0008530 335.2107 24.8705 14.29831944581475
|
||||
SO-50
|
||||
1 27607U 02058C 24001.50000000 .00000306 00000-0 71007-4 0 9999
|
||||
2 27607 98.0070 105.9740 0084804 339.5060 19.9560 14.74561589148781
|
||||
FO-29
|
||||
1 24278U 96046B 24001.50000000 .00000056 00000-0 68723-5 0 9999
|
||||
2 24278 98.5355 100.4746 0350771 334.0040 24.2890 13.52863590383849
|
||||
RS-44
|
||||
1 44909U 19096E 24001.50000000 .00000103 00000-0 12583-4 0 9999
|
||||
2 44909 97.6561 101.2437 0009786 341.6020 18.4590 14.93614256218765
|
||||
`
|
||||
BIN
build/appicon.png
Normal file
BIN
build/appicon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 38 KiB |
BIN
build/bin/SatMaster.exe
Normal file
BIN
build/bin/SatMaster.exe
Normal file
Binary file not shown.
BIN
build/windows/icon.ico
Normal file
BIN
build/windows/icon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 66 KiB |
15
build/windows/info.json
Normal file
15
build/windows/info.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"fixed": {
|
||||
"file_version": "{{.Info.ProductVersion}}"
|
||||
},
|
||||
"info": {
|
||||
"0000": {
|
||||
"ProductVersion": "{{.Info.ProductVersion}}",
|
||||
"CompanyName": "{{.Info.CompanyName}}",
|
||||
"FileDescription": "{{.Info.ProductName}}",
|
||||
"LegalCopyright": "{{.Info.Copyright}}",
|
||||
"ProductName": "{{.Info.ProductName}}",
|
||||
"Comments": "{{.Info.Comments}}"
|
||||
}
|
||||
}
|
||||
}
|
||||
15
build/windows/wails.exe.manifest
Normal file
15
build/windows/wails.exe.manifest
Normal file
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly manifestVersion="1.0" xmlns="urn:schemas-microsoft-com:asm.v1" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
|
||||
<assemblyIdentity type="win32" name="com.wails.{{.Name}}" version="{{.Info.ProductVersion}}.0" processorArchitecture="*"/>
|
||||
<dependency>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity type="win32" name="Microsoft.Windows.Common-Controls" version="6.0.0.0" processorArchitecture="*" publicKeyToken="6595b64144ccf1df" language="*"/>
|
||||
</dependentAssembly>
|
||||
</dependency>
|
||||
<asmv3:application>
|
||||
<asmv3:windowsSettings>
|
||||
<dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true/pm</dpiAware> <!-- fallback for Windows 7 and 8 -->
|
||||
<dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">permonitorv2,permonitor</dpiAwareness> <!-- falls back to per-monitor if per-monitor v2 is not supported -->
|
||||
</asmv3:windowsSettings>
|
||||
</asmv3:application>
|
||||
</assembly>
|
||||
18
frontend/dist/assets/index-BB3NBLW5.js
vendored
Normal file
18
frontend/dist/assets/index-BB3NBLW5.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1
frontend/dist/assets/index-axwnIj0t.css
vendored
Normal file
1
frontend/dist/assets/index-axwnIj0t.css
vendored
Normal file
File diff suppressed because one or more lines are too long
4
frontend/dist/assets/leaflet-src-DoEXxWUO.js
vendored
Normal file
4
frontend/dist/assets/leaflet-src-DoEXxWUO.js
vendored
Normal file
File diff suppressed because one or more lines are too long
19
frontend/dist/index.html
vendored
Normal file
19
frontend/dist/index.html
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>SatMaster — F4BPO</title>
|
||||
<!-- Leaflet CSS loaded via Vite in WorldMap component -->
|
||||
<style>
|
||||
* { margin: 0; padding: 0; box-sizing: border-box; }
|
||||
html, body, #app { width: 100%; height: 100%; overflow: hidden; }
|
||||
body { background: #080c16; }
|
||||
</style>
|
||||
<script type="module" crossorigin src="/assets/index-BB3NBLW5.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="/assets/index-axwnIj0t.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
</body>
|
||||
</html>
|
||||
18
frontend/index.html
Normal file
18
frontend/index.html
Normal file
@@ -0,0 +1,18 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>SatMaster — F4BPO</title>
|
||||
<!-- Leaflet CSS loaded via Vite in WorldMap component -->
|
||||
<style>
|
||||
* { margin: 0; padding: 0; box-sizing: border-box; }
|
||||
html, body, #app { width: 100%; height: 100%; overflow: hidden; }
|
||||
body { background: #080c16; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
16
frontend/node_modules/.bin/acorn
generated
vendored
Normal file
16
frontend/node_modules/.bin/acorn
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../acorn/bin/acorn" "$@"
|
||||
else
|
||||
exec node "$basedir/../acorn/bin/acorn" "$@"
|
||||
fi
|
||||
17
frontend/node_modules/.bin/acorn.cmd
generated
vendored
Normal file
17
frontend/node_modules/.bin/acorn.cmd
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\acorn\bin\acorn" %*
|
||||
28
frontend/node_modules/.bin/acorn.ps1
generated
vendored
Normal file
28
frontend/node_modules/.bin/acorn.ps1
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../acorn/bin/acorn" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../acorn/bin/acorn" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../acorn/bin/acorn" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../acorn/bin/acorn" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
||||
16
frontend/node_modules/.bin/esbuild
generated
vendored
Normal file
16
frontend/node_modules/.bin/esbuild
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../esbuild/bin/esbuild" "$@"
|
||||
else
|
||||
exec node "$basedir/../esbuild/bin/esbuild" "$@"
|
||||
fi
|
||||
17
frontend/node_modules/.bin/esbuild.cmd
generated
vendored
Normal file
17
frontend/node_modules/.bin/esbuild.cmd
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\esbuild\bin\esbuild" %*
|
||||
28
frontend/node_modules/.bin/esbuild.ps1
generated
vendored
Normal file
28
frontend/node_modules/.bin/esbuild.ps1
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../esbuild/bin/esbuild" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../esbuild/bin/esbuild" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../esbuild/bin/esbuild" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../esbuild/bin/esbuild" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
||||
16
frontend/node_modules/.bin/nanoid
generated
vendored
Normal file
16
frontend/node_modules/.bin/nanoid
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../nanoid/bin/nanoid.cjs" "$@"
|
||||
else
|
||||
exec node "$basedir/../nanoid/bin/nanoid.cjs" "$@"
|
||||
fi
|
||||
17
frontend/node_modules/.bin/nanoid.cmd
generated
vendored
Normal file
17
frontend/node_modules/.bin/nanoid.cmd
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nanoid\bin\nanoid.cjs" %*
|
||||
28
frontend/node_modules/.bin/nanoid.ps1
generated
vendored
Normal file
28
frontend/node_modules/.bin/nanoid.ps1
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../nanoid/bin/nanoid.cjs" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
||||
16
frontend/node_modules/.bin/rollup
generated
vendored
Normal file
16
frontend/node_modules/.bin/rollup
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../rollup/dist/bin/rollup" "$@"
|
||||
else
|
||||
exec node "$basedir/../rollup/dist/bin/rollup" "$@"
|
||||
fi
|
||||
17
frontend/node_modules/.bin/rollup.cmd
generated
vendored
Normal file
17
frontend/node_modules/.bin/rollup.cmd
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rollup\dist\bin\rollup" %*
|
||||
28
frontend/node_modules/.bin/rollup.ps1
generated
vendored
Normal file
28
frontend/node_modules/.bin/rollup.ps1
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../rollup/dist/bin/rollup" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../rollup/dist/bin/rollup" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../rollup/dist/bin/rollup" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../rollup/dist/bin/rollup" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
||||
16
frontend/node_modules/.bin/tsc
generated
vendored
Normal file
16
frontend/node_modules/.bin/tsc
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../typescript/bin/tsc" "$@"
|
||||
else
|
||||
exec node "$basedir/../typescript/bin/tsc" "$@"
|
||||
fi
|
||||
17
frontend/node_modules/.bin/tsc.cmd
generated
vendored
Normal file
17
frontend/node_modules/.bin/tsc.cmd
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\typescript\bin\tsc" %*
|
||||
28
frontend/node_modules/.bin/tsc.ps1
generated
vendored
Normal file
28
frontend/node_modules/.bin/tsc.ps1
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../typescript/bin/tsc" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../typescript/bin/tsc" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../typescript/bin/tsc" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../typescript/bin/tsc" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
||||
16
frontend/node_modules/.bin/tsserver
generated
vendored
Normal file
16
frontend/node_modules/.bin/tsserver
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../typescript/bin/tsserver" "$@"
|
||||
else
|
||||
exec node "$basedir/../typescript/bin/tsserver" "$@"
|
||||
fi
|
||||
17
frontend/node_modules/.bin/tsserver.cmd
generated
vendored
Normal file
17
frontend/node_modules/.bin/tsserver.cmd
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\typescript\bin\tsserver" %*
|
||||
28
frontend/node_modules/.bin/tsserver.ps1
generated
vendored
Normal file
28
frontend/node_modules/.bin/tsserver.ps1
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../typescript/bin/tsserver" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../typescript/bin/tsserver" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../typescript/bin/tsserver" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../typescript/bin/tsserver" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
||||
16
frontend/node_modules/.bin/vite
generated
vendored
Normal file
16
frontend/node_modules/.bin/vite
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/bin/sh
|
||||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
|
||||
|
||||
case `uname` in
|
||||
*CYGWIN*|*MINGW*|*MSYS*)
|
||||
if command -v cygpath > /dev/null 2>&1; then
|
||||
basedir=`cygpath -w "$basedir"`
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -x "$basedir/node" ]; then
|
||||
exec "$basedir/node" "$basedir/../vite/bin/vite.js" "$@"
|
||||
else
|
||||
exec node "$basedir/../vite/bin/vite.js" "$@"
|
||||
fi
|
||||
17
frontend/node_modules/.bin/vite.cmd
generated
vendored
Normal file
17
frontend/node_modules/.bin/vite.cmd
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
@ECHO off
|
||||
GOTO start
|
||||
:find_dp0
|
||||
SET dp0=%~dp0
|
||||
EXIT /b
|
||||
:start
|
||||
SETLOCAL
|
||||
CALL :find_dp0
|
||||
|
||||
IF EXIST "%dp0%\node.exe" (
|
||||
SET "_prog=%dp0%\node.exe"
|
||||
) ELSE (
|
||||
SET "_prog=node"
|
||||
SET PATHEXT=%PATHEXT:;.JS;=;%
|
||||
)
|
||||
|
||||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\vite\bin\vite.js" %*
|
||||
28
frontend/node_modules/.bin/vite.ps1
generated
vendored
Normal file
28
frontend/node_modules/.bin/vite.ps1
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env pwsh
|
||||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
|
||||
|
||||
$exe=""
|
||||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
|
||||
# Fix case when both the Windows and Linux builds of Node
|
||||
# are installed in the same directory
|
||||
$exe=".exe"
|
||||
}
|
||||
$ret=0
|
||||
if (Test-Path "$basedir/node$exe") {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "$basedir/node$exe" "$basedir/../vite/bin/vite.js" $args
|
||||
} else {
|
||||
& "$basedir/node$exe" "$basedir/../vite/bin/vite.js" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
} else {
|
||||
# Support pipeline input
|
||||
if ($MyInvocation.ExpectingInput) {
|
||||
$input | & "node$exe" "$basedir/../vite/bin/vite.js" $args
|
||||
} else {
|
||||
& "node$exe" "$basedir/../vite/bin/vite.js" $args
|
||||
}
|
||||
$ret=$LASTEXITCODE
|
||||
}
|
||||
exit $ret
|
||||
606
frontend/node_modules/.package-lock.json
generated
vendored
Normal file
606
frontend/node_modules/.package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,606 @@
|
||||
{
|
||||
"name": "satmaster-frontend",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@ampproject/remapping": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
|
||||
"integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@jridgewell/gen-mapping": "^0.3.5",
|
||||
"@jridgewell/trace-mapping": "^0.3.24"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/win32-x64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
|
||||
"integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/gen-mapping": {
|
||||
"version": "0.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
|
||||
"integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/sourcemap-codec": "^1.5.0",
|
||||
"@jridgewell/trace-mapping": "^0.3.24"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/resolve-uri": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
|
||||
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/sourcemap-codec": {
|
||||
"version": "1.5.5",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
|
||||
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@jridgewell/trace-mapping": {
|
||||
"version": "0.3.31",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
|
||||
"integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/resolve-uri": "^3.1.0",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||
}
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-x64-gnu": {
|
||||
"version": "4.60.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.0.tgz",
|
||||
"integrity": "sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@rollup/rollup-win32-x64-msvc": {
|
||||
"version": "4.60.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.0.tgz",
|
||||
"integrity": "sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@sveltejs/vite-plugin-svelte": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-3.1.2.tgz",
|
||||
"integrity": "sha512-Txsm1tJvtiYeLUVRNqxZGKR/mI+CzuIQuc2gn+YCs9rMTowpNZ2Nqt53JdL8KF9bLhAf2ruR/dr9eZCwdTriRA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@sveltejs/vite-plugin-svelte-inspector": "^2.1.0",
|
||||
"debug": "^4.3.4",
|
||||
"deepmerge": "^4.3.1",
|
||||
"kleur": "^4.1.5",
|
||||
"magic-string": "^0.30.10",
|
||||
"svelte-hmr": "^0.16.0",
|
||||
"vitefu": "^0.2.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.0.0 || >=20"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"svelte": "^4.0.0 || ^5.0.0-next.0",
|
||||
"vite": "^5.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@sveltejs/vite-plugin-svelte-inspector": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-2.1.0.tgz",
|
||||
"integrity": "sha512-9QX28IymvBlSCqsCll5t0kQVxipsfhFFL+L2t3nTWfXnddYwxBuAEtTtlaVQpRz9c37BhJjltSeY4AJSC03SSg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.0.0 || >=20"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@sveltejs/vite-plugin-svelte": "^3.0.0",
|
||||
"svelte": "^4.0.0 || ^5.0.0-next.0",
|
||||
"vite": "^5.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@tsconfig/svelte": {
|
||||
"version": "5.0.8",
|
||||
"resolved": "https://registry.npmjs.org/@tsconfig/svelte/-/svelte-5.0.8.tgz",
|
||||
"integrity": "sha512-UkNnw1/oFEfecR8ypyHIQuWYdkPvHiwcQ78sh+ymIiYoF+uc5H1UBetbjyqT+vgGJ3qQN6nhucJviX6HesWtKQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
|
||||
"integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/acorn": {
|
||||
"version": "8.16.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz",
|
||||
"integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aria-query": {
|
||||
"version": "5.3.2",
|
||||
"resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz",
|
||||
"integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/axobject-query": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz",
|
||||
"integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/code-red": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/code-red/-/code-red-1.0.4.tgz",
|
||||
"integrity": "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/sourcemap-codec": "^1.4.15",
|
||||
"@types/estree": "^1.0.1",
|
||||
"acorn": "^8.10.0",
|
||||
"estree-walker": "^3.0.3",
|
||||
"periscopic": "^3.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/css-tree": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz",
|
||||
"integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"mdn-data": "2.0.30",
|
||||
"source-map-js": "^1.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.4.3",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
||||
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"supports-color": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/deepmerge": {
|
||||
"version": "4.3.1",
|
||||
"resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz",
|
||||
"integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
|
||||
"integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"esbuild": "bin/esbuild"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@esbuild/aix-ppc64": "0.21.5",
|
||||
"@esbuild/android-arm": "0.21.5",
|
||||
"@esbuild/android-arm64": "0.21.5",
|
||||
"@esbuild/android-x64": "0.21.5",
|
||||
"@esbuild/darwin-arm64": "0.21.5",
|
||||
"@esbuild/darwin-x64": "0.21.5",
|
||||
"@esbuild/freebsd-arm64": "0.21.5",
|
||||
"@esbuild/freebsd-x64": "0.21.5",
|
||||
"@esbuild/linux-arm": "0.21.5",
|
||||
"@esbuild/linux-arm64": "0.21.5",
|
||||
"@esbuild/linux-ia32": "0.21.5",
|
||||
"@esbuild/linux-loong64": "0.21.5",
|
||||
"@esbuild/linux-mips64el": "0.21.5",
|
||||
"@esbuild/linux-ppc64": "0.21.5",
|
||||
"@esbuild/linux-riscv64": "0.21.5",
|
||||
"@esbuild/linux-s390x": "0.21.5",
|
||||
"@esbuild/linux-x64": "0.21.5",
|
||||
"@esbuild/netbsd-x64": "0.21.5",
|
||||
"@esbuild/openbsd-x64": "0.21.5",
|
||||
"@esbuild/sunos-x64": "0.21.5",
|
||||
"@esbuild/win32-arm64": "0.21.5",
|
||||
"@esbuild/win32-ia32": "0.21.5",
|
||||
"@esbuild/win32-x64": "0.21.5"
|
||||
}
|
||||
},
|
||||
"node_modules/estree-walker": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz",
|
||||
"integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/estree": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-reference": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz",
|
||||
"integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/estree": "^1.0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/kleur": {
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz",
|
||||
"integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/leaflet": {
|
||||
"version": "1.9.4",
|
||||
"resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.9.4.tgz",
|
||||
"integrity": "sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA==",
|
||||
"license": "BSD-2-Clause"
|
||||
},
|
||||
"node_modules/locate-character": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz",
|
||||
"integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/magic-string": {
|
||||
"version": "0.30.21",
|
||||
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
|
||||
"integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/sourcemap-codec": "^1.5.5"
|
||||
}
|
||||
},
|
||||
"node_modules/mdn-data": {
|
||||
"version": "2.0.30",
|
||||
"resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
|
||||
"integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==",
|
||||
"dev": true,
|
||||
"license": "CC0-1.0"
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.3",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
|
||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/nanoid": {
|
||||
"version": "3.3.11",
|
||||
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
|
||||
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"nanoid": "bin/nanoid.cjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/periscopic": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz",
|
||||
"integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/estree": "^1.0.0",
|
||||
"estree-walker": "^3.0.0",
|
||||
"is-reference": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/picocolors": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/postcss": {
|
||||
"version": "8.5.8",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz",
|
||||
"integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/postcss/"
|
||||
},
|
||||
{
|
||||
"type": "tidelift",
|
||||
"url": "https://tidelift.com/funding/github/npm/postcss"
|
||||
},
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"nanoid": "^3.3.11",
|
||||
"picocolors": "^1.1.1",
|
||||
"source-map-js": "^1.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^10 || ^12 || >=14"
|
||||
}
|
||||
},
|
||||
"node_modules/rollup": {
|
||||
"version": "4.60.0",
|
||||
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.0.tgz",
|
||||
"integrity": "sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/estree": "1.0.8"
|
||||
},
|
||||
"bin": {
|
||||
"rollup": "dist/bin/rollup"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0",
|
||||
"npm": ">=8.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@rollup/rollup-android-arm-eabi": "4.60.0",
|
||||
"@rollup/rollup-android-arm64": "4.60.0",
|
||||
"@rollup/rollup-darwin-arm64": "4.60.0",
|
||||
"@rollup/rollup-darwin-x64": "4.60.0",
|
||||
"@rollup/rollup-freebsd-arm64": "4.60.0",
|
||||
"@rollup/rollup-freebsd-x64": "4.60.0",
|
||||
"@rollup/rollup-linux-arm-gnueabihf": "4.60.0",
|
||||
"@rollup/rollup-linux-arm-musleabihf": "4.60.0",
|
||||
"@rollup/rollup-linux-arm64-gnu": "4.60.0",
|
||||
"@rollup/rollup-linux-arm64-musl": "4.60.0",
|
||||
"@rollup/rollup-linux-loong64-gnu": "4.60.0",
|
||||
"@rollup/rollup-linux-loong64-musl": "4.60.0",
|
||||
"@rollup/rollup-linux-ppc64-gnu": "4.60.0",
|
||||
"@rollup/rollup-linux-ppc64-musl": "4.60.0",
|
||||
"@rollup/rollup-linux-riscv64-gnu": "4.60.0",
|
||||
"@rollup/rollup-linux-riscv64-musl": "4.60.0",
|
||||
"@rollup/rollup-linux-s390x-gnu": "4.60.0",
|
||||
"@rollup/rollup-linux-x64-gnu": "4.60.0",
|
||||
"@rollup/rollup-linux-x64-musl": "4.60.0",
|
||||
"@rollup/rollup-openbsd-x64": "4.60.0",
|
||||
"@rollup/rollup-openharmony-arm64": "4.60.0",
|
||||
"@rollup/rollup-win32-arm64-msvc": "4.60.0",
|
||||
"@rollup/rollup-win32-ia32-msvc": "4.60.0",
|
||||
"@rollup/rollup-win32-x64-gnu": "4.60.0",
|
||||
"@rollup/rollup-win32-x64-msvc": "4.60.0",
|
||||
"fsevents": "~2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map-js": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
||||
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
|
||||
"dev": true,
|
||||
"license": "BSD-3-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/svelte": {
|
||||
"version": "4.2.20",
|
||||
"resolved": "https://registry.npmjs.org/svelte/-/svelte-4.2.20.tgz",
|
||||
"integrity": "sha512-eeEgGc2DtiUil5ANdtd8vPwt9AgaMdnuUFnPft9F5oMvU/FHu5IHFic+p1dR/UOB7XU2mX2yHW+NcTch4DCh5Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@ampproject/remapping": "^2.2.1",
|
||||
"@jridgewell/sourcemap-codec": "^1.4.15",
|
||||
"@jridgewell/trace-mapping": "^0.3.18",
|
||||
"@types/estree": "^1.0.1",
|
||||
"acorn": "^8.9.0",
|
||||
"aria-query": "^5.3.0",
|
||||
"axobject-query": "^4.0.0",
|
||||
"code-red": "^1.0.3",
|
||||
"css-tree": "^2.3.1",
|
||||
"estree-walker": "^3.0.3",
|
||||
"is-reference": "^3.0.1",
|
||||
"locate-character": "^3.0.0",
|
||||
"magic-string": "^0.30.4",
|
||||
"periscopic": "^3.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/svelte-hmr": {
|
||||
"version": "0.16.0",
|
||||
"resolved": "https://registry.npmjs.org/svelte-hmr/-/svelte-hmr-0.16.0.tgz",
|
||||
"integrity": "sha512-Gyc7cOS3VJzLlfj7wKS0ZnzDVdv3Pn2IuVeJPk9m2skfhcu5bq3wtIZyQGggr7/Iim5rH5cncyQft/kRLupcnA==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": "^12.20 || ^14.13.1 || >= 16"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"svelte": "^3.19.0 || ^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.9.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
|
||||
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/vite": {
|
||||
"version": "5.4.21",
|
||||
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz",
|
||||
"integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"esbuild": "^0.21.3",
|
||||
"postcss": "^8.4.43",
|
||||
"rollup": "^4.20.0"
|
||||
},
|
||||
"bin": {
|
||||
"vite": "bin/vite.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.0.0 || >=20.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/vitejs/vite?sponsor=1"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "~2.3.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/node": "^18.0.0 || >=20.0.0",
|
||||
"less": "*",
|
||||
"lightningcss": "^1.21.0",
|
||||
"sass": "*",
|
||||
"sass-embedded": "*",
|
||||
"stylus": "*",
|
||||
"sugarss": "*",
|
||||
"terser": "^5.4.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@types/node": {
|
||||
"optional": true
|
||||
},
|
||||
"less": {
|
||||
"optional": true
|
||||
},
|
||||
"lightningcss": {
|
||||
"optional": true
|
||||
},
|
||||
"sass": {
|
||||
"optional": true
|
||||
},
|
||||
"sass-embedded": {
|
||||
"optional": true
|
||||
},
|
||||
"stylus": {
|
||||
"optional": true
|
||||
},
|
||||
"sugarss": {
|
||||
"optional": true
|
||||
},
|
||||
"terser": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/vitefu": {
|
||||
"version": "0.2.5",
|
||||
"resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.5.tgz",
|
||||
"integrity": "sha512-SgHtMLoqaeeGnd2evZ849ZbACbnwQCIwRH57t18FxcXoZop0uQu0uzlIhJBlF/eWVzuce0sHeqPcDo+evVcg8Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"vite": "^3.0.0 || ^4.0.0 || ^5.0.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"vite": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
1
frontend/node_modules/.vite/_svelte_metadata.json
generated
vendored
Normal file
1
frontend/node_modules/.vite/_svelte_metadata.json
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"compilerOptions":{"css":"external","dev":true},"extensions":[".svelte"]}
|
||||
79
frontend/node_modules/.vite/deps/_metadata.json
generated
vendored
Normal file
79
frontend/node_modules/.vite/deps/_metadata.json
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
{
|
||||
"hash": "3bb479c0",
|
||||
"configHash": "2b06be3b",
|
||||
"lockfileHash": "eacb5f78",
|
||||
"browserHash": "6bd9103c",
|
||||
"optimized": {
|
||||
"svelte/animate": {
|
||||
"src": "../../svelte/src/runtime/animate/index.js",
|
||||
"file": "svelte_animate.js",
|
||||
"fileHash": "b22864c4",
|
||||
"needsInterop": false
|
||||
},
|
||||
"svelte/easing": {
|
||||
"src": "../../svelte/src/runtime/easing/index.js",
|
||||
"file": "svelte_easing.js",
|
||||
"fileHash": "2d567d63",
|
||||
"needsInterop": false
|
||||
},
|
||||
"svelte/internal": {
|
||||
"src": "../../svelte/src/runtime/internal/index.js",
|
||||
"file": "svelte_internal.js",
|
||||
"fileHash": "8e584915",
|
||||
"needsInterop": false
|
||||
},
|
||||
"svelte/internal/disclose-version": {
|
||||
"src": "../../svelte/src/runtime/internal/disclose-version/index.js",
|
||||
"file": "svelte_internal_disclose-version.js",
|
||||
"fileHash": "6111c24e",
|
||||
"needsInterop": false
|
||||
},
|
||||
"svelte/motion": {
|
||||
"src": "../../svelte/src/runtime/motion/index.js",
|
||||
"file": "svelte_motion.js",
|
||||
"fileHash": "41716b01",
|
||||
"needsInterop": false
|
||||
},
|
||||
"svelte/store": {
|
||||
"src": "../../svelte/src/runtime/store/index.js",
|
||||
"file": "svelte_store.js",
|
||||
"fileHash": "cce96641",
|
||||
"needsInterop": false
|
||||
},
|
||||
"svelte/transition": {
|
||||
"src": "../../svelte/src/runtime/transition/index.js",
|
||||
"file": "svelte_transition.js",
|
||||
"fileHash": "e218bac8",
|
||||
"needsInterop": false
|
||||
},
|
||||
"svelte": {
|
||||
"src": "../../svelte/src/runtime/index.js",
|
||||
"file": "svelte.js",
|
||||
"fileHash": "ff9d3edd",
|
||||
"needsInterop": false
|
||||
},
|
||||
"leaflet": {
|
||||
"src": "../../leaflet/dist/leaflet-src.js",
|
||||
"file": "leaflet.js",
|
||||
"fileHash": "bfe3edfc",
|
||||
"needsInterop": true
|
||||
}
|
||||
},
|
||||
"chunks": {
|
||||
"chunk-7BDCSBRK": {
|
||||
"file": "chunk-7BDCSBRK.js"
|
||||
},
|
||||
"chunk-ETWSND26": {
|
||||
"file": "chunk-ETWSND26.js"
|
||||
},
|
||||
"chunk-LBLMGS3B": {
|
||||
"file": "chunk-LBLMGS3B.js"
|
||||
},
|
||||
"chunk-672HPU4M": {
|
||||
"file": "chunk-672HPU4M.js"
|
||||
},
|
||||
"chunk-JVWSFFO4": {
|
||||
"file": "chunk-JVWSFFO4.js"
|
||||
}
|
||||
}
|
||||
}
|
||||
9
frontend/node_modules/.vite/deps/chunk-672HPU4M.js
generated
vendored
Normal file
9
frontend/node_modules/.vite/deps/chunk-672HPU4M.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
// node_modules/svelte/src/shared/version.js
|
||||
var VERSION = "4.2.20";
|
||||
var PUBLIC_VERSION = "4";
|
||||
|
||||
export {
|
||||
VERSION,
|
||||
PUBLIC_VERSION
|
||||
};
|
||||
//# sourceMappingURL=chunk-672HPU4M.js.map
|
||||
7
frontend/node_modules/.vite/deps/chunk-672HPU4M.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/chunk-672HPU4M.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": ["../../svelte/src/shared/version.js"],
|
||||
"sourcesContent": ["// generated during release, do not modify\n\n/**\n * The current version, as set in package.json.\n *\n * https://svelte.dev/docs/svelte-compiler#svelte-version\n * @type {string}\n */\nexport const VERSION = '4.2.20';\nexport const PUBLIC_VERSION = '4';\n"],
|
||||
"mappings": ";AAQO,IAAM,UAAU;AAChB,IAAM,iBAAiB;",
|
||||
"names": []
|
||||
}
|
||||
117
frontend/node_modules/.vite/deps/chunk-7BDCSBRK.js
generated
vendored
Normal file
117
frontend/node_modules/.vite/deps/chunk-7BDCSBRK.js
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
import {
|
||||
is_function,
|
||||
noop,
|
||||
run_all,
|
||||
safe_not_equal,
|
||||
subscribe
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
|
||||
// node_modules/svelte/src/runtime/store/index.js
|
||||
var subscriber_queue = [];
|
||||
function readable(value, start) {
|
||||
return {
|
||||
subscribe: writable(value, start).subscribe
|
||||
};
|
||||
}
|
||||
function writable(value, start = noop) {
|
||||
let stop;
|
||||
const subscribers = /* @__PURE__ */ new Set();
|
||||
function set(new_value) {
|
||||
if (safe_not_equal(value, new_value)) {
|
||||
value = new_value;
|
||||
if (stop) {
|
||||
const run_queue = !subscriber_queue.length;
|
||||
for (const subscriber of subscribers) {
|
||||
subscriber[1]();
|
||||
subscriber_queue.push(subscriber, value);
|
||||
}
|
||||
if (run_queue) {
|
||||
for (let i = 0; i < subscriber_queue.length; i += 2) {
|
||||
subscriber_queue[i][0](subscriber_queue[i + 1]);
|
||||
}
|
||||
subscriber_queue.length = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function update(fn) {
|
||||
set(fn(value));
|
||||
}
|
||||
function subscribe2(run, invalidate = noop) {
|
||||
const subscriber = [run, invalidate];
|
||||
subscribers.add(subscriber);
|
||||
if (subscribers.size === 1) {
|
||||
stop = start(set, update) || noop;
|
||||
}
|
||||
run(value);
|
||||
return () => {
|
||||
subscribers.delete(subscriber);
|
||||
if (subscribers.size === 0 && stop) {
|
||||
stop();
|
||||
stop = null;
|
||||
}
|
||||
};
|
||||
}
|
||||
return { set, update, subscribe: subscribe2 };
|
||||
}
|
||||
function derived(stores, fn, initial_value) {
|
||||
const single = !Array.isArray(stores);
|
||||
const stores_array = single ? [stores] : stores;
|
||||
if (!stores_array.every(Boolean)) {
|
||||
throw new Error("derived() expects stores as input, got a falsy value");
|
||||
}
|
||||
const auto = fn.length < 2;
|
||||
return readable(initial_value, (set, update) => {
|
||||
let started = false;
|
||||
const values = [];
|
||||
let pending = 0;
|
||||
let cleanup = noop;
|
||||
const sync = () => {
|
||||
if (pending) {
|
||||
return;
|
||||
}
|
||||
cleanup();
|
||||
const result = fn(single ? values[0] : values, set, update);
|
||||
if (auto) {
|
||||
set(result);
|
||||
} else {
|
||||
cleanup = is_function(result) ? result : noop;
|
||||
}
|
||||
};
|
||||
const unsubscribers = stores_array.map(
|
||||
(store, i) => subscribe(
|
||||
store,
|
||||
(value) => {
|
||||
values[i] = value;
|
||||
pending &= ~(1 << i);
|
||||
if (started) {
|
||||
sync();
|
||||
}
|
||||
},
|
||||
() => {
|
||||
pending |= 1 << i;
|
||||
}
|
||||
)
|
||||
);
|
||||
started = true;
|
||||
sync();
|
||||
return function stop() {
|
||||
run_all(unsubscribers);
|
||||
cleanup();
|
||||
started = false;
|
||||
};
|
||||
});
|
||||
}
|
||||
function readonly(store) {
|
||||
return {
|
||||
subscribe: store.subscribe.bind(store)
|
||||
};
|
||||
}
|
||||
|
||||
export {
|
||||
readable,
|
||||
writable,
|
||||
derived,
|
||||
readonly
|
||||
};
|
||||
//# sourceMappingURL=chunk-7BDCSBRK.js.map
|
||||
7
frontend/node_modules/.vite/deps/chunk-7BDCSBRK.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/chunk-7BDCSBRK.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
144
frontend/node_modules/.vite/deps/chunk-ETWSND26.js
generated
vendored
Normal file
144
frontend/node_modules/.vite/deps/chunk-ETWSND26.js
generated
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
// node_modules/svelte/src/runtime/easing/index.js
|
||||
function backInOut(t) {
|
||||
const s = 1.70158 * 1.525;
|
||||
if ((t *= 2) < 1) return 0.5 * (t * t * ((s + 1) * t - s));
|
||||
return 0.5 * ((t -= 2) * t * ((s + 1) * t + s) + 2);
|
||||
}
|
||||
function backIn(t) {
|
||||
const s = 1.70158;
|
||||
return t * t * ((s + 1) * t - s);
|
||||
}
|
||||
function backOut(t) {
|
||||
const s = 1.70158;
|
||||
return --t * t * ((s + 1) * t + s) + 1;
|
||||
}
|
||||
function bounceOut(t) {
|
||||
const a = 4 / 11;
|
||||
const b = 8 / 11;
|
||||
const c = 9 / 10;
|
||||
const ca = 4356 / 361;
|
||||
const cb = 35442 / 1805;
|
||||
const cc = 16061 / 1805;
|
||||
const t2 = t * t;
|
||||
return t < a ? 7.5625 * t2 : t < b ? 9.075 * t2 - 9.9 * t + 3.4 : t < c ? ca * t2 - cb * t + cc : 10.8 * t * t - 20.52 * t + 10.72;
|
||||
}
|
||||
function bounceInOut(t) {
|
||||
return t < 0.5 ? 0.5 * (1 - bounceOut(1 - t * 2)) : 0.5 * bounceOut(t * 2 - 1) + 0.5;
|
||||
}
|
||||
function bounceIn(t) {
|
||||
return 1 - bounceOut(1 - t);
|
||||
}
|
||||
function circInOut(t) {
|
||||
if ((t *= 2) < 1) return -0.5 * (Math.sqrt(1 - t * t) - 1);
|
||||
return 0.5 * (Math.sqrt(1 - (t -= 2) * t) + 1);
|
||||
}
|
||||
function circIn(t) {
|
||||
return 1 - Math.sqrt(1 - t * t);
|
||||
}
|
||||
function circOut(t) {
|
||||
return Math.sqrt(1 - --t * t);
|
||||
}
|
||||
function cubicInOut(t) {
|
||||
return t < 0.5 ? 4 * t * t * t : 0.5 * Math.pow(2 * t - 2, 3) + 1;
|
||||
}
|
||||
function cubicIn(t) {
|
||||
return t * t * t;
|
||||
}
|
||||
function cubicOut(t) {
|
||||
const f = t - 1;
|
||||
return f * f * f + 1;
|
||||
}
|
||||
function elasticInOut(t) {
|
||||
return t < 0.5 ? 0.5 * Math.sin(13 * Math.PI / 2 * 2 * t) * Math.pow(2, 10 * (2 * t - 1)) : 0.5 * Math.sin(-13 * Math.PI / 2 * (2 * t - 1 + 1)) * Math.pow(2, -10 * (2 * t - 1)) + 1;
|
||||
}
|
||||
function elasticIn(t) {
|
||||
return Math.sin(13 * t * Math.PI / 2) * Math.pow(2, 10 * (t - 1));
|
||||
}
|
||||
function elasticOut(t) {
|
||||
return Math.sin(-13 * (t + 1) * Math.PI / 2) * Math.pow(2, -10 * t) + 1;
|
||||
}
|
||||
function expoInOut(t) {
|
||||
return t === 0 || t === 1 ? t : t < 0.5 ? 0.5 * Math.pow(2, 20 * t - 10) : -0.5 * Math.pow(2, 10 - t * 20) + 1;
|
||||
}
|
||||
function expoIn(t) {
|
||||
return t === 0 ? t : Math.pow(2, 10 * (t - 1));
|
||||
}
|
||||
function expoOut(t) {
|
||||
return t === 1 ? t : 1 - Math.pow(2, -10 * t);
|
||||
}
|
||||
function quadInOut(t) {
|
||||
t /= 0.5;
|
||||
if (t < 1) return 0.5 * t * t;
|
||||
t--;
|
||||
return -0.5 * (t * (t - 2) - 1);
|
||||
}
|
||||
function quadIn(t) {
|
||||
return t * t;
|
||||
}
|
||||
function quadOut(t) {
|
||||
return -t * (t - 2);
|
||||
}
|
||||
function quartInOut(t) {
|
||||
return t < 0.5 ? 8 * Math.pow(t, 4) : -8 * Math.pow(t - 1, 4) + 1;
|
||||
}
|
||||
function quartIn(t) {
|
||||
return Math.pow(t, 4);
|
||||
}
|
||||
function quartOut(t) {
|
||||
return Math.pow(t - 1, 3) * (1 - t) + 1;
|
||||
}
|
||||
function quintInOut(t) {
|
||||
if ((t *= 2) < 1) return 0.5 * t * t * t * t * t;
|
||||
return 0.5 * ((t -= 2) * t * t * t * t + 2);
|
||||
}
|
||||
function quintIn(t) {
|
||||
return t * t * t * t * t;
|
||||
}
|
||||
function quintOut(t) {
|
||||
return --t * t * t * t * t + 1;
|
||||
}
|
||||
function sineInOut(t) {
|
||||
return -0.5 * (Math.cos(Math.PI * t) - 1);
|
||||
}
|
||||
function sineIn(t) {
|
||||
const v = Math.cos(t * Math.PI * 0.5);
|
||||
if (Math.abs(v) < 1e-14) return 1;
|
||||
else return 1 - v;
|
||||
}
|
||||
function sineOut(t) {
|
||||
return Math.sin(t * Math.PI / 2);
|
||||
}
|
||||
|
||||
export {
|
||||
backInOut,
|
||||
backIn,
|
||||
backOut,
|
||||
bounceOut,
|
||||
bounceInOut,
|
||||
bounceIn,
|
||||
circInOut,
|
||||
circIn,
|
||||
circOut,
|
||||
cubicInOut,
|
||||
cubicIn,
|
||||
cubicOut,
|
||||
elasticInOut,
|
||||
elasticIn,
|
||||
elasticOut,
|
||||
expoInOut,
|
||||
expoIn,
|
||||
expoOut,
|
||||
quadInOut,
|
||||
quadIn,
|
||||
quadOut,
|
||||
quartInOut,
|
||||
quartIn,
|
||||
quartOut,
|
||||
quintInOut,
|
||||
quintIn,
|
||||
quintOut,
|
||||
sineInOut,
|
||||
sineIn,
|
||||
sineOut
|
||||
};
|
||||
//# sourceMappingURL=chunk-ETWSND26.js.map
|
||||
7
frontend/node_modules/.vite/deps/chunk-ETWSND26.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/chunk-ETWSND26.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
13
frontend/node_modules/.vite/deps/chunk-JVWSFFO4.js
generated
vendored
Normal file
13
frontend/node_modules/.vite/deps/chunk-JVWSFFO4.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
|
||||
var __commonJS = (cb, mod) => function __require() {
|
||||
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
|
||||
};
|
||||
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
|
||||
|
||||
export {
|
||||
__commonJS,
|
||||
__publicField
|
||||
};
|
||||
//# sourceMappingURL=chunk-JVWSFFO4.js.map
|
||||
7
frontend/node_modules/.vite/deps/chunk-JVWSFFO4.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/chunk-JVWSFFO4.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": [],
|
||||
"sourcesContent": [],
|
||||
"mappings": "",
|
||||
"names": []
|
||||
}
|
||||
2853
frontend/node_modules/.vite/deps/chunk-LBLMGS3B.js
generated
vendored
Normal file
2853
frontend/node_modules/.vite/deps/chunk-LBLMGS3B.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7
frontend/node_modules/.vite/deps/chunk-LBLMGS3B.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/chunk-LBLMGS3B.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
9563
frontend/node_modules/.vite/deps/leaflet.js
generated
vendored
Normal file
9563
frontend/node_modules/.vite/deps/leaflet.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7
frontend/node_modules/.vite/deps/leaflet.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/leaflet.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
frontend/node_modules/.vite/deps/package.json
generated
vendored
Normal file
3
frontend/node_modules/.vite/deps/package.json
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"type": "module"
|
||||
}
|
||||
31
frontend/node_modules/.vite/deps/svelte.js
generated
vendored
Normal file
31
frontend/node_modules/.vite/deps/svelte.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
import {
|
||||
SvelteComponentDev,
|
||||
SvelteComponentTyped,
|
||||
afterUpdate,
|
||||
beforeUpdate,
|
||||
createEventDispatcher,
|
||||
getAllContexts,
|
||||
getContext,
|
||||
hasContext,
|
||||
onDestroy,
|
||||
onMount,
|
||||
setContext,
|
||||
tick
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
import "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
export {
|
||||
SvelteComponentDev as SvelteComponent,
|
||||
SvelteComponentTyped,
|
||||
afterUpdate,
|
||||
beforeUpdate,
|
||||
createEventDispatcher,
|
||||
getAllContexts,
|
||||
getContext,
|
||||
hasContext,
|
||||
onDestroy,
|
||||
onMount,
|
||||
setContext,
|
||||
tick
|
||||
};
|
||||
//# sourceMappingURL=svelte.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": [],
|
||||
"sourcesContent": [],
|
||||
"mappings": "",
|
||||
"names": []
|
||||
}
|
||||
34
frontend/node_modules/.vite/deps/svelte_animate.js
generated
vendored
Normal file
34
frontend/node_modules/.vite/deps/svelte_animate.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
import {
|
||||
cubicOut
|
||||
} from "./chunk-ETWSND26.js";
|
||||
import {
|
||||
is_function
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
import "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
|
||||
// node_modules/svelte/src/runtime/animate/index.js
|
||||
function flip(node, { from, to }, params = {}) {
|
||||
const style = getComputedStyle(node);
|
||||
const transform = style.transform === "none" ? "" : style.transform;
|
||||
const [ox, oy] = style.transformOrigin.split(" ").map(parseFloat);
|
||||
const dx = from.left + from.width * ox / to.width - (to.left + ox);
|
||||
const dy = from.top + from.height * oy / to.height - (to.top + oy);
|
||||
const { delay = 0, duration = (d) => Math.sqrt(d) * 120, easing = cubicOut } = params;
|
||||
return {
|
||||
delay,
|
||||
duration: is_function(duration) ? duration(Math.sqrt(dx * dx + dy * dy)) : duration,
|
||||
easing,
|
||||
css: (t, u) => {
|
||||
const x = u * dx;
|
||||
const y = u * dy;
|
||||
const sx = t + u * from.width / to.width;
|
||||
const sy = t + u * from.height / to.height;
|
||||
return `transform: ${transform} translate(${x}px, ${y}px) scale(${sx}, ${sy});`;
|
||||
}
|
||||
};
|
||||
}
|
||||
export {
|
||||
flip
|
||||
};
|
||||
//# sourceMappingURL=svelte_animate.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte_animate.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte_animate.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": ["../../svelte/src/runtime/animate/index.js"],
|
||||
"sourcesContent": ["import { cubicOut } from '../easing/index.js';\nimport { is_function } from '../internal/index.js';\n\n/**\n * The flip function calculates the start and end position of an element and animates between them, translating the x and y values.\n * `flip` stands for [First, Last, Invert, Play](https://aerotwist.com/blog/flip-your-animations/).\n *\n * https://svelte.dev/docs/svelte-animate#flip\n * @param {Element} node\n * @param {{ from: DOMRect; to: DOMRect }} fromTo\n * @param {import('./public.js').FlipParams} params\n * @returns {import('./public.js').AnimationConfig}\n */\nexport function flip(node, { from, to }, params = {}) {\n\tconst style = getComputedStyle(node);\n\tconst transform = style.transform === 'none' ? '' : style.transform;\n\tconst [ox, oy] = style.transformOrigin.split(' ').map(parseFloat);\n\tconst dx = from.left + (from.width * ox) / to.width - (to.left + ox);\n\tconst dy = from.top + (from.height * oy) / to.height - (to.top + oy);\n\tconst { delay = 0, duration = (d) => Math.sqrt(d) * 120, easing = cubicOut } = params;\n\treturn {\n\t\tdelay,\n\t\tduration: is_function(duration) ? duration(Math.sqrt(dx * dx + dy * dy)) : duration,\n\t\teasing,\n\t\tcss: (t, u) => {\n\t\t\tconst x = u * dx;\n\t\t\tconst y = u * dy;\n\t\t\tconst sx = t + (u * from.width) / to.width;\n\t\t\tconst sy = t + (u * from.height) / to.height;\n\t\t\treturn `transform: ${transform} translate(${x}px, ${y}px) scale(${sx}, ${sy});`;\n\t\t}\n\t};\n}\n"],
|
||||
"mappings": ";;;;;;;;;;AAaO,SAAS,KAAK,MAAM,EAAE,MAAM,GAAG,GAAG,SAAS,CAAC,GAAG;AACrD,QAAM,QAAQ,iBAAiB,IAAI;AACnC,QAAM,YAAY,MAAM,cAAc,SAAS,KAAK,MAAM;AAC1D,QAAM,CAAC,IAAI,EAAE,IAAI,MAAM,gBAAgB,MAAM,GAAG,EAAE,IAAI,UAAU;AAChE,QAAM,KAAK,KAAK,OAAQ,KAAK,QAAQ,KAAM,GAAG,SAAS,GAAG,OAAO;AACjE,QAAM,KAAK,KAAK,MAAO,KAAK,SAAS,KAAM,GAAG,UAAU,GAAG,MAAM;AACjE,QAAM,EAAE,QAAQ,GAAG,WAAW,CAAC,MAAM,KAAK,KAAK,CAAC,IAAI,KAAK,SAAS,SAAS,IAAI;AAC/E,SAAO;AAAA,IACN;AAAA,IACA,UAAU,YAAY,QAAQ,IAAI,SAAS,KAAK,KAAK,KAAK,KAAK,KAAK,EAAE,CAAC,IAAI;AAAA,IAC3E;AAAA,IACA,KAAK,CAAC,GAAG,MAAM;AACd,YAAM,IAAI,IAAI;AACd,YAAM,IAAI,IAAI;AACd,YAAM,KAAK,IAAK,IAAI,KAAK,QAAS,GAAG;AACrC,YAAM,KAAK,IAAK,IAAI,KAAK,SAAU,GAAG;AACtC,aAAO,cAAc,SAAS,cAAc,CAAC,OAAO,CAAC,aAAa,EAAE,KAAK,EAAE;AAAA,IAC5E;AAAA,EACD;AACD;",
|
||||
"names": []
|
||||
}
|
||||
71
frontend/node_modules/.vite/deps/svelte_easing.js
generated
vendored
Normal file
71
frontend/node_modules/.vite/deps/svelte_easing.js
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
import {
|
||||
backIn,
|
||||
backInOut,
|
||||
backOut,
|
||||
bounceIn,
|
||||
bounceInOut,
|
||||
bounceOut,
|
||||
circIn,
|
||||
circInOut,
|
||||
circOut,
|
||||
cubicIn,
|
||||
cubicInOut,
|
||||
cubicOut,
|
||||
elasticIn,
|
||||
elasticInOut,
|
||||
elasticOut,
|
||||
expoIn,
|
||||
expoInOut,
|
||||
expoOut,
|
||||
quadIn,
|
||||
quadInOut,
|
||||
quadOut,
|
||||
quartIn,
|
||||
quartInOut,
|
||||
quartOut,
|
||||
quintIn,
|
||||
quintInOut,
|
||||
quintOut,
|
||||
sineIn,
|
||||
sineInOut,
|
||||
sineOut
|
||||
} from "./chunk-ETWSND26.js";
|
||||
import {
|
||||
identity
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
import "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
export {
|
||||
backIn,
|
||||
backInOut,
|
||||
backOut,
|
||||
bounceIn,
|
||||
bounceInOut,
|
||||
bounceOut,
|
||||
circIn,
|
||||
circInOut,
|
||||
circOut,
|
||||
cubicIn,
|
||||
cubicInOut,
|
||||
cubicOut,
|
||||
elasticIn,
|
||||
elasticInOut,
|
||||
elasticOut,
|
||||
expoIn,
|
||||
expoInOut,
|
||||
expoOut,
|
||||
identity as linear,
|
||||
quadIn,
|
||||
quadInOut,
|
||||
quadOut,
|
||||
quartIn,
|
||||
quartInOut,
|
||||
quartOut,
|
||||
quintIn,
|
||||
quintInOut,
|
||||
quintOut,
|
||||
sineIn,
|
||||
sineInOut,
|
||||
sineOut
|
||||
};
|
||||
//# sourceMappingURL=svelte_easing.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte_easing.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte_easing.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": [],
|
||||
"sourcesContent": [],
|
||||
"mappings": "",
|
||||
"names": []
|
||||
}
|
||||
413
frontend/node_modules/.vite/deps/svelte_internal.js
generated
vendored
Normal file
413
frontend/node_modules/.vite/deps/svelte_internal.js
generated
vendored
Normal file
@@ -0,0 +1,413 @@
|
||||
import {
|
||||
HtmlTag,
|
||||
HtmlTagHydration,
|
||||
ResizeObserverSingleton,
|
||||
SvelteComponent,
|
||||
SvelteComponentDev,
|
||||
SvelteComponentTyped,
|
||||
SvelteElement,
|
||||
action_destroyer,
|
||||
add_attribute,
|
||||
add_classes,
|
||||
add_flush_callback,
|
||||
add_iframe_resize_listener,
|
||||
add_location,
|
||||
add_render_callback,
|
||||
add_styles,
|
||||
add_transform,
|
||||
afterUpdate,
|
||||
append,
|
||||
append_dev,
|
||||
append_empty_stylesheet,
|
||||
append_hydration,
|
||||
append_hydration_dev,
|
||||
append_styles,
|
||||
assign,
|
||||
attr,
|
||||
attr_dev,
|
||||
attribute_to_object,
|
||||
beforeUpdate,
|
||||
bind,
|
||||
binding_callbacks,
|
||||
blank_object,
|
||||
bubble,
|
||||
check_outros,
|
||||
children,
|
||||
claim_comment,
|
||||
claim_component,
|
||||
claim_element,
|
||||
claim_html_tag,
|
||||
claim_space,
|
||||
claim_svg_element,
|
||||
claim_text,
|
||||
clear_loops,
|
||||
comment,
|
||||
component_subscribe,
|
||||
compute_rest_props,
|
||||
compute_slots,
|
||||
construct_svelte_component,
|
||||
construct_svelte_component_dev,
|
||||
contenteditable_truthy_values,
|
||||
createEventDispatcher,
|
||||
create_animation,
|
||||
create_bidirectional_transition,
|
||||
create_component,
|
||||
create_custom_element,
|
||||
create_in_transition,
|
||||
create_out_transition,
|
||||
create_slot,
|
||||
create_ssr_component,
|
||||
current_component,
|
||||
custom_event,
|
||||
dataset_dev,
|
||||
debug,
|
||||
destroy_block,
|
||||
destroy_component,
|
||||
destroy_each,
|
||||
detach,
|
||||
detach_after_dev,
|
||||
detach_before_dev,
|
||||
detach_between_dev,
|
||||
detach_dev,
|
||||
dirty_components,
|
||||
dispatch_dev,
|
||||
each,
|
||||
element,
|
||||
element_is,
|
||||
empty,
|
||||
end_hydrating,
|
||||
ensure_array_like,
|
||||
ensure_array_like_dev,
|
||||
escape,
|
||||
escape_attribute_value,
|
||||
escape_object,
|
||||
exclude_internal_props,
|
||||
fix_and_destroy_block,
|
||||
fix_and_outro_and_destroy_block,
|
||||
fix_position,
|
||||
flush,
|
||||
flush_render_callbacks,
|
||||
getAllContexts,
|
||||
getContext,
|
||||
get_all_dirty_from_scope,
|
||||
get_binding_group_value,
|
||||
get_current_component,
|
||||
get_custom_elements_slots,
|
||||
get_root_for_style,
|
||||
get_slot_changes,
|
||||
get_spread_object,
|
||||
get_spread_update,
|
||||
get_store_value,
|
||||
get_svelte_dataset,
|
||||
globals,
|
||||
group_outros,
|
||||
handle_promise,
|
||||
hasContext,
|
||||
has_prop,
|
||||
head_selector,
|
||||
identity,
|
||||
init,
|
||||
init_binding_group,
|
||||
init_binding_group_dynamic,
|
||||
insert,
|
||||
insert_dev,
|
||||
insert_hydration,
|
||||
insert_hydration_dev,
|
||||
intros,
|
||||
invalid_attribute_name_character,
|
||||
is_client,
|
||||
is_crossorigin,
|
||||
is_empty,
|
||||
is_function,
|
||||
is_promise,
|
||||
is_void,
|
||||
listen,
|
||||
listen_dev,
|
||||
loop,
|
||||
loop_guard,
|
||||
merge_ssr_styles,
|
||||
missing_component,
|
||||
mount_component,
|
||||
noop,
|
||||
not_equal,
|
||||
now,
|
||||
null_to_empty,
|
||||
object_without_properties,
|
||||
onDestroy,
|
||||
onMount,
|
||||
once,
|
||||
outro_and_destroy_block,
|
||||
prevent_default,
|
||||
prop_dev,
|
||||
query_selector_all,
|
||||
raf,
|
||||
resize_observer_border_box,
|
||||
resize_observer_content_box,
|
||||
resize_observer_device_pixel_content_box,
|
||||
run,
|
||||
run_all,
|
||||
safe_not_equal,
|
||||
schedule_update,
|
||||
select_multiple_value,
|
||||
select_option,
|
||||
select_options,
|
||||
select_value,
|
||||
self,
|
||||
setContext,
|
||||
set_attributes,
|
||||
set_current_component,
|
||||
set_custom_element_data,
|
||||
set_custom_element_data_map,
|
||||
set_data,
|
||||
set_data_contenteditable,
|
||||
set_data_contenteditable_dev,
|
||||
set_data_dev,
|
||||
set_data_maybe_contenteditable,
|
||||
set_data_maybe_contenteditable_dev,
|
||||
set_dynamic_element_data,
|
||||
set_input_type,
|
||||
set_input_value,
|
||||
set_now,
|
||||
set_raf,
|
||||
set_store_value,
|
||||
set_style,
|
||||
set_svg_attributes,
|
||||
space,
|
||||
split_css_unit,
|
||||
spread,
|
||||
src_url_equal,
|
||||
srcset_url_equal,
|
||||
start_hydrating,
|
||||
stop_immediate_propagation,
|
||||
stop_propagation,
|
||||
stringify_spread,
|
||||
subscribe,
|
||||
svg_element,
|
||||
text,
|
||||
tick,
|
||||
time_ranges_to_array,
|
||||
to_number,
|
||||
toggle_class,
|
||||
transition_in,
|
||||
transition_out,
|
||||
trusted,
|
||||
update_await_block_branch,
|
||||
update_keyed_each,
|
||||
update_slot,
|
||||
update_slot_base,
|
||||
validate_component,
|
||||
validate_dynamic_element,
|
||||
validate_each_keys,
|
||||
validate_slots,
|
||||
validate_store,
|
||||
validate_void_dynamic_element,
|
||||
xlink_attr
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
import "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
export {
|
||||
HtmlTag,
|
||||
HtmlTagHydration,
|
||||
ResizeObserverSingleton,
|
||||
SvelteComponent,
|
||||
SvelteComponentDev,
|
||||
SvelteComponentTyped,
|
||||
SvelteElement,
|
||||
action_destroyer,
|
||||
add_attribute,
|
||||
add_classes,
|
||||
add_flush_callback,
|
||||
add_iframe_resize_listener,
|
||||
add_location,
|
||||
add_render_callback,
|
||||
add_styles,
|
||||
add_transform,
|
||||
afterUpdate,
|
||||
append,
|
||||
append_dev,
|
||||
append_empty_stylesheet,
|
||||
append_hydration,
|
||||
append_hydration_dev,
|
||||
append_styles,
|
||||
assign,
|
||||
attr,
|
||||
attr_dev,
|
||||
attribute_to_object,
|
||||
beforeUpdate,
|
||||
bind,
|
||||
binding_callbacks,
|
||||
blank_object,
|
||||
bubble,
|
||||
check_outros,
|
||||
children,
|
||||
claim_comment,
|
||||
claim_component,
|
||||
claim_element,
|
||||
claim_html_tag,
|
||||
claim_space,
|
||||
claim_svg_element,
|
||||
claim_text,
|
||||
clear_loops,
|
||||
comment,
|
||||
component_subscribe,
|
||||
compute_rest_props,
|
||||
compute_slots,
|
||||
construct_svelte_component,
|
||||
construct_svelte_component_dev,
|
||||
contenteditable_truthy_values,
|
||||
createEventDispatcher,
|
||||
create_animation,
|
||||
create_bidirectional_transition,
|
||||
create_component,
|
||||
create_custom_element,
|
||||
create_in_transition,
|
||||
create_out_transition,
|
||||
create_slot,
|
||||
create_ssr_component,
|
||||
current_component,
|
||||
custom_event,
|
||||
dataset_dev,
|
||||
debug,
|
||||
destroy_block,
|
||||
destroy_component,
|
||||
destroy_each,
|
||||
detach,
|
||||
detach_after_dev,
|
||||
detach_before_dev,
|
||||
detach_between_dev,
|
||||
detach_dev,
|
||||
dirty_components,
|
||||
dispatch_dev,
|
||||
each,
|
||||
element,
|
||||
element_is,
|
||||
empty,
|
||||
end_hydrating,
|
||||
ensure_array_like,
|
||||
ensure_array_like_dev,
|
||||
escape,
|
||||
escape_attribute_value,
|
||||
escape_object,
|
||||
exclude_internal_props,
|
||||
fix_and_destroy_block,
|
||||
fix_and_outro_and_destroy_block,
|
||||
fix_position,
|
||||
flush,
|
||||
flush_render_callbacks,
|
||||
getAllContexts,
|
||||
getContext,
|
||||
get_all_dirty_from_scope,
|
||||
get_binding_group_value,
|
||||
get_current_component,
|
||||
get_custom_elements_slots,
|
||||
get_root_for_style,
|
||||
get_slot_changes,
|
||||
get_spread_object,
|
||||
get_spread_update,
|
||||
get_store_value,
|
||||
get_svelte_dataset,
|
||||
globals,
|
||||
group_outros,
|
||||
handle_promise,
|
||||
hasContext,
|
||||
has_prop,
|
||||
head_selector,
|
||||
identity,
|
||||
init,
|
||||
init_binding_group,
|
||||
init_binding_group_dynamic,
|
||||
insert,
|
||||
insert_dev,
|
||||
insert_hydration,
|
||||
insert_hydration_dev,
|
||||
intros,
|
||||
invalid_attribute_name_character,
|
||||
is_client,
|
||||
is_crossorigin,
|
||||
is_empty,
|
||||
is_function,
|
||||
is_promise,
|
||||
is_void,
|
||||
listen,
|
||||
listen_dev,
|
||||
loop,
|
||||
loop_guard,
|
||||
merge_ssr_styles,
|
||||
missing_component,
|
||||
mount_component,
|
||||
noop,
|
||||
not_equal,
|
||||
now,
|
||||
null_to_empty,
|
||||
object_without_properties,
|
||||
onDestroy,
|
||||
onMount,
|
||||
once,
|
||||
outro_and_destroy_block,
|
||||
prevent_default,
|
||||
prop_dev,
|
||||
query_selector_all,
|
||||
raf,
|
||||
resize_observer_border_box,
|
||||
resize_observer_content_box,
|
||||
resize_observer_device_pixel_content_box,
|
||||
run,
|
||||
run_all,
|
||||
safe_not_equal,
|
||||
schedule_update,
|
||||
select_multiple_value,
|
||||
select_option,
|
||||
select_options,
|
||||
select_value,
|
||||
self,
|
||||
setContext,
|
||||
set_attributes,
|
||||
set_current_component,
|
||||
set_custom_element_data,
|
||||
set_custom_element_data_map,
|
||||
set_data,
|
||||
set_data_contenteditable,
|
||||
set_data_contenteditable_dev,
|
||||
set_data_dev,
|
||||
set_data_maybe_contenteditable,
|
||||
set_data_maybe_contenteditable_dev,
|
||||
set_dynamic_element_data,
|
||||
set_input_type,
|
||||
set_input_value,
|
||||
set_now,
|
||||
set_raf,
|
||||
set_store_value,
|
||||
set_style,
|
||||
set_svg_attributes,
|
||||
space,
|
||||
split_css_unit,
|
||||
spread,
|
||||
src_url_equal,
|
||||
srcset_url_equal,
|
||||
start_hydrating,
|
||||
stop_immediate_propagation,
|
||||
stop_propagation,
|
||||
stringify_spread,
|
||||
subscribe,
|
||||
svg_element,
|
||||
text,
|
||||
tick,
|
||||
time_ranges_to_array,
|
||||
to_number,
|
||||
toggle_class,
|
||||
transition_in,
|
||||
transition_out,
|
||||
trusted,
|
||||
update_await_block_branch,
|
||||
update_keyed_each,
|
||||
update_slot,
|
||||
update_slot_base,
|
||||
validate_component,
|
||||
validate_dynamic_element,
|
||||
validate_each_keys,
|
||||
validate_slots,
|
||||
validate_store,
|
||||
validate_void_dynamic_element,
|
||||
xlink_attr
|
||||
};
|
||||
//# sourceMappingURL=svelte_internal.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte_internal.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte_internal.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": [],
|
||||
"sourcesContent": [],
|
||||
"mappings": "",
|
||||
"names": []
|
||||
}
|
||||
9
frontend/node_modules/.vite/deps/svelte_internal_disclose-version.js
generated
vendored
Normal file
9
frontend/node_modules/.vite/deps/svelte_internal_disclose-version.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import {
|
||||
PUBLIC_VERSION
|
||||
} from "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
|
||||
// node_modules/svelte/src/runtime/internal/disclose-version/index.js
|
||||
if (typeof window !== "undefined")
|
||||
(window.__svelte || (window.__svelte = { v: /* @__PURE__ */ new Set() })).v.add(PUBLIC_VERSION);
|
||||
//# sourceMappingURL=svelte_internal_disclose-version.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte_internal_disclose-version.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte_internal_disclose-version.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": ["../../svelte/src/runtime/internal/disclose-version/index.js"],
|
||||
"sourcesContent": ["import { PUBLIC_VERSION } from '../../../shared/version.js';\n\nif (typeof window !== 'undefined')\n\t// @ts-ignore\n\t(window.__svelte || (window.__svelte = { v: new Set() })).v.add(PUBLIC_VERSION);\n"],
|
||||
"mappings": ";;;;;;AAEA,IAAI,OAAO,WAAW;AAErB,GAAC,OAAO,aAAa,OAAO,WAAW,EAAE,GAAG,oBAAI,IAAI,EAAE,IAAI,EAAE,IAAI,cAAc;",
|
||||
"names": []
|
||||
}
|
||||
216
frontend/node_modules/.vite/deps/svelte_motion.js
generated
vendored
Normal file
216
frontend/node_modules/.vite/deps/svelte_motion.js
generated
vendored
Normal file
@@ -0,0 +1,216 @@
|
||||
import {
|
||||
writable
|
||||
} from "./chunk-7BDCSBRK.js";
|
||||
import "./chunk-ETWSND26.js";
|
||||
import {
|
||||
assign,
|
||||
identity,
|
||||
loop,
|
||||
now
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
import "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
|
||||
// node_modules/svelte/src/runtime/motion/utils.js
|
||||
function is_date(obj) {
|
||||
return Object.prototype.toString.call(obj) === "[object Date]";
|
||||
}
|
||||
|
||||
// node_modules/svelte/src/runtime/motion/spring.js
|
||||
function tick_spring(ctx, last_value, current_value, target_value) {
|
||||
if (typeof current_value === "number" || is_date(current_value)) {
|
||||
const delta = target_value - current_value;
|
||||
const velocity = (current_value - last_value) / (ctx.dt || 1 / 60);
|
||||
const spring2 = ctx.opts.stiffness * delta;
|
||||
const damper = ctx.opts.damping * velocity;
|
||||
const acceleration = (spring2 - damper) * ctx.inv_mass;
|
||||
const d = (velocity + acceleration) * ctx.dt;
|
||||
if (Math.abs(d) < ctx.opts.precision && Math.abs(delta) < ctx.opts.precision) {
|
||||
return target_value;
|
||||
} else {
|
||||
ctx.settled = false;
|
||||
return is_date(current_value) ? new Date(current_value.getTime() + d) : current_value + d;
|
||||
}
|
||||
} else if (Array.isArray(current_value)) {
|
||||
return current_value.map(
|
||||
(_, i) => tick_spring(ctx, last_value[i], current_value[i], target_value[i])
|
||||
);
|
||||
} else if (typeof current_value === "object") {
|
||||
const next_value = {};
|
||||
for (const k in current_value) {
|
||||
next_value[k] = tick_spring(ctx, last_value[k], current_value[k], target_value[k]);
|
||||
}
|
||||
return next_value;
|
||||
} else {
|
||||
throw new Error(`Cannot spring ${typeof current_value} values`);
|
||||
}
|
||||
}
|
||||
function spring(value, opts = {}) {
|
||||
const store = writable(value);
|
||||
const { stiffness = 0.15, damping = 0.8, precision = 0.01 } = opts;
|
||||
let last_time;
|
||||
let task;
|
||||
let current_token;
|
||||
let last_value = value;
|
||||
let target_value = value;
|
||||
let inv_mass = 1;
|
||||
let inv_mass_recovery_rate = 0;
|
||||
let cancel_task = false;
|
||||
function set(new_value, opts2 = {}) {
|
||||
target_value = new_value;
|
||||
const token = current_token = {};
|
||||
if (value == null || opts2.hard || spring2.stiffness >= 1 && spring2.damping >= 1) {
|
||||
cancel_task = true;
|
||||
last_time = now();
|
||||
last_value = new_value;
|
||||
store.set(value = target_value);
|
||||
return Promise.resolve();
|
||||
} else if (opts2.soft) {
|
||||
const rate = opts2.soft === true ? 0.5 : +opts2.soft;
|
||||
inv_mass_recovery_rate = 1 / (rate * 60);
|
||||
inv_mass = 0;
|
||||
}
|
||||
if (!task) {
|
||||
last_time = now();
|
||||
cancel_task = false;
|
||||
task = loop((now2) => {
|
||||
if (cancel_task) {
|
||||
cancel_task = false;
|
||||
task = null;
|
||||
return false;
|
||||
}
|
||||
inv_mass = Math.min(inv_mass + inv_mass_recovery_rate, 1);
|
||||
const ctx = {
|
||||
inv_mass,
|
||||
opts: spring2,
|
||||
settled: true,
|
||||
dt: (now2 - last_time) * 60 / 1e3
|
||||
};
|
||||
const next_value = tick_spring(ctx, last_value, value, target_value);
|
||||
last_time = now2;
|
||||
last_value = value;
|
||||
store.set(value = next_value);
|
||||
if (ctx.settled) {
|
||||
task = null;
|
||||
}
|
||||
return !ctx.settled;
|
||||
});
|
||||
}
|
||||
return new Promise((fulfil) => {
|
||||
task.promise.then(() => {
|
||||
if (token === current_token) fulfil();
|
||||
});
|
||||
});
|
||||
}
|
||||
const spring2 = {
|
||||
set,
|
||||
update: (fn, opts2) => set(fn(target_value, value), opts2),
|
||||
subscribe: store.subscribe,
|
||||
stiffness,
|
||||
damping,
|
||||
precision
|
||||
};
|
||||
return spring2;
|
||||
}
|
||||
|
||||
// node_modules/svelte/src/runtime/motion/tweened.js
|
||||
function get_interpolator(a, b) {
|
||||
if (a === b || a !== a) return () => a;
|
||||
const type = typeof a;
|
||||
if (type !== typeof b || Array.isArray(a) !== Array.isArray(b)) {
|
||||
throw new Error("Cannot interpolate values of different type");
|
||||
}
|
||||
if (Array.isArray(a)) {
|
||||
const arr = b.map((bi, i) => {
|
||||
return get_interpolator(a[i], bi);
|
||||
});
|
||||
return (t) => arr.map((fn) => fn(t));
|
||||
}
|
||||
if (type === "object") {
|
||||
if (!a || !b) throw new Error("Object cannot be null");
|
||||
if (is_date(a) && is_date(b)) {
|
||||
a = a.getTime();
|
||||
b = b.getTime();
|
||||
const delta = b - a;
|
||||
return (t) => new Date(a + t * delta);
|
||||
}
|
||||
const keys = Object.keys(b);
|
||||
const interpolators = {};
|
||||
keys.forEach((key) => {
|
||||
interpolators[key] = get_interpolator(a[key], b[key]);
|
||||
});
|
||||
return (t) => {
|
||||
const result = {};
|
||||
keys.forEach((key) => {
|
||||
result[key] = interpolators[key](t);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
}
|
||||
if (type === "number") {
|
||||
const delta = b - a;
|
||||
return (t) => a + t * delta;
|
||||
}
|
||||
throw new Error(`Cannot interpolate ${type} values`);
|
||||
}
|
||||
function tweened(value, defaults = {}) {
|
||||
const store = writable(value);
|
||||
let task;
|
||||
let target_value = value;
|
||||
function set(new_value, opts) {
|
||||
if (value == null) {
|
||||
store.set(value = new_value);
|
||||
return Promise.resolve();
|
||||
}
|
||||
target_value = new_value;
|
||||
let previous_task = task;
|
||||
let started = false;
|
||||
let {
|
||||
delay = 0,
|
||||
duration = 400,
|
||||
easing = identity,
|
||||
interpolate = get_interpolator
|
||||
} = assign(assign({}, defaults), opts);
|
||||
if (duration === 0) {
|
||||
if (previous_task) {
|
||||
previous_task.abort();
|
||||
previous_task = null;
|
||||
}
|
||||
store.set(value = target_value);
|
||||
return Promise.resolve();
|
||||
}
|
||||
const start = now() + delay;
|
||||
let fn;
|
||||
task = loop((now2) => {
|
||||
if (now2 < start) return true;
|
||||
if (!started) {
|
||||
fn = interpolate(value, new_value);
|
||||
if (typeof duration === "function") duration = duration(value, new_value);
|
||||
started = true;
|
||||
}
|
||||
if (previous_task) {
|
||||
previous_task.abort();
|
||||
previous_task = null;
|
||||
}
|
||||
const elapsed = now2 - start;
|
||||
if (elapsed > /** @type {number} */
|
||||
duration) {
|
||||
store.set(value = new_value);
|
||||
return false;
|
||||
}
|
||||
store.set(value = fn(easing(elapsed / duration)));
|
||||
return true;
|
||||
});
|
||||
return task.promise;
|
||||
}
|
||||
return {
|
||||
set,
|
||||
update: (fn, opts) => set(fn(target_value, value), opts),
|
||||
subscribe: store.subscribe
|
||||
};
|
||||
}
|
||||
export {
|
||||
spring,
|
||||
tweened
|
||||
};
|
||||
//# sourceMappingURL=svelte_motion.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte_motion.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte_motion.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
19
frontend/node_modules/.vite/deps/svelte_store.js
generated
vendored
Normal file
19
frontend/node_modules/.vite/deps/svelte_store.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import {
|
||||
derived,
|
||||
readable,
|
||||
readonly,
|
||||
writable
|
||||
} from "./chunk-7BDCSBRK.js";
|
||||
import {
|
||||
get_store_value
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
import "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
export {
|
||||
derived,
|
||||
get_store_value as get,
|
||||
readable,
|
||||
readonly,
|
||||
writable
|
||||
};
|
||||
//# sourceMappingURL=svelte_store.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte_store.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte_store.js.map
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 3,
|
||||
"sources": [],
|
||||
"sourcesContent": [],
|
||||
"mappings": "",
|
||||
"names": []
|
||||
}
|
||||
175
frontend/node_modules/.vite/deps/svelte_transition.js
generated
vendored
Normal file
175
frontend/node_modules/.vite/deps/svelte_transition.js
generated
vendored
Normal file
@@ -0,0 +1,175 @@
|
||||
import {
|
||||
cubicInOut,
|
||||
cubicOut
|
||||
} from "./chunk-ETWSND26.js";
|
||||
import {
|
||||
assign,
|
||||
identity,
|
||||
is_function,
|
||||
split_css_unit
|
||||
} from "./chunk-LBLMGS3B.js";
|
||||
import "./chunk-672HPU4M.js";
|
||||
import "./chunk-JVWSFFO4.js";
|
||||
|
||||
// node_modules/svelte/src/runtime/transition/index.js
|
||||
function blur(node, { delay = 0, duration = 400, easing = cubicInOut, amount = 5, opacity = 0 } = {}) {
|
||||
const style = getComputedStyle(node);
|
||||
const target_opacity = +style.opacity;
|
||||
const f = style.filter === "none" ? "" : style.filter;
|
||||
const od = target_opacity * (1 - opacity);
|
||||
const [value, unit] = split_css_unit(amount);
|
||||
return {
|
||||
delay,
|
||||
duration,
|
||||
easing,
|
||||
css: (_t, u) => `opacity: ${target_opacity - od * u}; filter: ${f} blur(${u * value}${unit});`
|
||||
};
|
||||
}
|
||||
function fade(node, { delay = 0, duration = 400, easing = identity } = {}) {
|
||||
const o = +getComputedStyle(node).opacity;
|
||||
return {
|
||||
delay,
|
||||
duration,
|
||||
easing,
|
||||
css: (t) => `opacity: ${t * o}`
|
||||
};
|
||||
}
|
||||
function fly(node, { delay = 0, duration = 400, easing = cubicOut, x = 0, y = 0, opacity = 0 } = {}) {
|
||||
const style = getComputedStyle(node);
|
||||
const target_opacity = +style.opacity;
|
||||
const transform = style.transform === "none" ? "" : style.transform;
|
||||
const od = target_opacity * (1 - opacity);
|
||||
const [xValue, xUnit] = split_css_unit(x);
|
||||
const [yValue, yUnit] = split_css_unit(y);
|
||||
return {
|
||||
delay,
|
||||
duration,
|
||||
easing,
|
||||
css: (t, u) => `
|
||||
transform: ${transform} translate(${(1 - t) * xValue}${xUnit}, ${(1 - t) * yValue}${yUnit});
|
||||
opacity: ${target_opacity - od * u}`
|
||||
};
|
||||
}
|
||||
function slide(node, { delay = 0, duration = 400, easing = cubicOut, axis = "y" } = {}) {
|
||||
const style = getComputedStyle(node);
|
||||
const opacity = +style.opacity;
|
||||
const primary_property = axis === "y" ? "height" : "width";
|
||||
const primary_property_value = parseFloat(style[primary_property]);
|
||||
const secondary_properties = axis === "y" ? ["top", "bottom"] : ["left", "right"];
|
||||
const capitalized_secondary_properties = secondary_properties.map(
|
||||
(e) => `${e[0].toUpperCase()}${e.slice(1)}`
|
||||
);
|
||||
const padding_start_value = parseFloat(style[`padding${capitalized_secondary_properties[0]}`]);
|
||||
const padding_end_value = parseFloat(style[`padding${capitalized_secondary_properties[1]}`]);
|
||||
const margin_start_value = parseFloat(style[`margin${capitalized_secondary_properties[0]}`]);
|
||||
const margin_end_value = parseFloat(style[`margin${capitalized_secondary_properties[1]}`]);
|
||||
const border_width_start_value = parseFloat(
|
||||
style[`border${capitalized_secondary_properties[0]}Width`]
|
||||
);
|
||||
const border_width_end_value = parseFloat(
|
||||
style[`border${capitalized_secondary_properties[1]}Width`]
|
||||
);
|
||||
return {
|
||||
delay,
|
||||
duration,
|
||||
easing,
|
||||
css: (t) => `overflow: hidden;opacity: ${Math.min(t * 20, 1) * opacity};${primary_property}: ${t * primary_property_value}px;padding-${secondary_properties[0]}: ${t * padding_start_value}px;padding-${secondary_properties[1]}: ${t * padding_end_value}px;margin-${secondary_properties[0]}: ${t * margin_start_value}px;margin-${secondary_properties[1]}: ${t * margin_end_value}px;border-${secondary_properties[0]}-width: ${t * border_width_start_value}px;border-${secondary_properties[1]}-width: ${t * border_width_end_value}px;`
|
||||
};
|
||||
}
|
||||
function scale(node, { delay = 0, duration = 400, easing = cubicOut, start = 0, opacity = 0 } = {}) {
|
||||
const style = getComputedStyle(node);
|
||||
const target_opacity = +style.opacity;
|
||||
const transform = style.transform === "none" ? "" : style.transform;
|
||||
const sd = 1 - start;
|
||||
const od = target_opacity * (1 - opacity);
|
||||
return {
|
||||
delay,
|
||||
duration,
|
||||
easing,
|
||||
css: (_t, u) => `
|
||||
transform: ${transform} scale(${1 - sd * u});
|
||||
opacity: ${target_opacity - od * u}
|
||||
`
|
||||
};
|
||||
}
|
||||
function draw(node, { delay = 0, speed, duration, easing = cubicInOut } = {}) {
|
||||
let len = node.getTotalLength();
|
||||
const style = getComputedStyle(node);
|
||||
if (style.strokeLinecap !== "butt") {
|
||||
len += parseInt(style.strokeWidth);
|
||||
}
|
||||
if (duration === void 0) {
|
||||
if (speed === void 0) {
|
||||
duration = 800;
|
||||
} else {
|
||||
duration = len / speed;
|
||||
}
|
||||
} else if (typeof duration === "function") {
|
||||
duration = duration(len);
|
||||
}
|
||||
return {
|
||||
delay,
|
||||
duration,
|
||||
easing,
|
||||
css: (_, u) => `
|
||||
stroke-dasharray: ${len};
|
||||
stroke-dashoffset: ${u * len};
|
||||
`
|
||||
};
|
||||
}
|
||||
function crossfade({ fallback, ...defaults }) {
|
||||
const to_receive = /* @__PURE__ */ new Map();
|
||||
const to_send = /* @__PURE__ */ new Map();
|
||||
function crossfade2(from_node, node, params) {
|
||||
const {
|
||||
delay = 0,
|
||||
duration = (d2) => Math.sqrt(d2) * 30,
|
||||
easing = cubicOut
|
||||
} = assign(assign({}, defaults), params);
|
||||
const from = from_node.getBoundingClientRect();
|
||||
const to = node.getBoundingClientRect();
|
||||
const dx = from.left - to.left;
|
||||
const dy = from.top - to.top;
|
||||
const dw = from.width / to.width;
|
||||
const dh = from.height / to.height;
|
||||
const d = Math.sqrt(dx * dx + dy * dy);
|
||||
const style = getComputedStyle(node);
|
||||
const transform = style.transform === "none" ? "" : style.transform;
|
||||
const opacity = +style.opacity;
|
||||
return {
|
||||
delay,
|
||||
duration: is_function(duration) ? duration(d) : duration,
|
||||
easing,
|
||||
css: (t, u) => `
|
||||
opacity: ${t * opacity};
|
||||
transform-origin: top left;
|
||||
transform: ${transform} translate(${u * dx}px,${u * dy}px) scale(${t + (1 - t) * dw}, ${t + (1 - t) * dh});
|
||||
`
|
||||
};
|
||||
}
|
||||
function transition(items, counterparts, intro) {
|
||||
return (node, params) => {
|
||||
items.set(params.key, node);
|
||||
return () => {
|
||||
if (counterparts.has(params.key)) {
|
||||
const other_node = counterparts.get(params.key);
|
||||
counterparts.delete(params.key);
|
||||
return crossfade2(other_node, node, params);
|
||||
}
|
||||
items.delete(params.key);
|
||||
return fallback && fallback(node, params, intro);
|
||||
};
|
||||
};
|
||||
}
|
||||
return [transition(to_send, to_receive, false), transition(to_receive, to_send, true)];
|
||||
}
|
||||
export {
|
||||
blur,
|
||||
crossfade,
|
||||
draw,
|
||||
fade,
|
||||
fly,
|
||||
scale,
|
||||
slide
|
||||
};
|
||||
//# sourceMappingURL=svelte_transition.js.map
|
||||
7
frontend/node_modules/.vite/deps/svelte_transition.js.map
generated
vendored
Normal file
7
frontend/node_modules/.vite/deps/svelte_transition.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
202
frontend/node_modules/@ampproject/remapping/LICENSE
generated
vendored
Normal file
202
frontend/node_modules/@ampproject/remapping/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
218
frontend/node_modules/@ampproject/remapping/README.md
generated
vendored
Normal file
218
frontend/node_modules/@ampproject/remapping/README.md
generated
vendored
Normal file
@@ -0,0 +1,218 @@
|
||||
# @ampproject/remapping
|
||||
|
||||
> Remap sequential sourcemaps through transformations to point at the original source code
|
||||
|
||||
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
|
||||
them to the original source locations. Think "my minified code, transformed with babel and bundled
|
||||
with webpack", all pointing to the correct location in your original source code.
|
||||
|
||||
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
|
||||
they only need to generate an output sourcemap. This greatly simplifies building custom
|
||||
transformations (think a find-and-replace).
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @ampproject/remapping
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
function remapping(
|
||||
map: SourceMap | SourceMap[],
|
||||
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
|
||||
options?: { excludeContent: boolean, decodedMappings: boolean }
|
||||
): SourceMap;
|
||||
|
||||
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
|
||||
// "source" location (where child sources are resolved relative to, or the location of original
|
||||
// source), and the ability to override the "content" of an original source for inclusion in the
|
||||
// output sourcemap.
|
||||
type LoaderContext = {
|
||||
readonly importer: string;
|
||||
readonly depth: number;
|
||||
source: string;
|
||||
content: string | null | undefined;
|
||||
}
|
||||
```
|
||||
|
||||
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
|
||||
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
|
||||
a transformed file (it has a sourcmap associated with it), then the `loader` should return that
|
||||
sourcemap. If not, the path will be treated as an original, untransformed source code.
|
||||
|
||||
```js
|
||||
// Babel transformed "helloworld.js" into "transformed.js"
|
||||
const transformedMap = JSON.stringify({
|
||||
file: 'transformed.js',
|
||||
// 1st column of 2nd line of output file translates into the 1st source
|
||||
// file, line 3, column 2
|
||||
mappings: ';CAEE',
|
||||
sources: ['helloworld.js'],
|
||||
version: 3,
|
||||
});
|
||||
|
||||
// Uglify minified "transformed.js" into "transformed.min.js"
|
||||
const minifiedTransformedMap = JSON.stringify({
|
||||
file: 'transformed.min.js',
|
||||
// 0th column of 1st line of output file translates into the 1st source
|
||||
// file, line 2, column 1.
|
||||
mappings: 'AACC',
|
||||
names: [],
|
||||
sources: ['transformed.js'],
|
||||
version: 3,
|
||||
});
|
||||
|
||||
const remapped = remapping(
|
||||
minifiedTransformedMap,
|
||||
(file, ctx) => {
|
||||
|
||||
// The "transformed.js" file is an transformed file.
|
||||
if (file === 'transformed.js') {
|
||||
// The root importer is empty.
|
||||
console.assert(ctx.importer === '');
|
||||
// The depth in the sourcemap tree we're currently loading.
|
||||
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
|
||||
console.assert(ctx.depth === 1);
|
||||
|
||||
return transformedMap;
|
||||
}
|
||||
|
||||
// Loader will be called to load transformedMap's source file pointers as well.
|
||||
console.assert(file === 'helloworld.js');
|
||||
// `transformed.js`'s sourcemap points into `helloworld.js`.
|
||||
console.assert(ctx.importer === 'transformed.js');
|
||||
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
|
||||
console.assert(ctx.depth === 2);
|
||||
return null;
|
||||
}
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// file: 'transpiled.min.js',
|
||||
// mappings: 'AAEE',
|
||||
// sources: ['helloworld.js'],
|
||||
// version: 3,
|
||||
// };
|
||||
```
|
||||
|
||||
In this example, `loader` will be called twice:
|
||||
|
||||
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
|
||||
associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
|
||||
be traced through it into the source files it represents.
|
||||
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
|
||||
we return `null`.
|
||||
|
||||
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
|
||||
you were to read the `mappings`, it says "0th column of the first line output line points to the 1st
|
||||
column of the 2nd line of the file `helloworld.js`".
|
||||
|
||||
### Multiple transformations of a file
|
||||
|
||||
As a convenience, if you have multiple single-source transformations of a file, you may pass an
|
||||
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
|
||||
changes the `importer` and `depth` of each call to our loader. So our above example could have been
|
||||
written as:
|
||||
|
||||
```js
|
||||
const remapped = remapping(
|
||||
[minifiedTransformedMap, transformedMap],
|
||||
() => null
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// file: 'transpiled.min.js',
|
||||
// mappings: 'AAEE',
|
||||
// sources: ['helloworld.js'],
|
||||
// version: 3,
|
||||
// };
|
||||
```
|
||||
|
||||
### Advanced control of the loading graph
|
||||
|
||||
#### `source`
|
||||
|
||||
The `source` property can be overridden to any value to change the location of the current load. Eg,
|
||||
for an original source file, it allows us to change the location to the original source regardless
|
||||
of what the sourcemap source entry says. And for transformed files, it allows us to change the
|
||||
relative resolving location for child sources of the loaded sourcemap.
|
||||
|
||||
```js
|
||||
const remapped = remapping(
|
||||
minifiedTransformedMap,
|
||||
(file, ctx) => {
|
||||
|
||||
if (file === 'transformed.js') {
|
||||
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
|
||||
// source files are loaded, they will now be relative to `src/`.
|
||||
ctx.source = 'src/transformed.js';
|
||||
return transformedMap;
|
||||
}
|
||||
|
||||
console.assert(file === 'src/helloworld.js');
|
||||
// We could further change the source of this original file, eg, to be inside a nested directory
|
||||
// itself. This will be reflected in the remapped sourcemap.
|
||||
ctx.source = 'src/nested/helloworld.js';
|
||||
return null;
|
||||
}
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// …,
|
||||
// sources: ['src/nested/helloworld.js'],
|
||||
// };
|
||||
```
|
||||
|
||||
|
||||
#### `content`
|
||||
|
||||
The `content` property can be overridden when we encounter an original source file. Eg, this allows
|
||||
you to manually provide the source content of the original file regardless of whether the
|
||||
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
|
||||
the source content.
|
||||
|
||||
```js
|
||||
const remapped = remapping(
|
||||
minifiedTransformedMap,
|
||||
(file, ctx) => {
|
||||
|
||||
if (file === 'transformed.js') {
|
||||
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
|
||||
// would not include any `sourcesContent` values.
|
||||
return transformedMap;
|
||||
}
|
||||
|
||||
console.assert(file === 'helloworld.js');
|
||||
// We can read the file to provide the source content.
|
||||
ctx.content = fs.readFileSync(file, 'utf8');
|
||||
return null;
|
||||
}
|
||||
);
|
||||
|
||||
console.log(remapped);
|
||||
// {
|
||||
// …,
|
||||
// sourcesContent: [
|
||||
// 'console.log("Hello world!")',
|
||||
// ],
|
||||
// };
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
#### excludeContent
|
||||
|
||||
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
|
||||
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
|
||||
the size of the sourcemap.
|
||||
|
||||
#### decodedMappings
|
||||
|
||||
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
|
||||
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
|
||||
encoding into a VLQ string.
|
||||
197
frontend/node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
Normal file
197
frontend/node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,197 @@
|
||||
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
|
||||
import { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';
|
||||
|
||||
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
|
||||
const EMPTY_SOURCES = [];
|
||||
function SegmentObject(source, line, column, name, content, ignore) {
|
||||
return { source, line, column, name, content, ignore };
|
||||
}
|
||||
function Source(map, sources, source, content, ignore) {
|
||||
return {
|
||||
map,
|
||||
sources,
|
||||
source,
|
||||
content,
|
||||
ignore,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
function MapSource(map, sources) {
|
||||
return Source(map, sources, '', null, false);
|
||||
}
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
function OriginalSource(source, content, ignore) {
|
||||
return Source(null, EMPTY_SOURCES, source, content, ignore);
|
||||
}
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
function traceMappings(tree) {
|
||||
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
|
||||
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
|
||||
const gen = new GenMapping({ file: tree.map.file });
|
||||
const { sources: rootSources, map } = tree;
|
||||
const rootNames = map.names;
|
||||
const rootMappings = decodedMappings(map);
|
||||
for (let i = 0; i < rootMappings.length; i++) {
|
||||
const segments = rootMappings[i];
|
||||
for (let j = 0; j < segments.length; j++) {
|
||||
const segment = segments[j];
|
||||
const genCol = segment[0];
|
||||
let traced = SOURCELESS_MAPPING;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length !== 1) {
|
||||
const source = rootSources[segment[1]];
|
||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||
// respective segment into an original source.
|
||||
if (traced == null)
|
||||
continue;
|
||||
}
|
||||
const { column, line, name, content, source, ignore } = traced;
|
||||
maybeAddSegment(gen, i, genCol, source, line, column, name);
|
||||
if (source && content != null)
|
||||
setSourceContent(gen, source, content);
|
||||
if (ignore)
|
||||
setIgnore(gen, source, true);
|
||||
}
|
||||
}
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
function originalPositionFor(source, line, column, name) {
|
||||
if (!source.map) {
|
||||
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
|
||||
}
|
||||
const segment = traceSegment(source.map, line, column);
|
||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||
if (segment == null)
|
||||
return null;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length === 1)
|
||||
return SOURCELESS_MAPPING;
|
||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||
}
|
||||
|
||||
function asArray(value) {
|
||||
if (Array.isArray(value))
|
||||
return value;
|
||||
return [value];
|
||||
}
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
function buildSourceMapTree(input, loader) {
|
||||
const maps = asArray(input).map((m) => new TraceMap(m, ''));
|
||||
const map = maps.pop();
|
||||
for (let i = 0; i < maps.length; i++) {
|
||||
if (maps[i].sources.length > 1) {
|
||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||
'Did you specify these with the most recent transformation maps first?');
|
||||
}
|
||||
}
|
||||
let tree = build(map, loader, '', 0);
|
||||
for (let i = maps.length - 1; i >= 0; i--) {
|
||||
tree = MapSource(maps[i], [tree]);
|
||||
}
|
||||
return tree;
|
||||
}
|
||||
function build(map, loader, importer, importerDepth) {
|
||||
const { resolvedSources, sourcesContent, ignoreList } = map;
|
||||
const depth = importerDepth + 1;
|
||||
const children = resolvedSources.map((sourceFile, i) => {
|
||||
// The loading context gives the loader more information about why this file is being loaded
|
||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||
// an unmodified source file.
|
||||
const ctx = {
|
||||
importer,
|
||||
depth,
|
||||
source: sourceFile || '',
|
||||
content: undefined,
|
||||
ignore: undefined,
|
||||
};
|
||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||
// TODO: We should eventually support async loading of sourcemap files.
|
||||
const sourceMap = loader(ctx.source, ctx);
|
||||
const { source, content, ignore } = ctx;
|
||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||
if (sourceMap)
|
||||
return build(new TraceMap(sourceMap, source), loader, source, depth);
|
||||
// Else, it's an unmodified source file.
|
||||
// The contents of this unmodified source file can be overridden via the loader context,
|
||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||
// the importing sourcemap's `sourcesContent` field.
|
||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
|
||||
return OriginalSource(source, sourceContent, ignored);
|
||||
});
|
||||
return MapSource(map, children);
|
||||
}
|
||||
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
class SourceMap {
|
||||
constructor(map, options) {
|
||||
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
|
||||
this.version = out.version; // SourceMap spec says this should be first.
|
||||
this.file = out.file;
|
||||
this.mappings = out.mappings;
|
||||
this.names = out.names;
|
||||
this.ignoreList = out.ignoreList;
|
||||
this.sourceRoot = out.sourceRoot;
|
||||
this.sources = out.sources;
|
||||
if (!options.excludeContent) {
|
||||
this.sourcesContent = out.sourcesContent;
|
||||
}
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
function remapping(input, loader, options) {
|
||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||
const tree = buildSourceMapTree(input, loader);
|
||||
return new SourceMap(traceMappings(tree), opts);
|
||||
}
|
||||
|
||||
export { remapping as default };
|
||||
//# sourceMappingURL=remapping.mjs.map
|
||||
1
frontend/node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
Normal file
1
frontend/node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
202
frontend/node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
Normal file
202
frontend/node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
|
||||
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
|
||||
})(this, (function (traceMapping, genMapping) { 'use strict';
|
||||
|
||||
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
|
||||
const EMPTY_SOURCES = [];
|
||||
function SegmentObject(source, line, column, name, content, ignore) {
|
||||
return { source, line, column, name, content, ignore };
|
||||
}
|
||||
function Source(map, sources, source, content, ignore) {
|
||||
return {
|
||||
map,
|
||||
sources,
|
||||
source,
|
||||
content,
|
||||
ignore,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
function MapSource(map, sources) {
|
||||
return Source(map, sources, '', null, false);
|
||||
}
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
function OriginalSource(source, content, ignore) {
|
||||
return Source(null, EMPTY_SOURCES, source, content, ignore);
|
||||
}
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
function traceMappings(tree) {
|
||||
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
|
||||
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
|
||||
const gen = new genMapping.GenMapping({ file: tree.map.file });
|
||||
const { sources: rootSources, map } = tree;
|
||||
const rootNames = map.names;
|
||||
const rootMappings = traceMapping.decodedMappings(map);
|
||||
for (let i = 0; i < rootMappings.length; i++) {
|
||||
const segments = rootMappings[i];
|
||||
for (let j = 0; j < segments.length; j++) {
|
||||
const segment = segments[j];
|
||||
const genCol = segment[0];
|
||||
let traced = SOURCELESS_MAPPING;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length !== 1) {
|
||||
const source = rootSources[segment[1]];
|
||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||
// respective segment into an original source.
|
||||
if (traced == null)
|
||||
continue;
|
||||
}
|
||||
const { column, line, name, content, source, ignore } = traced;
|
||||
genMapping.maybeAddSegment(gen, i, genCol, source, line, column, name);
|
||||
if (source && content != null)
|
||||
genMapping.setSourceContent(gen, source, content);
|
||||
if (ignore)
|
||||
genMapping.setIgnore(gen, source, true);
|
||||
}
|
||||
}
|
||||
return gen;
|
||||
}
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
function originalPositionFor(source, line, column, name) {
|
||||
if (!source.map) {
|
||||
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
|
||||
}
|
||||
const segment = traceMapping.traceSegment(source.map, line, column);
|
||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||
if (segment == null)
|
||||
return null;
|
||||
// 1-length segments only move the current generated column, there's no source information
|
||||
// to gather from it.
|
||||
if (segment.length === 1)
|
||||
return SOURCELESS_MAPPING;
|
||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||
}
|
||||
|
||||
function asArray(value) {
|
||||
if (Array.isArray(value))
|
||||
return value;
|
||||
return [value];
|
||||
}
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
function buildSourceMapTree(input, loader) {
|
||||
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
|
||||
const map = maps.pop();
|
||||
for (let i = 0; i < maps.length; i++) {
|
||||
if (maps[i].sources.length > 1) {
|
||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||
'Did you specify these with the most recent transformation maps first?');
|
||||
}
|
||||
}
|
||||
let tree = build(map, loader, '', 0);
|
||||
for (let i = maps.length - 1; i >= 0; i--) {
|
||||
tree = MapSource(maps[i], [tree]);
|
||||
}
|
||||
return tree;
|
||||
}
|
||||
function build(map, loader, importer, importerDepth) {
|
||||
const { resolvedSources, sourcesContent, ignoreList } = map;
|
||||
const depth = importerDepth + 1;
|
||||
const children = resolvedSources.map((sourceFile, i) => {
|
||||
// The loading context gives the loader more information about why this file is being loaded
|
||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||
// an unmodified source file.
|
||||
const ctx = {
|
||||
importer,
|
||||
depth,
|
||||
source: sourceFile || '',
|
||||
content: undefined,
|
||||
ignore: undefined,
|
||||
};
|
||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||
// TODO: We should eventually support async loading of sourcemap files.
|
||||
const sourceMap = loader(ctx.source, ctx);
|
||||
const { source, content, ignore } = ctx;
|
||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||
if (sourceMap)
|
||||
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
|
||||
// Else, it's an unmodified source file.
|
||||
// The contents of this unmodified source file can be overridden via the loader context,
|
||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||
// the importing sourcemap's `sourcesContent` field.
|
||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
|
||||
return OriginalSource(source, sourceContent, ignored);
|
||||
});
|
||||
return MapSource(map, children);
|
||||
}
|
||||
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
class SourceMap {
|
||||
constructor(map, options) {
|
||||
const out = options.decodedMappings ? genMapping.toDecodedMap(map) : genMapping.toEncodedMap(map);
|
||||
this.version = out.version; // SourceMap spec says this should be first.
|
||||
this.file = out.file;
|
||||
this.mappings = out.mappings;
|
||||
this.names = out.names;
|
||||
this.ignoreList = out.ignoreList;
|
||||
this.sourceRoot = out.sourceRoot;
|
||||
this.sources = out.sources;
|
||||
if (!options.excludeContent) {
|
||||
this.sourcesContent = out.sourcesContent;
|
||||
}
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
function remapping(input, loader, options) {
|
||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||
const tree = buildSourceMapTree(input, loader);
|
||||
return new SourceMap(traceMappings(tree), opts);
|
||||
}
|
||||
|
||||
return remapping;
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=remapping.umd.js.map
|
||||
1
frontend/node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
1
frontend/node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
frontend/node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
14
frontend/node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import type { MapSource as MapSourceType } from './source-map-tree';
|
||||
import type { SourceMapInput, SourceMapLoader } from './types';
|
||||
/**
|
||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||
* `OriginalSource`s and `SourceMapTree`s.
|
||||
*
|
||||
* Every sourcemap is composed of a collection of source files and mappings
|
||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||
* does not have an associated sourcemap, it is considered an original,
|
||||
* unmodified source file.
|
||||
*/
|
||||
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
|
||||
20
frontend/node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
20
frontend/node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import SourceMap from './source-map';
|
||||
import type { SourceMapInput, SourceMapLoader, Options } from './types';
|
||||
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
|
||||
export type { SourceMap };
|
||||
/**
|
||||
* Traces through all the mappings in the root sourcemap, through the sources
|
||||
* (and their sourcemaps), all the way back to the original source location.
|
||||
*
|
||||
* `loader` will be called every time we encounter a source file. If it returns
|
||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||
* it returns a falsey value, that source file is treated as an original,
|
||||
* unmodified source file.
|
||||
*
|
||||
* Pass `excludeContent` to exclude any self-containing source file content
|
||||
* from the output sourcemap.
|
||||
*
|
||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||
* VLQ encoded) mappings.
|
||||
*/
|
||||
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
|
||||
45
frontend/node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
45
frontend/node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
import { GenMapping } from '@jridgewell/gen-mapping';
|
||||
import type { TraceMap } from '@jridgewell/trace-mapping';
|
||||
export declare type SourceMapSegmentObject = {
|
||||
column: number;
|
||||
line: number;
|
||||
name: string;
|
||||
source: string;
|
||||
content: string | null;
|
||||
ignore: boolean;
|
||||
};
|
||||
export declare type OriginalSource = {
|
||||
map: null;
|
||||
sources: Sources[];
|
||||
source: string;
|
||||
content: string | null;
|
||||
ignore: boolean;
|
||||
};
|
||||
export declare type MapSource = {
|
||||
map: TraceMap;
|
||||
sources: Sources[];
|
||||
source: string;
|
||||
content: null;
|
||||
ignore: false;
|
||||
};
|
||||
export declare type Sources = OriginalSource | MapSource;
|
||||
/**
|
||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||
* (which may themselves be SourceMapTrees).
|
||||
*/
|
||||
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
|
||||
/**
|
||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||
* segment tracing ends at the `OriginalSource`.
|
||||
*/
|
||||
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
|
||||
/**
|
||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||
* resolving each mapping in terms of the original source files.
|
||||
*/
|
||||
export declare function traceMappings(tree: MapSource): GenMapping;
|
||||
/**
|
||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||
* child SourceMapTrees, until we find the original source map.
|
||||
*/
|
||||
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
|
||||
18
frontend/node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
18
frontend/node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import type { GenMapping } from '@jridgewell/gen-mapping';
|
||||
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
|
||||
/**
|
||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||
* provided to it.
|
||||
*/
|
||||
export default class SourceMap {
|
||||
file?: string | null;
|
||||
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
|
||||
sourceRoot?: string;
|
||||
names: string[];
|
||||
sources: (string | null)[];
|
||||
sourcesContent?: (string | null)[];
|
||||
version: 3;
|
||||
ignoreList: number[] | undefined;
|
||||
constructor(map: GenMapping, options: Options);
|
||||
toString(): string;
|
||||
}
|
||||
15
frontend/node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
Normal file
15
frontend/node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
|
||||
export type { SourceMapInput };
|
||||
export declare type LoaderContext = {
|
||||
readonly importer: string;
|
||||
readonly depth: number;
|
||||
source: string;
|
||||
content: string | null | undefined;
|
||||
ignore: boolean | undefined;
|
||||
};
|
||||
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
|
||||
export declare type Options = {
|
||||
excludeContent?: boolean;
|
||||
decodedMappings?: boolean;
|
||||
};
|
||||
75
frontend/node_modules/@ampproject/remapping/package.json
generated
vendored
Normal file
75
frontend/node_modules/@ampproject/remapping/package.json
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
{
|
||||
"name": "@ampproject/remapping",
|
||||
"version": "2.3.0",
|
||||
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
|
||||
"keywords": [
|
||||
"source",
|
||||
"map",
|
||||
"remap"
|
||||
],
|
||||
"main": "dist/remapping.umd.js",
|
||||
"module": "dist/remapping.mjs",
|
||||
"types": "dist/types/remapping.d.ts",
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"types": "./dist/types/remapping.d.ts",
|
||||
"browser": "./dist/remapping.umd.js",
|
||||
"require": "./dist/remapping.umd.js",
|
||||
"import": "./dist/remapping.mjs"
|
||||
},
|
||||
"./dist/remapping.umd.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"author": "Justin Ridgewell <jridgewell@google.com>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/ampproject/remapping.git"
|
||||
},
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=6.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "run-s -n build:*",
|
||||
"build:rollup": "rollup -c rollup.config.js",
|
||||
"build:ts": "tsc --project tsconfig.build.json",
|
||||
"lint": "run-s -n lint:*",
|
||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
||||
"prebuild": "rm -rf dist",
|
||||
"prepublishOnly": "npm run preversion",
|
||||
"preversion": "run-s test build",
|
||||
"test": "run-s -n test:lint test:only",
|
||||
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
|
||||
"test:lint": "run-s -n test:lint:*",
|
||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
||||
"test:only": "jest --coverage",
|
||||
"test:watch": "jest --coverage --watch"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@rollup/plugin-typescript": "8.3.2",
|
||||
"@types/jest": "27.4.1",
|
||||
"@typescript-eslint/eslint-plugin": "5.20.0",
|
||||
"@typescript-eslint/parser": "5.20.0",
|
||||
"eslint": "8.14.0",
|
||||
"eslint-config-prettier": "8.5.0",
|
||||
"jest": "27.5.1",
|
||||
"jest-config": "27.5.1",
|
||||
"npm-run-all": "4.1.5",
|
||||
"prettier": "2.6.2",
|
||||
"rollup": "2.70.2",
|
||||
"ts-jest": "27.1.4",
|
||||
"tslib": "2.4.0",
|
||||
"typescript": "4.6.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@jridgewell/gen-mapping": "^0.3.5",
|
||||
"@jridgewell/trace-mapping": "^0.3.24"
|
||||
}
|
||||
}
|
||||
3
frontend/node_modules/@esbuild/win32-x64/README.md
generated
vendored
Normal file
3
frontend/node_modules/@esbuild/win32-x64/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# esbuild
|
||||
|
||||
This is the Windows 64-bit binary for esbuild, a JavaScript bundler and minifier. See https://github.com/evanw/esbuild for details.
|
||||
BIN
frontend/node_modules/@esbuild/win32-x64/esbuild.exe
generated
vendored
Normal file
BIN
frontend/node_modules/@esbuild/win32-x64/esbuild.exe
generated
vendored
Normal file
Binary file not shown.
20
frontend/node_modules/@esbuild/win32-x64/package.json
generated
vendored
Normal file
20
frontend/node_modules/@esbuild/win32-x64/package.json
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "@esbuild/win32-x64",
|
||||
"version": "0.21.5",
|
||||
"description": "The Windows 64-bit binary for esbuild, a JavaScript bundler.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/evanw/esbuild.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"preferUnplugged": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
19
frontend/node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
19
frontend/node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
Copyright 2024 Justin Ridgewell <justin@ridgewell.name>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
227
frontend/node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
227
frontend/node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
Normal file
@@ -0,0 +1,227 @@
|
||||
# @jridgewell/gen-mapping
|
||||
|
||||
> Generate source maps
|
||||
|
||||
`gen-mapping` allows you to generate a source map during transpilation or minification.
|
||||
With a source map, you're able to trace the original location in the source file, either in Chrome's
|
||||
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].
|
||||
|
||||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
|
||||
provides the same `addMapping` and `setSourceContent` API.
|
||||
|
||||
## Installation
|
||||
|
||||
```sh
|
||||
npm install @jridgewell/gen-mapping
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
|
||||
|
||||
const map = new GenMapping({
|
||||
file: 'output.js',
|
||||
sourceRoot: 'https://example.com/',
|
||||
});
|
||||
|
||||
setSourceContent(map, 'input.js', `function foo() {}`);
|
||||
|
||||
addMapping(map, {
|
||||
// Lines start at line 1, columns at column 0.
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'input.js',
|
||||
original: { line: 1, column: 0 },
|
||||
});
|
||||
|
||||
addMapping(map, {
|
||||
generated: { line: 1, column: 9 },
|
||||
source: 'input.js',
|
||||
original: { line: 1, column: 9 },
|
||||
name: 'foo',
|
||||
});
|
||||
|
||||
assert.deepEqual(toDecodedMap(map), {
|
||||
version: 3,
|
||||
file: 'output.js',
|
||||
names: ['foo'],
|
||||
sourceRoot: 'https://example.com/',
|
||||
sources: ['input.js'],
|
||||
sourcesContent: ['function foo() {}'],
|
||||
mappings: [
|
||||
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
|
||||
],
|
||||
});
|
||||
|
||||
assert.deepEqual(toEncodedMap(map), {
|
||||
version: 3,
|
||||
file: 'output.js',
|
||||
names: ['foo'],
|
||||
sourceRoot: 'https://example.com/',
|
||||
sources: ['input.js'],
|
||||
sourcesContent: ['function foo() {}'],
|
||||
mappings: 'AAAA,SAASA',
|
||||
});
|
||||
```
|
||||
|
||||
### Smaller Sourcemaps
|
||||
|
||||
Not everything needs to be added to a sourcemap, and needless markings can cause signficantly
|
||||
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
|
||||
intelligently determine if this marking adds useful information. If not, the marking will be
|
||||
skipped.
|
||||
|
||||
```typescript
|
||||
import { maybeAddMapping } from '@jridgewell/gen-mapping';
|
||||
|
||||
const map = new GenMapping();
|
||||
|
||||
// Adding a sourceless marking at the beginning of a line isn't useful.
|
||||
maybeAddMapping(map, {
|
||||
generated: { line: 1, column: 0 },
|
||||
});
|
||||
|
||||
// Adding a new source marking is useful.
|
||||
maybeAddMapping(map, {
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'input.js',
|
||||
original: { line: 1, column: 0 },
|
||||
});
|
||||
|
||||
// But adding another marking pointing to the exact same original location isn't, even if the
|
||||
// generated column changed.
|
||||
maybeAddMapping(map, {
|
||||
generated: { line: 1, column: 9 },
|
||||
source: 'input.js',
|
||||
original: { line: 1, column: 0 },
|
||||
});
|
||||
|
||||
assert.deepEqual(toEncodedMap(map), {
|
||||
version: 3,
|
||||
names: [],
|
||||
sources: ['input.js'],
|
||||
sourcesContent: [null],
|
||||
mappings: 'AAAA',
|
||||
});
|
||||
```
|
||||
|
||||
## Benchmarks
|
||||
|
||||
```
|
||||
node v18.0.0
|
||||
|
||||
amp.js.map
|
||||
Memory Usage:
|
||||
gen-mapping: addSegment 5852872 bytes
|
||||
gen-mapping: addMapping 7716042 bytes
|
||||
source-map-js 6143250 bytes
|
||||
source-map-0.6.1 6124102 bytes
|
||||
source-map-0.8.0 6121173 bytes
|
||||
Smallest memory usage is gen-mapping: addSegment
|
||||
|
||||
Adding speed:
|
||||
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
|
||||
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
|
||||
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
|
||||
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
|
||||
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
|
||||
Fastest is gen-mapping: addSegment
|
||||
|
||||
Generate speed:
|
||||
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
|
||||
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
|
||||
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
|
||||
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
|
||||
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
|
||||
Fastest is gen-mapping: decoded output
|
||||
|
||||
|
||||
***
|
||||
|
||||
|
||||
babel.min.js.map
|
||||
Memory Usage:
|
||||
gen-mapping: addSegment 37578063 bytes
|
||||
gen-mapping: addMapping 37212897 bytes
|
||||
source-map-js 47638527 bytes
|
||||
source-map-0.6.1 47690503 bytes
|
||||
source-map-0.8.0 47470188 bytes
|
||||
Smallest memory usage is gen-mapping: addMapping
|
||||
|
||||
Adding speed:
|
||||
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
|
||||
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
|
||||
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
|
||||
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
|
||||
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
|
||||
Fastest is gen-mapping: addSegment
|
||||
|
||||
Generate speed:
|
||||
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
|
||||
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
|
||||
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
|
||||
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
|
||||
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
|
||||
Fastest is gen-mapping: decoded output
|
||||
|
||||
|
||||
***
|
||||
|
||||
|
||||
preact.js.map
|
||||
Memory Usage:
|
||||
gen-mapping: addSegment 416247 bytes
|
||||
gen-mapping: addMapping 419824 bytes
|
||||
source-map-js 1024619 bytes
|
||||
source-map-0.6.1 1146004 bytes
|
||||
source-map-0.8.0 1113250 bytes
|
||||
Smallest memory usage is gen-mapping: addSegment
|
||||
|
||||
Adding speed:
|
||||
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
|
||||
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
|
||||
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
|
||||
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
|
||||
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
|
||||
Fastest is gen-mapping: addSegment
|
||||
|
||||
Generate speed:
|
||||
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
|
||||
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
|
||||
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
|
||||
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
|
||||
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
|
||||
Fastest is gen-mapping: decoded output
|
||||
|
||||
|
||||
***
|
||||
|
||||
|
||||
react.js.map
|
||||
Memory Usage:
|
||||
gen-mapping: addSegment 975096 bytes
|
||||
gen-mapping: addMapping 1102981 bytes
|
||||
source-map-js 2918836 bytes
|
||||
source-map-0.6.1 2885435 bytes
|
||||
source-map-0.8.0 2874336 bytes
|
||||
Smallest memory usage is gen-mapping: addSegment
|
||||
|
||||
Adding speed:
|
||||
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
|
||||
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
|
||||
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
|
||||
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
|
||||
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
|
||||
Fastest is gen-mapping: addSegment
|
||||
|
||||
Generate speed:
|
||||
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
|
||||
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
|
||||
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
|
||||
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
|
||||
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
|
||||
Fastest is gen-mapping: decoded output
|
||||
```
|
||||
|
||||
[source-map]: https://www.npmjs.com/package/source-map
|
||||
[trace-mapping]: https://github.com/jridgewell/sourcemaps/tree/main/packages/trace-mapping
|
||||
292
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
292
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
Normal file
@@ -0,0 +1,292 @@
|
||||
// src/set-array.ts
|
||||
var SetArray = class {
|
||||
constructor() {
|
||||
this._indexes = { __proto__: null };
|
||||
this.array = [];
|
||||
}
|
||||
};
|
||||
function cast(set) {
|
||||
return set;
|
||||
}
|
||||
function get(setarr, key) {
|
||||
return cast(setarr)._indexes[key];
|
||||
}
|
||||
function put(setarr, key) {
|
||||
const index = get(setarr, key);
|
||||
if (index !== void 0) return index;
|
||||
const { array, _indexes: indexes } = cast(setarr);
|
||||
const length = array.push(key);
|
||||
return indexes[key] = length - 1;
|
||||
}
|
||||
function remove(setarr, key) {
|
||||
const index = get(setarr, key);
|
||||
if (index === void 0) return;
|
||||
const { array, _indexes: indexes } = cast(setarr);
|
||||
for (let i = index + 1; i < array.length; i++) {
|
||||
const k = array[i];
|
||||
array[i - 1] = k;
|
||||
indexes[k]--;
|
||||
}
|
||||
indexes[key] = void 0;
|
||||
array.pop();
|
||||
}
|
||||
|
||||
// src/gen-mapping.ts
|
||||
import {
|
||||
encode
|
||||
} from "@jridgewell/sourcemap-codec";
|
||||
import { TraceMap, decodedMappings } from "@jridgewell/trace-mapping";
|
||||
|
||||
// src/sourcemap-segment.ts
|
||||
var COLUMN = 0;
|
||||
var SOURCES_INDEX = 1;
|
||||
var SOURCE_LINE = 2;
|
||||
var SOURCE_COLUMN = 3;
|
||||
var NAMES_INDEX = 4;
|
||||
|
||||
// src/gen-mapping.ts
|
||||
var NO_NAME = -1;
|
||||
var GenMapping = class {
|
||||
constructor({ file, sourceRoot } = {}) {
|
||||
this._names = new SetArray();
|
||||
this._sources = new SetArray();
|
||||
this._sourcesContent = [];
|
||||
this._mappings = [];
|
||||
this.file = file;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this._ignoreList = new SetArray();
|
||||
}
|
||||
};
|
||||
function cast2(map) {
|
||||
return map;
|
||||
}
|
||||
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
return addSegmentInternal(
|
||||
false,
|
||||
map,
|
||||
genLine,
|
||||
genColumn,
|
||||
source,
|
||||
sourceLine,
|
||||
sourceColumn,
|
||||
name,
|
||||
content
|
||||
);
|
||||
}
|
||||
function addMapping(map, mapping) {
|
||||
return addMappingInternal(false, map, mapping);
|
||||
}
|
||||
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||
return addSegmentInternal(
|
||||
true,
|
||||
map,
|
||||
genLine,
|
||||
genColumn,
|
||||
source,
|
||||
sourceLine,
|
||||
sourceColumn,
|
||||
name,
|
||||
content
|
||||
);
|
||||
};
|
||||
var maybeAddMapping = (map, mapping) => {
|
||||
return addMappingInternal(true, map, mapping);
|
||||
};
|
||||
function setSourceContent(map, source, content) {
|
||||
const {
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast2(map);
|
||||
const index = put(sources, source);
|
||||
sourcesContent[index] = content;
|
||||
}
|
||||
function setIgnore(map, source, ignore = true) {
|
||||
const {
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_ignoreList: ignoreList
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast2(map);
|
||||
const index = put(sources, source);
|
||||
if (index === sourcesContent.length) sourcesContent[index] = null;
|
||||
if (ignore) put(ignoreList, index);
|
||||
else remove(ignoreList, index);
|
||||
}
|
||||
function toDecodedMap(map) {
|
||||
const {
|
||||
_mappings: mappings,
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_names: names,
|
||||
_ignoreList: ignoreList
|
||||
// _originalScopes: originalScopes,
|
||||
// _generatedRanges: generatedRanges,
|
||||
} = cast2(map);
|
||||
removeEmptyFinalLines(mappings);
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file || void 0,
|
||||
names: names.array,
|
||||
sourceRoot: map.sourceRoot || void 0,
|
||||
sources: sources.array,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
// originalScopes,
|
||||
// generatedRanges,
|
||||
ignoreList: ignoreList.array
|
||||
};
|
||||
}
|
||||
function toEncodedMap(map) {
|
||||
const decoded = toDecodedMap(map);
|
||||
return Object.assign({}, decoded, {
|
||||
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
|
||||
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
|
||||
mappings: encode(decoded.mappings)
|
||||
});
|
||||
}
|
||||
function fromMap(input) {
|
||||
const map = new TraceMap(input);
|
||||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||
putAll(cast2(gen)._names, map.names);
|
||||
putAll(cast2(gen)._sources, map.sources);
|
||||
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||
cast2(gen)._mappings = decodedMappings(map);
|
||||
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
|
||||
return gen;
|
||||
}
|
||||
function allMappings(map) {
|
||||
const out = [];
|
||||
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
|
||||
for (let i = 0; i < mappings.length; i++) {
|
||||
const line = mappings[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||
let source = void 0;
|
||||
let original = void 0;
|
||||
let name = void 0;
|
||||
if (seg.length !== 1) {
|
||||
source = sources.array[seg[SOURCES_INDEX]];
|
||||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
|
||||
}
|
||||
out.push({ generated, source, original, name });
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
const {
|
||||
_mappings: mappings,
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_names: names
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast2(map);
|
||||
const line = getIndex(mappings, genLine);
|
||||
const index = getColumnIndex(line, genColumn);
|
||||
if (!source) {
|
||||
if (skipable && skipSourceless(line, index)) return;
|
||||
return insert(line, index, [genColumn]);
|
||||
}
|
||||
assert(sourceLine);
|
||||
assert(sourceColumn);
|
||||
const sourcesIndex = put(sources, source);
|
||||
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
|
||||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||
return;
|
||||
}
|
||||
return insert(
|
||||
line,
|
||||
index,
|
||||
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
|
||||
);
|
||||
}
|
||||
function assert(_val) {
|
||||
}
|
||||
function getIndex(arr, index) {
|
||||
for (let i = arr.length; i <= index; i++) {
|
||||
arr[i] = [];
|
||||
}
|
||||
return arr[index];
|
||||
}
|
||||
function getColumnIndex(line, genColumn) {
|
||||
let index = line.length;
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
const current = line[i];
|
||||
if (genColumn >= current[COLUMN]) break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
function removeEmptyFinalLines(mappings) {
|
||||
const { length } = mappings;
|
||||
let len = length;
|
||||
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||
if (mappings[i].length > 0) break;
|
||||
}
|
||||
if (len < length) mappings.length = len;
|
||||
}
|
||||
function putAll(setarr, array) {
|
||||
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
|
||||
}
|
||||
function skipSourceless(line, index) {
|
||||
if (index === 0) return true;
|
||||
const prev = line[index - 1];
|
||||
return prev.length === 1;
|
||||
}
|
||||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||
if (index === 0) return false;
|
||||
const prev = line[index - 1];
|
||||
if (prev.length === 1) return false;
|
||||
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
|
||||
}
|
||||
function addMappingInternal(skipable, map, mapping) {
|
||||
const { generated, source, original, name, content } = mapping;
|
||||
if (!source) {
|
||||
return addSegmentInternal(
|
||||
skipable,
|
||||
map,
|
||||
generated.line - 1,
|
||||
generated.column,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null
|
||||
);
|
||||
}
|
||||
assert(original);
|
||||
return addSegmentInternal(
|
||||
skipable,
|
||||
map,
|
||||
generated.line - 1,
|
||||
generated.column,
|
||||
source,
|
||||
original.line - 1,
|
||||
original.column,
|
||||
name,
|
||||
content
|
||||
);
|
||||
}
|
||||
export {
|
||||
GenMapping,
|
||||
addMapping,
|
||||
addSegment,
|
||||
allMappings,
|
||||
fromMap,
|
||||
maybeAddMapping,
|
||||
maybeAddSegment,
|
||||
setIgnore,
|
||||
setSourceContent,
|
||||
toDecodedMap,
|
||||
toEncodedMap
|
||||
};
|
||||
//# sourceMappingURL=gen-mapping.mjs.map
|
||||
6
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
6
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
358
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
358
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
Normal file
@@ -0,0 +1,358 @@
|
||||
(function (global, factory) {
|
||||
if (typeof exports === 'object' && typeof module !== 'undefined') {
|
||||
factory(module, require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping'));
|
||||
module.exports = def(module);
|
||||
} else if (typeof define === 'function' && define.amd) {
|
||||
define(['module', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], function(mod) {
|
||||
factory.apply(this, arguments);
|
||||
mod.exports = def(mod);
|
||||
});
|
||||
} else {
|
||||
const mod = { exports: {} };
|
||||
factory(mod, global.sourcemapCodec, global.traceMapping);
|
||||
global = typeof globalThis !== 'undefined' ? globalThis : global || self;
|
||||
global.genMapping = def(mod);
|
||||
}
|
||||
function def(m) { return 'default' in m.exports ? m.exports.default : m.exports; }
|
||||
})(this, (function (module, require_sourcemapCodec, require_traceMapping) {
|
||||
"use strict";
|
||||
var __create = Object.create;
|
||||
var __defProp = Object.defineProperty;
|
||||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
||||
var __getOwnPropNames = Object.getOwnPropertyNames;
|
||||
var __getProtoOf = Object.getPrototypeOf;
|
||||
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
||||
var __commonJS = (cb, mod) => function __require() {
|
||||
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
|
||||
};
|
||||
var __export = (target, all) => {
|
||||
for (var name in all)
|
||||
__defProp(target, name, { get: all[name], enumerable: true });
|
||||
};
|
||||
var __copyProps = (to, from, except, desc) => {
|
||||
if (from && typeof from === "object" || typeof from === "function") {
|
||||
for (let key of __getOwnPropNames(from))
|
||||
if (!__hasOwnProp.call(to, key) && key !== except)
|
||||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
||||
}
|
||||
return to;
|
||||
};
|
||||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
||||
// If the importer is in node compatibility mode or this is not an ESM
|
||||
// file that has been converted to a CommonJS file using a Babel-
|
||||
// compatible transform (i.e. "__esModule" has not been set), then set
|
||||
// "default" to the CommonJS "module.exports" for node compatibility.
|
||||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
||||
mod
|
||||
));
|
||||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
||||
|
||||
// umd:@jridgewell/sourcemap-codec
|
||||
var require_sourcemap_codec = __commonJS({
|
||||
"umd:@jridgewell/sourcemap-codec"(exports, module2) {
|
||||
module2.exports = require_sourcemapCodec;
|
||||
}
|
||||
});
|
||||
|
||||
// umd:@jridgewell/trace-mapping
|
||||
var require_trace_mapping = __commonJS({
|
||||
"umd:@jridgewell/trace-mapping"(exports, module2) {
|
||||
module2.exports = require_traceMapping;
|
||||
}
|
||||
});
|
||||
|
||||
// src/gen-mapping.ts
|
||||
var gen_mapping_exports = {};
|
||||
__export(gen_mapping_exports, {
|
||||
GenMapping: () => GenMapping,
|
||||
addMapping: () => addMapping,
|
||||
addSegment: () => addSegment,
|
||||
allMappings: () => allMappings,
|
||||
fromMap: () => fromMap,
|
||||
maybeAddMapping: () => maybeAddMapping,
|
||||
maybeAddSegment: () => maybeAddSegment,
|
||||
setIgnore: () => setIgnore,
|
||||
setSourceContent: () => setSourceContent,
|
||||
toDecodedMap: () => toDecodedMap,
|
||||
toEncodedMap: () => toEncodedMap
|
||||
});
|
||||
module.exports = __toCommonJS(gen_mapping_exports);
|
||||
|
||||
// src/set-array.ts
|
||||
var SetArray = class {
|
||||
constructor() {
|
||||
this._indexes = { __proto__: null };
|
||||
this.array = [];
|
||||
}
|
||||
};
|
||||
function cast(set) {
|
||||
return set;
|
||||
}
|
||||
function get(setarr, key) {
|
||||
return cast(setarr)._indexes[key];
|
||||
}
|
||||
function put(setarr, key) {
|
||||
const index = get(setarr, key);
|
||||
if (index !== void 0) return index;
|
||||
const { array, _indexes: indexes } = cast(setarr);
|
||||
const length = array.push(key);
|
||||
return indexes[key] = length - 1;
|
||||
}
|
||||
function remove(setarr, key) {
|
||||
const index = get(setarr, key);
|
||||
if (index === void 0) return;
|
||||
const { array, _indexes: indexes } = cast(setarr);
|
||||
for (let i = index + 1; i < array.length; i++) {
|
||||
const k = array[i];
|
||||
array[i - 1] = k;
|
||||
indexes[k]--;
|
||||
}
|
||||
indexes[key] = void 0;
|
||||
array.pop();
|
||||
}
|
||||
|
||||
// src/gen-mapping.ts
|
||||
var import_sourcemap_codec = __toESM(require_sourcemap_codec());
|
||||
var import_trace_mapping = __toESM(require_trace_mapping());
|
||||
|
||||
// src/sourcemap-segment.ts
|
||||
var COLUMN = 0;
|
||||
var SOURCES_INDEX = 1;
|
||||
var SOURCE_LINE = 2;
|
||||
var SOURCE_COLUMN = 3;
|
||||
var NAMES_INDEX = 4;
|
||||
|
||||
// src/gen-mapping.ts
|
||||
var NO_NAME = -1;
|
||||
var GenMapping = class {
|
||||
constructor({ file, sourceRoot } = {}) {
|
||||
this._names = new SetArray();
|
||||
this._sources = new SetArray();
|
||||
this._sourcesContent = [];
|
||||
this._mappings = [];
|
||||
this.file = file;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this._ignoreList = new SetArray();
|
||||
}
|
||||
};
|
||||
function cast2(map) {
|
||||
return map;
|
||||
}
|
||||
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
return addSegmentInternal(
|
||||
false,
|
||||
map,
|
||||
genLine,
|
||||
genColumn,
|
||||
source,
|
||||
sourceLine,
|
||||
sourceColumn,
|
||||
name,
|
||||
content
|
||||
);
|
||||
}
|
||||
function addMapping(map, mapping) {
|
||||
return addMappingInternal(false, map, mapping);
|
||||
}
|
||||
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
||||
return addSegmentInternal(
|
||||
true,
|
||||
map,
|
||||
genLine,
|
||||
genColumn,
|
||||
source,
|
||||
sourceLine,
|
||||
sourceColumn,
|
||||
name,
|
||||
content
|
||||
);
|
||||
};
|
||||
var maybeAddMapping = (map, mapping) => {
|
||||
return addMappingInternal(true, map, mapping);
|
||||
};
|
||||
function setSourceContent(map, source, content) {
|
||||
const {
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast2(map);
|
||||
const index = put(sources, source);
|
||||
sourcesContent[index] = content;
|
||||
}
|
||||
function setIgnore(map, source, ignore = true) {
|
||||
const {
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_ignoreList: ignoreList
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast2(map);
|
||||
const index = put(sources, source);
|
||||
if (index === sourcesContent.length) sourcesContent[index] = null;
|
||||
if (ignore) put(ignoreList, index);
|
||||
else remove(ignoreList, index);
|
||||
}
|
||||
function toDecodedMap(map) {
|
||||
const {
|
||||
_mappings: mappings,
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_names: names,
|
||||
_ignoreList: ignoreList
|
||||
// _originalScopes: originalScopes,
|
||||
// _generatedRanges: generatedRanges,
|
||||
} = cast2(map);
|
||||
removeEmptyFinalLines(mappings);
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file || void 0,
|
||||
names: names.array,
|
||||
sourceRoot: map.sourceRoot || void 0,
|
||||
sources: sources.array,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
// originalScopes,
|
||||
// generatedRanges,
|
||||
ignoreList: ignoreList.array
|
||||
};
|
||||
}
|
||||
function toEncodedMap(map) {
|
||||
const decoded = toDecodedMap(map);
|
||||
return Object.assign({}, decoded, {
|
||||
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
|
||||
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
|
||||
mappings: (0, import_sourcemap_codec.encode)(decoded.mappings)
|
||||
});
|
||||
}
|
||||
function fromMap(input) {
|
||||
const map = new import_trace_mapping.TraceMap(input);
|
||||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||
putAll(cast2(gen)._names, map.names);
|
||||
putAll(cast2(gen)._sources, map.sources);
|
||||
cast2(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||
cast2(gen)._mappings = (0, import_trace_mapping.decodedMappings)(map);
|
||||
if (map.ignoreList) putAll(cast2(gen)._ignoreList, map.ignoreList);
|
||||
return gen;
|
||||
}
|
||||
function allMappings(map) {
|
||||
const out = [];
|
||||
const { _mappings: mappings, _sources: sources, _names: names } = cast2(map);
|
||||
for (let i = 0; i < mappings.length; i++) {
|
||||
const line = mappings[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||
let source = void 0;
|
||||
let original = void 0;
|
||||
let name = void 0;
|
||||
if (seg.length !== 1) {
|
||||
source = sources.array[seg[SOURCES_INDEX]];
|
||||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
|
||||
}
|
||||
out.push({ generated, source, original, name });
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
||||
const {
|
||||
_mappings: mappings,
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_names: names
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast2(map);
|
||||
const line = getIndex(mappings, genLine);
|
||||
const index = getColumnIndex(line, genColumn);
|
||||
if (!source) {
|
||||
if (skipable && skipSourceless(line, index)) return;
|
||||
return insert(line, index, [genColumn]);
|
||||
}
|
||||
assert(sourceLine);
|
||||
assert(sourceColumn);
|
||||
const sourcesIndex = put(sources, source);
|
||||
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
|
||||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||
return;
|
||||
}
|
||||
return insert(
|
||||
line,
|
||||
index,
|
||||
name ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] : [genColumn, sourcesIndex, sourceLine, sourceColumn]
|
||||
);
|
||||
}
|
||||
function assert(_val) {
|
||||
}
|
||||
function getIndex(arr, index) {
|
||||
for (let i = arr.length; i <= index; i++) {
|
||||
arr[i] = [];
|
||||
}
|
||||
return arr[index];
|
||||
}
|
||||
function getColumnIndex(line, genColumn) {
|
||||
let index = line.length;
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
const current = line[i];
|
||||
if (genColumn >= current[COLUMN]) break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
function insert(array, index, value) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
function removeEmptyFinalLines(mappings) {
|
||||
const { length } = mappings;
|
||||
let len = length;
|
||||
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||
if (mappings[i].length > 0) break;
|
||||
}
|
||||
if (len < length) mappings.length = len;
|
||||
}
|
||||
function putAll(setarr, array) {
|
||||
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
|
||||
}
|
||||
function skipSourceless(line, index) {
|
||||
if (index === 0) return true;
|
||||
const prev = line[index - 1];
|
||||
return prev.length === 1;
|
||||
}
|
||||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
||||
if (index === 0) return false;
|
||||
const prev = line[index - 1];
|
||||
if (prev.length === 1) return false;
|
||||
return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
|
||||
}
|
||||
function addMappingInternal(skipable, map, mapping) {
|
||||
const { generated, source, original, name, content } = mapping;
|
||||
if (!source) {
|
||||
return addSegmentInternal(
|
||||
skipable,
|
||||
map,
|
||||
generated.line - 1,
|
||||
generated.column,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null
|
||||
);
|
||||
}
|
||||
assert(original);
|
||||
return addSegmentInternal(
|
||||
skipable,
|
||||
map,
|
||||
generated.line - 1,
|
||||
generated.column,
|
||||
source,
|
||||
original.line - 1,
|
||||
original.column,
|
||||
name,
|
||||
content
|
||||
);
|
||||
}
|
||||
}));
|
||||
//# sourceMappingURL=gen-mapping.umd.js.map
|
||||
6
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
6
frontend/node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
88
frontend/node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
88
frontend/node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
|
||||
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
|
||||
export type Options = {
|
||||
file?: string | null;
|
||||
sourceRoot?: string | null;
|
||||
};
|
||||
/**
|
||||
* Provides the state to generate a sourcemap.
|
||||
*/
|
||||
export declare class GenMapping {
|
||||
private _names;
|
||||
private _sources;
|
||||
private _sourcesContent;
|
||||
private _mappings;
|
||||
private _ignoreList;
|
||||
file: string | null | undefined;
|
||||
sourceRoot: string | null | undefined;
|
||||
constructor({ file, sourceRoot }?: Options);
|
||||
}
|
||||
/**
|
||||
* A low-level API to associate a generated position with an original source position. Line and
|
||||
* column here are 0-based, unlike `addMapping`.
|
||||
*/
|
||||
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
|
||||
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
|
||||
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
|
||||
/**
|
||||
* A high-level API to associate a generated position with an original source position. Line is
|
||||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||
*/
|
||||
export declare function addMapping(map: GenMapping, mapping: {
|
||||
generated: Pos;
|
||||
source?: null;
|
||||
original?: null;
|
||||
name?: null;
|
||||
content?: null;
|
||||
}): void;
|
||||
export declare function addMapping(map: GenMapping, mapping: {
|
||||
generated: Pos;
|
||||
source: string;
|
||||
original: Pos;
|
||||
name?: null;
|
||||
content?: string | null;
|
||||
}): void;
|
||||
export declare function addMapping(map: GenMapping, mapping: {
|
||||
generated: Pos;
|
||||
source: string;
|
||||
original: Pos;
|
||||
name: string;
|
||||
content?: string | null;
|
||||
}): void;
|
||||
/**
|
||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||
* not add a segment with a lower generated line/column than one that came before.
|
||||
*/
|
||||
export declare const maybeAddSegment: typeof addSegment;
|
||||
/**
|
||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||
* not add a mapping with a lower generated line/column than one that came before.
|
||||
*/
|
||||
export declare const maybeAddMapping: typeof addMapping;
|
||||
/**
|
||||
* Adds/removes the content of the source file to the source map.
|
||||
*/
|
||||
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
|
||||
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
|
||||
/**
|
||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||
*/
|
||||
export declare function fromMap(input: SourceMapInput): GenMapping;
|
||||
/**
|
||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||
* passed to the `source-map` library.
|
||||
*/
|
||||
export declare function allMappings(map: GenMapping): Mapping[];
|
||||
32
frontend/node_modules/@jridgewell/gen-mapping/dist/types/set-array.d.ts
generated
vendored
Normal file
32
frontend/node_modules/@jridgewell/gen-mapping/dist/types/set-array.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
type Key = string | number | symbol;
|
||||
/**
|
||||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||
* index of the `key` in the backing array.
|
||||
*
|
||||
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||
* and there are never duplicates.
|
||||
*/
|
||||
export declare class SetArray<T extends Key = Key> {
|
||||
private _indexes;
|
||||
array: readonly T[];
|
||||
constructor();
|
||||
}
|
||||
/**
|
||||
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||
*/
|
||||
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
|
||||
/**
|
||||
* Puts `key` into the backing array, if it is not already present. Returns
|
||||
* the index of the `key` in the backing array.
|
||||
*/
|
||||
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
|
||||
/**
|
||||
* Pops the last added item out of the SetArray.
|
||||
*/
|
||||
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
|
||||
/**
|
||||
* Removes the key, if it exists in the set.
|
||||
*/
|
||||
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
|
||||
export {};
|
||||
12
frontend/node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
12
frontend/node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
type GeneratedColumn = number;
|
||||
type SourcesIndex = number;
|
||||
type SourceLine = number;
|
||||
type SourceColumn = number;
|
||||
type NamesIndex = number;
|
||||
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||
export declare const COLUMN = 0;
|
||||
export declare const SOURCES_INDEX = 1;
|
||||
export declare const SOURCE_LINE = 2;
|
||||
export declare const SOURCE_COLUMN = 3;
|
||||
export declare const NAMES_INDEX = 4;
|
||||
export {};
|
||||
43
frontend/node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
43
frontend/node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
import type { SourceMapSegment } from './sourcemap-segment';
|
||||
export interface SourceMapV3 {
|
||||
file?: string | null;
|
||||
names: readonly string[];
|
||||
sourceRoot?: string;
|
||||
sources: readonly (string | null)[];
|
||||
sourcesContent?: readonly (string | null)[];
|
||||
version: 3;
|
||||
ignoreList?: readonly number[];
|
||||
}
|
||||
export interface EncodedSourceMap extends SourceMapV3 {
|
||||
mappings: string;
|
||||
}
|
||||
export interface DecodedSourceMap extends SourceMapV3 {
|
||||
mappings: readonly SourceMapSegment[][];
|
||||
}
|
||||
export interface Pos {
|
||||
line: number;
|
||||
column: number;
|
||||
}
|
||||
export interface OriginalPos extends Pos {
|
||||
source: string;
|
||||
}
|
||||
export interface BindingExpressionRange {
|
||||
start: Pos;
|
||||
expression: string;
|
||||
}
|
||||
export type Mapping = {
|
||||
generated: Pos;
|
||||
source: undefined;
|
||||
original: undefined;
|
||||
name: undefined;
|
||||
} | {
|
||||
generated: Pos;
|
||||
source: string;
|
||||
original: Pos;
|
||||
name: string;
|
||||
} | {
|
||||
generated: Pos;
|
||||
source: string;
|
||||
original: Pos;
|
||||
name: undefined;
|
||||
};
|
||||
67
frontend/node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
67
frontend/node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
{
|
||||
"name": "@jridgewell/gen-mapping",
|
||||
"version": "0.3.13",
|
||||
"description": "Generate source maps",
|
||||
"keywords": [
|
||||
"source",
|
||||
"map"
|
||||
],
|
||||
"main": "dist/gen-mapping.umd.js",
|
||||
"module": "dist/gen-mapping.mjs",
|
||||
"types": "types/gen-mapping.d.cts",
|
||||
"files": [
|
||||
"dist",
|
||||
"src",
|
||||
"types"
|
||||
],
|
||||
"exports": {
|
||||
".": [
|
||||
{
|
||||
"import": {
|
||||
"types": "./types/gen-mapping.d.mts",
|
||||
"default": "./dist/gen-mapping.mjs"
|
||||
},
|
||||
"default": {
|
||||
"types": "./types/gen-mapping.d.cts",
|
||||
"default": "./dist/gen-mapping.umd.js"
|
||||
}
|
||||
},
|
||||
"./dist/gen-mapping.umd.js"
|
||||
],
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"scripts": {
|
||||
"benchmark": "run-s build:code benchmark:*",
|
||||
"benchmark:install": "cd benchmark && npm install",
|
||||
"benchmark:only": "node --expose-gc benchmark/index.js",
|
||||
"build": "run-s -n build:code build:types",
|
||||
"build:code": "node ../../esbuild.mjs gen-mapping.ts",
|
||||
"build:types": "run-s build:types:force build:types:emit build:types:mts",
|
||||
"build:types:force": "rimraf tsconfig.build.tsbuildinfo",
|
||||
"build:types:emit": "tsc --project tsconfig.build.json",
|
||||
"build:types:mts": "node ../../mts-types.mjs",
|
||||
"clean": "run-s -n clean:code clean:types",
|
||||
"clean:code": "tsc --build --clean tsconfig.build.json",
|
||||
"clean:types": "rimraf dist types",
|
||||
"test": "run-s -n test:types test:only test:format",
|
||||
"test:format": "prettier --check '{src,test}/**/*.ts'",
|
||||
"test:only": "mocha",
|
||||
"test:types": "eslint '{src,test}/**/*.ts'",
|
||||
"lint": "run-s -n lint:types lint:format",
|
||||
"lint:format": "npm run test:format -- --write",
|
||||
"lint:types": "npm run test:types -- --fix",
|
||||
"prepublishOnly": "npm run-s -n build test"
|
||||
},
|
||||
"homepage": "https://github.com/jridgewell/sourcemaps/tree/main/packages/gen-mapping",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/jridgewell/sourcemaps.git",
|
||||
"directory": "packages/gen-mapping"
|
||||
},
|
||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/sourcemap-codec": "^1.5.0",
|
||||
"@jridgewell/trace-mapping": "^0.3.24"
|
||||
}
|
||||
}
|
||||
614
frontend/node_modules/@jridgewell/gen-mapping/src/gen-mapping.ts
generated
vendored
Normal file
614
frontend/node_modules/@jridgewell/gen-mapping/src/gen-mapping.ts
generated
vendored
Normal file
@@ -0,0 +1,614 @@
|
||||
import { SetArray, put, remove } from './set-array';
|
||||
import {
|
||||
encode,
|
||||
// encodeGeneratedRanges,
|
||||
// encodeOriginalScopes
|
||||
} from '@jridgewell/sourcemap-codec';
|
||||
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
|
||||
|
||||
import {
|
||||
COLUMN,
|
||||
SOURCES_INDEX,
|
||||
SOURCE_LINE,
|
||||
SOURCE_COLUMN,
|
||||
NAMES_INDEX,
|
||||
} from './sourcemap-segment';
|
||||
|
||||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
||||
// import type { OriginalScope, GeneratedRange } from '@jridgewell/sourcemap-codec';
|
||||
import type { SourceMapSegment } from './sourcemap-segment';
|
||||
import type {
|
||||
DecodedSourceMap,
|
||||
EncodedSourceMap,
|
||||
Pos,
|
||||
Mapping,
|
||||
// BindingExpressionRange,
|
||||
// OriginalPos,
|
||||
// OriginalScopeInfo,
|
||||
// GeneratedRangeInfo,
|
||||
} from './types';
|
||||
|
||||
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
|
||||
|
||||
export type Options = {
|
||||
file?: string | null;
|
||||
sourceRoot?: string | null;
|
||||
};
|
||||
|
||||
const NO_NAME = -1;
|
||||
|
||||
/**
|
||||
* Provides the state to generate a sourcemap.
|
||||
*/
|
||||
export class GenMapping {
|
||||
declare private _names: SetArray<string>;
|
||||
declare private _sources: SetArray<string>;
|
||||
declare private _sourcesContent: (string | null)[];
|
||||
declare private _mappings: SourceMapSegment[][];
|
||||
// private declare _originalScopes: OriginalScope[][];
|
||||
// private declare _generatedRanges: GeneratedRange[];
|
||||
declare private _ignoreList: SetArray<number>;
|
||||
declare file: string | null | undefined;
|
||||
declare sourceRoot: string | null | undefined;
|
||||
|
||||
constructor({ file, sourceRoot }: Options = {}) {
|
||||
this._names = new SetArray();
|
||||
this._sources = new SetArray();
|
||||
this._sourcesContent = [];
|
||||
this._mappings = [];
|
||||
// this._originalScopes = [];
|
||||
// this._generatedRanges = [];
|
||||
this.file = file;
|
||||
this.sourceRoot = sourceRoot;
|
||||
this._ignoreList = new SetArray();
|
||||
}
|
||||
}
|
||||
|
||||
interface PublicMap {
|
||||
_names: GenMapping['_names'];
|
||||
_sources: GenMapping['_sources'];
|
||||
_sourcesContent: GenMapping['_sourcesContent'];
|
||||
_mappings: GenMapping['_mappings'];
|
||||
// _originalScopes: GenMapping['_originalScopes'];
|
||||
// _generatedRanges: GenMapping['_generatedRanges'];
|
||||
_ignoreList: GenMapping['_ignoreList'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
||||
* with public access modifiers.
|
||||
*/
|
||||
function cast(map: unknown): PublicMap {
|
||||
return map as any;
|
||||
}
|
||||
|
||||
/**
|
||||
* A low-level API to associate a generated position with an original source position. Line and
|
||||
* column here are 0-based, unlike `addMapping`.
|
||||
*/
|
||||
export function addSegment(
|
||||
map: GenMapping,
|
||||
genLine: number,
|
||||
genColumn: number,
|
||||
source?: null,
|
||||
sourceLine?: null,
|
||||
sourceColumn?: null,
|
||||
name?: null,
|
||||
content?: null,
|
||||
): void;
|
||||
export function addSegment(
|
||||
map: GenMapping,
|
||||
genLine: number,
|
||||
genColumn: number,
|
||||
source: string,
|
||||
sourceLine: number,
|
||||
sourceColumn: number,
|
||||
name?: null,
|
||||
content?: string | null,
|
||||
): void;
|
||||
export function addSegment(
|
||||
map: GenMapping,
|
||||
genLine: number,
|
||||
genColumn: number,
|
||||
source: string,
|
||||
sourceLine: number,
|
||||
sourceColumn: number,
|
||||
name: string,
|
||||
content?: string | null,
|
||||
): void;
|
||||
export function addSegment(
|
||||
map: GenMapping,
|
||||
genLine: number,
|
||||
genColumn: number,
|
||||
source?: string | null,
|
||||
sourceLine?: number | null,
|
||||
sourceColumn?: number | null,
|
||||
name?: string | null,
|
||||
content?: string | null,
|
||||
): void {
|
||||
return addSegmentInternal(
|
||||
false,
|
||||
map,
|
||||
genLine,
|
||||
genColumn,
|
||||
source,
|
||||
sourceLine,
|
||||
sourceColumn,
|
||||
name,
|
||||
content,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A high-level API to associate a generated position with an original source position. Line is
|
||||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
||||
*/
|
||||
export function addMapping(
|
||||
map: GenMapping,
|
||||
mapping: {
|
||||
generated: Pos;
|
||||
source?: null;
|
||||
original?: null;
|
||||
name?: null;
|
||||
content?: null;
|
||||
},
|
||||
): void;
|
||||
export function addMapping(
|
||||
map: GenMapping,
|
||||
mapping: {
|
||||
generated: Pos;
|
||||
source: string;
|
||||
original: Pos;
|
||||
name?: null;
|
||||
content?: string | null;
|
||||
},
|
||||
): void;
|
||||
export function addMapping(
|
||||
map: GenMapping,
|
||||
mapping: {
|
||||
generated: Pos;
|
||||
source: string;
|
||||
original: Pos;
|
||||
name: string;
|
||||
content?: string | null;
|
||||
},
|
||||
): void;
|
||||
export function addMapping(
|
||||
map: GenMapping,
|
||||
mapping: {
|
||||
generated: Pos;
|
||||
source?: string | null;
|
||||
original?: Pos | null;
|
||||
name?: string | null;
|
||||
content?: string | null;
|
||||
},
|
||||
): void {
|
||||
return addMappingInternal(false, map, mapping as Parameters<typeof addMappingInternal>[2]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
||||
* not add a segment with a lower generated line/column than one that came before.
|
||||
*/
|
||||
export const maybeAddSegment: typeof addSegment = (
|
||||
map,
|
||||
genLine,
|
||||
genColumn,
|
||||
source,
|
||||
sourceLine,
|
||||
sourceColumn,
|
||||
name,
|
||||
content,
|
||||
) => {
|
||||
return addSegmentInternal(
|
||||
true,
|
||||
map,
|
||||
genLine,
|
||||
genColumn,
|
||||
source,
|
||||
sourceLine,
|
||||
sourceColumn,
|
||||
name,
|
||||
content,
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
||||
* not add a mapping with a lower generated line/column than one that came before.
|
||||
*/
|
||||
export const maybeAddMapping: typeof addMapping = (map, mapping) => {
|
||||
return addMappingInternal(true, map, mapping as Parameters<typeof addMappingInternal>[2]);
|
||||
};
|
||||
|
||||
/**
|
||||
* Adds/removes the content of the source file to the source map.
|
||||
*/
|
||||
export function setSourceContent(map: GenMapping, source: string, content: string | null): void {
|
||||
const {
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast(map);
|
||||
const index = put(sources, source);
|
||||
sourcesContent[index] = content;
|
||||
// if (index === originalScopes.length) originalScopes[index] = [];
|
||||
}
|
||||
|
||||
export function setIgnore(map: GenMapping, source: string, ignore = true) {
|
||||
const {
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_ignoreList: ignoreList,
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast(map);
|
||||
const index = put(sources, source);
|
||||
if (index === sourcesContent.length) sourcesContent[index] = null;
|
||||
// if (index === originalScopes.length) originalScopes[index] = [];
|
||||
if (ignore) put(ignoreList, index);
|
||||
else remove(ignoreList, index);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
export function toDecodedMap(map: GenMapping): DecodedSourceMap {
|
||||
const {
|
||||
_mappings: mappings,
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_names: names,
|
||||
_ignoreList: ignoreList,
|
||||
// _originalScopes: originalScopes,
|
||||
// _generatedRanges: generatedRanges,
|
||||
} = cast(map);
|
||||
removeEmptyFinalLines(mappings);
|
||||
|
||||
return {
|
||||
version: 3,
|
||||
file: map.file || undefined,
|
||||
names: names.array,
|
||||
sourceRoot: map.sourceRoot || undefined,
|
||||
sources: sources.array,
|
||||
sourcesContent,
|
||||
mappings,
|
||||
// originalScopes,
|
||||
// generatedRanges,
|
||||
ignoreList: ignoreList.array,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
||||
* a sourcemap, or to JSON.stringify.
|
||||
*/
|
||||
export function toEncodedMap(map: GenMapping): EncodedSourceMap {
|
||||
const decoded = toDecodedMap(map);
|
||||
return Object.assign({}, decoded, {
|
||||
// originalScopes: decoded.originalScopes.map((os) => encodeOriginalScopes(os)),
|
||||
// generatedRanges: encodeGeneratedRanges(decoded.generatedRanges as GeneratedRange[]),
|
||||
mappings: encode(decoded.mappings as SourceMapSegment[][]),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
||||
*/
|
||||
export function fromMap(input: SourceMapInput): GenMapping {
|
||||
const map = new TraceMap(input);
|
||||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
||||
|
||||
putAll(cast(gen)._names, map.names);
|
||||
putAll(cast(gen)._sources, map.sources as string[]);
|
||||
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
||||
cast(gen)._mappings = decodedMappings(map) as GenMapping['_mappings'];
|
||||
// TODO: implement originalScopes/generatedRanges
|
||||
if (map.ignoreList) putAll(cast(gen)._ignoreList, map.ignoreList);
|
||||
|
||||
return gen;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
||||
* passed to the `source-map` library.
|
||||
*/
|
||||
export function allMappings(map: GenMapping): Mapping[] {
|
||||
const out: Mapping[] = [];
|
||||
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
||||
|
||||
for (let i = 0; i < mappings.length; i++) {
|
||||
const line = mappings[i];
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const seg = line[j];
|
||||
|
||||
const generated = { line: i + 1, column: seg[COLUMN] };
|
||||
let source: string | undefined = undefined;
|
||||
let original: Pos | undefined = undefined;
|
||||
let name: string | undefined = undefined;
|
||||
|
||||
if (seg.length !== 1) {
|
||||
source = sources.array[seg[SOURCES_INDEX]];
|
||||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
||||
|
||||
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]];
|
||||
}
|
||||
|
||||
out.push({ generated, source, original, name } as Mapping);
|
||||
}
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
// This split declaration is only so that terser can elminiate the static initialization block.
|
||||
function addSegmentInternal<S extends string | null | undefined>(
|
||||
skipable: boolean,
|
||||
map: GenMapping,
|
||||
genLine: number,
|
||||
genColumn: number,
|
||||
source: S,
|
||||
sourceLine: S extends string ? number : null | undefined,
|
||||
sourceColumn: S extends string ? number : null | undefined,
|
||||
name: S extends string ? string | null | undefined : null | undefined,
|
||||
content: S extends string ? string | null | undefined : null | undefined,
|
||||
): void {
|
||||
const {
|
||||
_mappings: mappings,
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_names: names,
|
||||
// _originalScopes: originalScopes,
|
||||
} = cast(map);
|
||||
const line = getIndex(mappings, genLine);
|
||||
const index = getColumnIndex(line, genColumn);
|
||||
|
||||
if (!source) {
|
||||
if (skipable && skipSourceless(line, index)) return;
|
||||
return insert(line, index, [genColumn]);
|
||||
}
|
||||
|
||||
// Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source
|
||||
// isn't nullish.
|
||||
assert<number>(sourceLine);
|
||||
assert<number>(sourceColumn);
|
||||
|
||||
const sourcesIndex = put(sources, source);
|
||||
const namesIndex = name ? put(names, name) : NO_NAME;
|
||||
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null;
|
||||
// if (sourcesIndex === originalScopes.length) originalScopes[sourcesIndex] = [];
|
||||
|
||||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
||||
return;
|
||||
}
|
||||
|
||||
return insert(
|
||||
line,
|
||||
index,
|
||||
name
|
||||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
||||
: [genColumn, sourcesIndex, sourceLine, sourceColumn],
|
||||
);
|
||||
}
|
||||
|
||||
function assert<T>(_val: unknown): asserts _val is T {
|
||||
// noop.
|
||||
}
|
||||
|
||||
function getIndex<T>(arr: T[][], index: number): T[] {
|
||||
for (let i = arr.length; i <= index; i++) {
|
||||
arr[i] = [];
|
||||
}
|
||||
return arr[index];
|
||||
}
|
||||
|
||||
function getColumnIndex(line: SourceMapSegment[], genColumn: number): number {
|
||||
let index = line.length;
|
||||
for (let i = index - 1; i >= 0; index = i--) {
|
||||
const current = line[i];
|
||||
if (genColumn >= current[COLUMN]) break;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
function insert<T>(array: T[], index: number, value: T) {
|
||||
for (let i = array.length; i > index; i--) {
|
||||
array[i] = array[i - 1];
|
||||
}
|
||||
array[index] = value;
|
||||
}
|
||||
|
||||
function removeEmptyFinalLines(mappings: SourceMapSegment[][]) {
|
||||
const { length } = mappings;
|
||||
let len = length;
|
||||
for (let i = len - 1; i >= 0; len = i, i--) {
|
||||
if (mappings[i].length > 0) break;
|
||||
}
|
||||
if (len < length) mappings.length = len;
|
||||
}
|
||||
|
||||
function putAll<T extends string | number>(setarr: SetArray<T>, array: T[]) {
|
||||
for (let i = 0; i < array.length; i++) put(setarr, array[i]);
|
||||
}
|
||||
|
||||
function skipSourceless(line: SourceMapSegment[], index: number): boolean {
|
||||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
||||
// doesn't generate any useful information.
|
||||
if (index === 0) return true;
|
||||
|
||||
const prev = line[index - 1];
|
||||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
||||
// genrate any new information. Else, this segment will end the source/named segment and point to
|
||||
// a sourceless position, which is useful.
|
||||
return prev.length === 1;
|
||||
}
|
||||
|
||||
function skipSource(
|
||||
line: SourceMapSegment[],
|
||||
index: number,
|
||||
sourcesIndex: number,
|
||||
sourceLine: number,
|
||||
sourceColumn: number,
|
||||
namesIndex: number,
|
||||
): boolean {
|
||||
// A source/named segment at the start of a line gives position at that genColumn
|
||||
if (index === 0) return false;
|
||||
|
||||
const prev = line[index - 1];
|
||||
|
||||
// If the previous segment is sourceless, then we're transitioning to a source.
|
||||
if (prev.length === 1) return false;
|
||||
|
||||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
||||
// provide any new position information.
|
||||
return (
|
||||
sourcesIndex === prev[SOURCES_INDEX] &&
|
||||
sourceLine === prev[SOURCE_LINE] &&
|
||||
sourceColumn === prev[SOURCE_COLUMN] &&
|
||||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)
|
||||
);
|
||||
}
|
||||
|
||||
function addMappingInternal<S extends string | null | undefined>(
|
||||
skipable: boolean,
|
||||
map: GenMapping,
|
||||
mapping: {
|
||||
generated: Pos;
|
||||
source: S;
|
||||
original: S extends string ? Pos : null | undefined;
|
||||
name: S extends string ? string | null | undefined : null | undefined;
|
||||
content: S extends string ? string | null | undefined : null | undefined;
|
||||
},
|
||||
) {
|
||||
const { generated, source, original, name, content } = mapping;
|
||||
if (!source) {
|
||||
return addSegmentInternal(
|
||||
skipable,
|
||||
map,
|
||||
generated.line - 1,
|
||||
generated.column,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
);
|
||||
}
|
||||
assert<Pos>(original);
|
||||
return addSegmentInternal(
|
||||
skipable,
|
||||
map,
|
||||
generated.line - 1,
|
||||
generated.column,
|
||||
source as string,
|
||||
original.line - 1,
|
||||
original.column,
|
||||
name,
|
||||
content,
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
export function addOriginalScope(
|
||||
map: GenMapping,
|
||||
data: {
|
||||
start: Pos;
|
||||
end: Pos;
|
||||
source: string;
|
||||
kind: string;
|
||||
name?: string;
|
||||
variables?: string[];
|
||||
},
|
||||
): OriginalScopeInfo {
|
||||
const { start, end, source, kind, name, variables } = data;
|
||||
const {
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_originalScopes: originalScopes,
|
||||
_names: names,
|
||||
} = cast(map);
|
||||
const index = put(sources, source);
|
||||
if (index === sourcesContent.length) sourcesContent[index] = null;
|
||||
if (index === originalScopes.length) originalScopes[index] = [];
|
||||
|
||||
const kindIndex = put(names, kind);
|
||||
const scope: OriginalScope = name
|
||||
? [start.line - 1, start.column, end.line - 1, end.column, kindIndex, put(names, name)]
|
||||
: [start.line - 1, start.column, end.line - 1, end.column, kindIndex];
|
||||
if (variables) {
|
||||
scope.vars = variables.map((v) => put(names, v));
|
||||
}
|
||||
const len = originalScopes[index].push(scope);
|
||||
return [index, len - 1, variables];
|
||||
}
|
||||
*/
|
||||
|
||||
// Generated Ranges
|
||||
/*
|
||||
export function addGeneratedRange(
|
||||
map: GenMapping,
|
||||
data: {
|
||||
start: Pos;
|
||||
isScope: boolean;
|
||||
originalScope?: OriginalScopeInfo;
|
||||
callsite?: OriginalPos;
|
||||
},
|
||||
): GeneratedRangeInfo {
|
||||
const { start, isScope, originalScope, callsite } = data;
|
||||
const {
|
||||
_originalScopes: originalScopes,
|
||||
_sources: sources,
|
||||
_sourcesContent: sourcesContent,
|
||||
_generatedRanges: generatedRanges,
|
||||
} = cast(map);
|
||||
|
||||
const range: GeneratedRange = [
|
||||
start.line - 1,
|
||||
start.column,
|
||||
0,
|
||||
0,
|
||||
originalScope ? originalScope[0] : -1,
|
||||
originalScope ? originalScope[1] : -1,
|
||||
];
|
||||
if (originalScope?.[2]) {
|
||||
range.bindings = originalScope[2].map(() => [[-1]]);
|
||||
}
|
||||
if (callsite) {
|
||||
const index = put(sources, callsite.source);
|
||||
if (index === sourcesContent.length) sourcesContent[index] = null;
|
||||
if (index === originalScopes.length) originalScopes[index] = [];
|
||||
range.callsite = [index, callsite.line - 1, callsite.column];
|
||||
}
|
||||
if (isScope) range.isScope = true;
|
||||
generatedRanges.push(range);
|
||||
|
||||
return [range, originalScope?.[2]];
|
||||
}
|
||||
|
||||
export function setEndPosition(range: GeneratedRangeInfo, pos: Pos) {
|
||||
range[0][2] = pos.line - 1;
|
||||
range[0][3] = pos.column;
|
||||
}
|
||||
|
||||
export function addBinding(
|
||||
map: GenMapping,
|
||||
range: GeneratedRangeInfo,
|
||||
variable: string,
|
||||
expression: string | BindingExpressionRange,
|
||||
) {
|
||||
const { _names: names } = cast(map);
|
||||
const bindings = (range[0].bindings ||= []);
|
||||
const vars = range[1];
|
||||
|
||||
const index = vars!.indexOf(variable);
|
||||
const binding = getIndex(bindings, index);
|
||||
|
||||
if (typeof expression === 'string') binding[0] = [put(names, expression)];
|
||||
else {
|
||||
const { start } = expression;
|
||||
binding.push([put(names, expression.expression), start.line - 1, start.column]);
|
||||
}
|
||||
}
|
||||
*/
|
||||
82
frontend/node_modules/@jridgewell/gen-mapping/src/set-array.ts
generated
vendored
Normal file
82
frontend/node_modules/@jridgewell/gen-mapping/src/set-array.ts
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
type Key = string | number | symbol;
|
||||
|
||||
/**
|
||||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
||||
* index of the `key` in the backing array.
|
||||
*
|
||||
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
||||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
||||
* and there are never duplicates.
|
||||
*/
|
||||
export class SetArray<T extends Key = Key> {
|
||||
declare private _indexes: Record<T, number | undefined>;
|
||||
declare array: readonly T[];
|
||||
|
||||
constructor() {
|
||||
this._indexes = { __proto__: null } as any;
|
||||
this.array = [];
|
||||
}
|
||||
}
|
||||
|
||||
// Friend-access view over SetArray's members: same shape, but with the
// private `_indexes` field and a mutable `array` exposed. Used by cast().
interface PublicSet<T extends Key> {
  array: T[];
  _indexes: SetArray<T>['_indexes'];
}
|
||||
|
||||
/**
|
||||
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
|
||||
* with public access modifiers.
|
||||
*/
|
||||
function cast<T extends Key>(set: SetArray<T>): PublicSet<T> {
|
||||
return set as any;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the index associated with `key` in the backing array, if it is already present.
|
||||
*/
|
||||
export function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {
|
||||
return cast(setarr)._indexes[key];
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts `key` into the backing array, if it is not already present. Returns
|
||||
* the index of the `key` in the backing array.
|
||||
*/
|
||||
export function put<T extends Key>(setarr: SetArray<T>, key: T): number {
|
||||
// The key may or may not be present. If it is present, it's a number.
|
||||
const index = get(setarr, key);
|
||||
if (index !== undefined) return index;
|
||||
|
||||
const { array, _indexes: indexes } = cast(setarr);
|
||||
|
||||
const length = array.push(key);
|
||||
return (indexes[key] = length - 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Pops the last added item out of the SetArray.
|
||||
*/
|
||||
export function pop<T extends Key>(setarr: SetArray<T>): void {
|
||||
const { array, _indexes: indexes } = cast(setarr);
|
||||
if (array.length === 0) return;
|
||||
|
||||
const last = array.pop()!;
|
||||
indexes[last] = undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the key, if it exists in the set.
|
||||
*/
|
||||
export function remove<T extends Key>(setarr: SetArray<T>, key: T): void {
|
||||
const index = get(setarr, key);
|
||||
if (index === undefined) return;
|
||||
|
||||
const { array, _indexes: indexes } = cast(setarr);
|
||||
for (let i = index + 1; i < array.length; i++) {
|
||||
const k = array[i];
|
||||
array[i - 1] = k;
|
||||
indexes[k]!--;
|
||||
}
|
||||
indexes[key] = undefined;
|
||||
array.pop();
|
||||
}
|
||||
16
frontend/node_modules/@jridgewell/gen-mapping/src/sourcemap-segment.ts
generated
vendored
Normal file
16
frontend/node_modules/@jridgewell/gen-mapping/src/sourcemap-segment.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
// Semantic aliases for the numeric fields packed into a segment tuple.
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;

// A decoded sourcemap segment in one of its three legal arities:
// 1 field  — generated column only (sourceless);
// 4 fields — adds the original source position;
// 5 fields — additionally references a name.
export type SourceMapSegment =
  | [GeneratedColumn]
  | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn]
  | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];

// Tuple offsets for the segment fields above.
export const COLUMN = 0;
export const SOURCES_INDEX = 1;
export const SOURCE_LINE = 2;
export const SOURCE_COLUMN = 3;
export const NAMES_INDEX = 4;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user