diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..9caeae182903b954b7c3847494018617294d2b87
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,30 @@
+MIT License
+
+Copyright (c) 2024 Csaba Bolyós (BladeSzaSza)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+---
+
+Laban Movement Analysis - Complete Suite
+Created by: Csaba Bolyós (BladeSzaSza)
+Contact: bladeszasza@gmail.com
+GitHub: https://github.com/bladeszasza
+LinkedIn: https://www.linkedin.com/in/csaba-bolyós-00a11767/
+Hugging Face: https://huggingface.co/BladeSzaSza
\ No newline at end of file
diff --git a/MCP_README.md b/MCP_README.md
new file mode 100644
index 0000000000000000000000000000000000000000..49ac317f96d3c53cd55973e31cef9f974edc64cf
--- /dev/null
+++ b/MCP_README.md
@@ -0,0 +1,704 @@
+# MCP & Agent Integration for Laban Movement Analysis
+
+This project provides comprehensive MCP (Model Context Protocol) integration and agent-ready APIs for professional movement analysis with pose estimation, AI action recognition, and automation capabilities.
+
+## Quick Start
+
+### 1. Install All Dependencies
+
+```bash
+# Clone the repository
+git clone https://github.com/[your-repo]/labanmovementanalysis
+cd labanmovementanalysis
+
+# Install core dependencies
+pip install -r backend/requirements.txt
+
+# Install MCP and enhanced features
+pip install -r backend/requirements-mcp.txt
+```
+
+### 2. Start the MCP Server
+
+```bash
+# Start MCP server for AI assistants
+python -m backend.mcp_server
+```
+
+### 3. Configure Your AI Assistant
+
+Add to your Claude Desktop or other MCP-compatible assistant configuration:
+
+```json
+{
+ "mcpServers": {
+ "laban-movement-analysis": {
+ "command": "python",
+ "args": ["-m", "backend.mcp_server"],
+ "env": {
+ "PYTHONPATH": "/path/to/labanmovementanalysis"
+ }
+ }
+ }
+}
+```
+
+## Enhanced MCP Tools
+
+### 1. `analyze_video`
+Comprehensive video analysis with enhanced features including SkateFormer AI and multiple pose models.
+
+**Parameters:**
+- `video_path` (string): Path or URL to video (supports YouTube, Vimeo, local files)
+- `model` (string, optional): Advanced pose model selection:
+ - **MediaPipe**: `mediapipe-lite`, `mediapipe-full`, `mediapipe-heavy`
+ - **MoveNet**: `movenet-lightning`, `movenet-thunder`
+ - **YOLO**: `yolo-v8-n/s/m/l`, `yolo-v11-n/s/m/l`
+
+- `enable_visualization` (boolean, optional): Generate annotated video
+- `include_keypoints` (boolean, optional): Include raw keypoint data
+- `use_skateformer` (boolean, optional): Enable AI action recognition
+
+**Examples:**
+```
+Analyze the dance video at https://youtube.com/watch?v=dQw4w9WgXcQ using SkateFormer AI
+Analyze movement in video.mp4 using yolo-v11-s model with visualization
+Process the exercise video with mediapipe-full and include keypoints
+```
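+
+For reference, a call to this tool might carry an arguments payload like the sketch below. Only the parameter names come from the schema above; the values and the exact JSON-RPC framing depend on your MCP client and are illustrative:
+
+```python
+# Hypothetical arguments object for the `analyze_video` tool (values are examples only).
+analyze_video_args = {
+    "video_path": "https://youtube.com/watch?v=dQw4w9WgXcQ",  # local path or URL
+    "model": "yolo-v11-s",            # any model listed above
+    "enable_visualization": True,      # produce an annotated video
+    "include_keypoints": False,        # keep the response compact
+    "use_skateformer": True,           # enable AI action recognition
+}
+```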
+
+### 2. `get_analysis_summary`
+Get human-readable summaries with enhanced AI insights.
+
+**Parameters:**
+- `analysis_id` (string): ID from previous analysis
+
+**Enhanced Output Includes:**
+- SkateFormer action recognition results
+- Movement quality metrics (rhythm, complexity, symmetry)
+- Temporal action segmentation
+- Video source metadata (YouTube/Vimeo titles, etc.)
+
+**Example:**
+```
+Get a detailed summary of analysis dance_2024-01-01T12:00:00 including AI insights
+```
+
+### 3. `list_available_models`
+Comprehensive list of all supported pose estimation models with detailed specifications.
+
+**Enhanced Model Information:**
+- Performance characteristics (speed, accuracy, memory usage)
+- Recommended use cases (real-time, research, production)
+- Hardware requirements (CPU, GPU, memory)
+- Keypoint specifications (17 COCO, 33 MediaPipe)
+
+**Example:**
+```
+What pose estimation models are available for real-time processing?
+List all YOLO v11 model variants with their specifications
+```
+
+### 4. `batch_analyze`
+Enhanced batch processing with parallel execution and progress tracking.
+
+**Parameters:**
+- `video_paths` (array): List of video paths/URLs (supports mixed sources)
+- `model` (string, optional): Pose estimation model for all videos
+- `parallel` (boolean, optional): Enable parallel processing
+- `use_skateformer` (boolean, optional): Enable AI analysis for all videos
+- `output_format` (string, optional): Output format ("summary", "structured", "full")
+
+**Enhanced Features:**
+- Mixed source support (local files + YouTube URLs)
+- Progress tracking and partial results
+- Resource management and optimization
+- Failure recovery and retry logic
+
+**Examples:**
+```
+Analyze all dance videos in the playlist with SkateFormer AI
+Batch process exercise videos using yolo-v11-s with parallel execution
+```
+
+### 5. `compare_movements`
+Advanced movement comparison with AI-powered insights.
+
+**Parameters:**
+- `analysis_id1` (string): First analysis ID
+- `analysis_id2` (string): Second analysis ID
+- `comparison_type` (string, optional): Type of comparison ("basic", "detailed", "ai_enhanced")
+
+**Enhanced Comparison Features:**
+- SkateFormer action similarity analysis
+- Movement quality comparisons (rhythm, complexity, symmetry)
+- Temporal pattern matching
+- Statistical significance testing
+
+**Example:**
+```
+Compare the movement patterns between the two dance analyses with AI insights
+Detailed comparison of exercise form between beginner and expert videos
+```
+
+### 6. `real_time_analysis` (New)
+Start/stop real-time WebRTC analysis.
+
+**Parameters:**
+- `action` (string): "start" or "stop"
+- `model` (string, optional): Real-time optimized model
+- `stream_config` (object, optional): WebRTC configuration
+
+**Example:**
+```
+Start real-time movement analysis using mediapipe-lite
+```
+
+### 7. `filter_videos_advanced` (New)
+Advanced video filtering with AI-powered criteria.
+
+**Parameters:**
+- `video_paths` (array): List of video paths/URLs
+- `criteria` (object): Enhanced filtering criteria including:
+ - Traditional LMA metrics (direction, intensity, fluidity)
+ - SkateFormer actions (dancing, jumping, etc.)
+ - Movement qualities (rhythm, complexity, symmetry)
+ - Temporal characteristics (duration, segment count)
+
+**Example:**
+```
+Filter videos for high-energy dance movements with good rhythm
+Find exercise videos with proper form (high fluidity and symmetry)
+```
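+
+The shape of the `criteria` object is sketched below. The grouping follows the criteria categories listed above, but the exact key names are an assumption - check the tool schema reported by the server before relying on them:
+
+```python
+# Hypothetical `criteria` payload for `filter_videos_advanced` (key names are illustrative).
+criteria = {
+    "traditional": {"direction": "up", "intensity": "high", "min_fluidity": 0.7},
+    "skateformer_actions": ["dancing", "jumping"],
+    "movement_qualities": {"rhythm": 0.8, "symmetry": 0.6},
+    "temporal": {"min_duration_seconds": 5.0, "max_segments": 10},
+}
+```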
+
+## Enhanced Agent API
+
+### Comprehensive Python Agent API
+
+```python
+from gradio_labanmovementanalysis import LabanMovementAnalysis
+from gradio_labanmovementanalysis.agent_api import (
+ LabanAgentAPI,
+ PoseModel,
+ MovementDirection,
+ MovementIntensity,
+ analyze_and_summarize
+)
+
+# Initialize with all features enabled
+analyzer = LabanMovementAnalysis(
+ enable_skateformer=True,
+ enable_webrtc=True,
+ enable_visualization=True
+)
+
+agent_api = LabanAgentAPI(analyzer=analyzer)
+```
+
+### Advanced Analysis Workflows
+
+```python
+# YouTube video analysis with AI
+result = agent_api.analyze(
+ "https://youtube.com/watch?v=...",
+ model=PoseModel.YOLO_V11_S,
+ use_skateformer=True,
+ generate_visualization=True
+)
+
+# Enhanced batch processing
+results = agent_api.batch_analyze(
+ ["video1.mp4", "https://youtube.com/watch?v=...", "https://vimeo.com/..."],
+ model=PoseModel.YOLO_V11_S,
+ parallel=True,
+ use_skateformer=True
+)
+
+# AI-powered movement filtering
+filtered = agent_api.filter_by_movement_advanced(
+ video_paths,
+ skateformer_actions=["dancing", "jumping"],
+ movement_qualities={"rhythm": 0.8, "complexity": 0.6},
+ traditional_criteria={
+ "direction": MovementDirection.UP,
+ "intensity": MovementIntensity.HIGH,
+ "min_fluidity": 0.7
+ }
+)
+
+# Real-time analysis control
+agent_api.start_realtime_analysis(model=PoseModel.MEDIAPIPE_LITE)
+live_metrics = agent_api.get_realtime_metrics()
+agent_api.stop_realtime_analysis()
+```
+
+### Enhanced Quick Functions
+
+```python
+from gradio_labanmovementanalysis import (
+ quick_analyze_enhanced,
+ analyze_and_summarize_with_ai,
+ compare_videos_detailed
+)
+
+# Enhanced analysis with AI
+data = quick_analyze_enhanced(
+ "https://youtube.com/watch?v=...",
+ model="yolo-v11-s",
+ use_skateformer=True
+)
+
+# AI-powered summary
+summary = analyze_and_summarize_with_ai(
+ "dance_video.mp4",
+ include_skateformer=True,
+ detail_level="comprehensive"
+)
+
+# Detailed video comparison
+comparison = compare_videos_detailed(
+ "video1.mp4",
+ "video2.mp4",
+ include_ai_analysis=True
+)
+```
+
+## Enhanced Gradio 5 Agent Features
+
+### Comprehensive API Endpoints
+
+The unified Gradio 5 app exposes these endpoints optimized for agents:
+
+1. **`/analyze_standard`** - Basic LMA analysis
+2. **`/analyze_enhanced`** - Advanced analysis with all features
+3. **`/analyze_agent`** - Agent-optimized structured output
+4. **`/batch_analyze`** - Efficient multiple video processing
+5. **`/filter_videos`** - Movement-based filtering
+6. **`/compare_models`** - Model performance comparison
+7. **`/real_time_start`** - Start WebRTC real-time analysis
+8. **`/real_time_stop`** - Stop WebRTC real-time analysis
+
+### Enhanced Gradio Client Usage
+
+```python
+from gradio_client import Client
+
+# Connect to unified demo
+client = Client("http://localhost:7860")
+
+# Enhanced single analysis
+result = client.predict(
+ video_input="https://youtube.com/watch?v=...",
+ model="yolo-v11-s",
+ enable_viz=True,
+ use_skateformer=True,
+ include_keypoints=False,
+ api_name="/analyze_enhanced"
+)
+
+# Agent-optimized batch processing
+batch_results = client.predict(
+ files=["video1.mp4", "video2.mp4"],
+ model="yolo-v11-s",
+ api_name="/batch_analyze"
+)
+
+# Advanced movement filtering
+filtered_results = client.predict(
+ files=video_list,
+ direction_filter="up",
+ intensity_filter="high",
+ fluidity_threshold=0.7,
+ expansion_threshold=0.5,
+ api_name="/filter_videos"
+)
+
+# Model comparison analysis
+comparison = client.predict(
+ video="test_video.mp4",
+ model1="mediapipe-full",
+ model2="yolo-v11-s",
+ api_name="/compare_models"
+)
+```
+
+## Enhanced Output Formats
+
+### AI-Enhanced Summary Format
+```
+Movement Analysis Summary for "Dance Performance"
+Source: YouTube (10.5 seconds, 30fps)
+Model: YOLO-v11-S with SkateFormer AI
+
+Traditional LMA Metrics:
+• Primary direction: up (65% of frames)
+• Movement intensity: high (80% of frames)
+• Average speed: fast (2.3 units/frame)
+• Fluidity score: 0.85/1.00 (very smooth)
+• Expansion score: 0.72/1.00 (moderately extended)
+
+SkateFormer AI Analysis:
+• Detected actions: dancing (95% confidence), jumping (78% confidence)
+• Movement qualities:
+  - Rhythm: 0.89/1.00 (highly rhythmic)
+  - Complexity: 0.76/1.00 (moderately complex)
+  - Symmetry: 0.68/1.00 (slightly asymmetric)
+  - Smoothness: 0.85/1.00 (very smooth)
+  - Energy: 0.88/1.00 (high energy)
+
+Temporal Analysis:
+• 7 movement segments identified
+• Average segment duration: 1.5 seconds
+• Transition quality: smooth (0.82/1.00)
+
+Overall Assessment: Excellent dance performance with high energy,
+good rhythm, and smooth transitions. Slightly asymmetric but shows
+advanced movement complexity.
+```
+
+### Enhanced Structured Format
+```json
+{
+ "success": true,
+ "video_metadata": {
+ "source": "youtube",
+ "title": "Dance Performance",
+ "duration": 10.5,
+ "platform_id": "dQw4w9WgXcQ"
+ },
+ "model_info": {
+ "pose_model": "yolo-v11-s",
+ "ai_enhanced": true,
+ "skateformer_enabled": true
+ },
+ "lma_metrics": {
+ "direction": "up",
+ "intensity": "high",
+ "speed": "fast",
+ "fluidity": 0.85,
+ "expansion": 0.72
+ },
+ "skateformer_analysis": {
+ "actions": [
+ {"type": "dancing", "confidence": 0.95, "duration": 8.2},
+ {"type": "jumping", "confidence": 0.78, "duration": 2.3}
+ ],
+ "movement_qualities": {
+ "rhythm": 0.89,
+ "complexity": 0.76,
+ "symmetry": 0.68,
+ "smoothness": 0.85,
+ "energy": 0.88
+ },
+ "temporal_segments": 7,
+ "transition_quality": 0.82
+ },
+ "performance_metrics": {
+ "processing_time": 12.3,
+ "frames_analyzed": 315,
+ "keypoints_detected": 24
+ }
+}
+```
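+
+When an agent receives the structured format above as a JSON string, the headline metrics can be pulled out with a few dictionary lookups. The sketch below relies only on the keys shown in the example:
+
+```python
+import json
+
+def headline_metrics(payload: str) -> dict:
+    """Summarize the structured analysis format shown above."""
+    data = json.loads(payload)
+    lma = data.get("lma_metrics", {})
+    actions = data.get("skateformer_analysis", {}).get("actions", [])
+    top_action = max(actions, key=lambda a: a["confidence"])["type"] if actions else None
+    return {
+        "title": data.get("video_metadata", {}).get("title"),
+        "direction": lma.get("direction"),
+        "intensity": lma.get("intensity"),
+        "fluidity": lma.get("fluidity"),
+        "top_action": top_action,
+    }
+```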
+
+### Comprehensive JSON Format
+Complete analysis including frame-by-frame data, SkateFormer attention maps, movement trajectories, and statistical summaries.
+
+## Enhanced Architecture
+
+```
+AI Assistant Integration (Claude, GPT, local models via MCP)
+        │
+        ▼
+MCP Server
+  ├─ Video analysis tools
+  ├─ Enhanced batch processing
+  └─ Real-time WebRTC analysis
+        │
+        ▼
+Enhanced Agent API Layer
+  ├─ Movement filtering
+  ├─ AI-enhanced comparisons
+  └─ Advanced workflows
+        │
+        ▼
+Core Analysis Engine
+  ├─ Video input: local files, YouTube URLs, Vimeo URLs, direct URLs
+  ├─ Pose models: MediaPipe (3), MoveNet (2), YOLO (8)
+  ├─ SkateFormer AI: action recognition, movement qualities,
+  │                  temporal segments, attention analysis
+  ├─ LMA engine: direction, intensity, speed/flow, expansion
+  ├─ WebRTC: live camera, real-time, sub-100ms, adaptive FPS
+  └─ Visualization: pose overlays, motion trails, metric displays,
+                    AI visualizations
+```
+
+## Advanced Agent Workflows
+
+### 1. Comprehensive Dance Analysis Pipeline
+```python
+# Multi-source dance video analysis
+videos = [
+ "local_dance.mp4",
+ "https://youtube.com/watch?v=dance1",
+ "https://vimeo.com/dance2"
+]
+
+# Batch analyze with AI
+results = agent_api.batch_analyze(
+ videos,
+ model=PoseModel.YOLO_V11_S,
+ use_skateformer=True,
+ parallel=True
+)
+
+# Filter for high-quality performances
+excellent_dances = agent_api.filter_by_movement_advanced(
+ videos,
+ skateformer_actions=["dancing"],
+ movement_qualities={
+ "rhythm": 0.8,
+ "complexity": 0.7,
+ "energy": 0.8
+ },
+ traditional_criteria={
+ "intensity": MovementIntensity.HIGH,
+ "min_fluidity": 0.75
+ }
+)
+
+# Generate comprehensive report
+report = agent_api.generate_analysis_report(
+ results,
+ include_comparisons=True,
+ include_recommendations=True
+)
+```
+
+### 2. Real-time Exercise Form Checker
+```python
+# Start real-time analysis
+agent_api.start_realtime_analysis(
+ model=PoseModel.MEDIAPIPE_FULL,
+ enable_skateformer=True
+)
+
+# Monitor form in real-time
+while exercise_in_progress:
+ metrics = agent_api.get_realtime_metrics()
+
+ # Check form quality
+ if metrics["fluidity"] < 0.6:
+ send_feedback("Improve movement smoothness")
+
+ if metrics["symmetry"] < 0.7:
+ send_feedback("Balance left and right movements")
+
+ time.sleep(0.1) # 10Hz monitoring
+
+# Stop and get session summary
+agent_api.stop_realtime_analysis()
+session_summary = agent_api.get_session_summary()
+```
+
+### 3. Movement Pattern Research Workflow
+```python
+# Large-scale analysis for research
+research_videos = get_research_dataset()
+
+# Batch process with comprehensive analysis
+results = agent_api.batch_analyze(
+ research_videos,
+ model=PoseModel.YOLO_V11_L, # High accuracy for research
+ use_skateformer=True,
+ include_keypoints=True, # Full data for research
+ parallel=True
+)
+
+# Statistical analysis
+patterns = agent_api.extract_movement_patterns(
+ results,
+ pattern_types=["temporal", "spatial", "quality"],
+ clustering_method="hierarchical"
+)
+
+# Generate research insights
+insights = agent_api.generate_research_insights(
+ patterns,
+ include_visualizations=True,
+ statistical_tests=True
+)
+```
+
+## Advanced Configuration & Customization
+
+### Environment Variables
+
+```bash
+# Core configuration
+export LABAN_DEFAULT_MODEL="mediapipe-full"
+export LABAN_CACHE_DIR="/path/to/cache"
+export LABAN_MAX_WORKERS=4
+
+# Enhanced features
+export LABAN_ENABLE_SKATEFORMER=true
+export LABAN_ENABLE_WEBRTC=true
+export LABAN_SKATEFORMER_MODEL_PATH="/path/to/skateformer"
+
+# Performance tuning
+export LABAN_GPU_ENABLED=true
+export LABAN_BATCH_SIZE=8
+export LABAN_REALTIME_FPS=30
+
+# Video download configuration
+export LABAN_YOUTUBE_QUALITY="720p"
+export LABAN_MAX_DOWNLOAD_SIZE="500MB"
+export LABAN_TEMP_DIR="/tmp/laban_downloads"
+```
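+
+If your own scripts need to honor the same settings, reading them with fallbacks keeps behaviour consistent with the server. A minimal sketch (variable names are taken from the list above, defaults are illustrative):
+
+```python
+import os
+
+# Mirror the environment variables documented above, with fallback defaults.
+DEFAULT_MODEL = os.environ.get("LABAN_DEFAULT_MODEL", "mediapipe-full")
+MAX_WORKERS = int(os.environ.get("LABAN_MAX_WORKERS", "4"))
+ENABLE_SKATEFORMER = os.environ.get("LABAN_ENABLE_SKATEFORMER", "false").lower() == "true"
+```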
+
+### Custom MCP Tools
+
+```python
+# Add custom MCP tool
+from backend.mcp_server import server
+
+@server.tool("custom_movement_analysis")
+async def custom_analysis(
+ video_path: str,
+ custom_params: dict
+) -> dict:
+ """Custom movement analysis with specific parameters."""
+ # Your custom implementation
+ return results
+
+# Register enhanced filters
+@server.tool("filter_by_sport_type")
+async def filter_by_sport(
+ videos: list,
+ sport_type: str
+) -> dict:
+ """Filter videos by detected sport type using SkateFormer."""
+ # Implementation using SkateFormer sport classification
+ return filtered_videos
+```
+
+### WebRTC Configuration
+
+```python
+# Custom WebRTC configuration
+webrtc_config = {
+ "video_constraints": {
+ "width": 1280,
+ "height": 720,
+ "frameRate": 30
+ },
+ "processing_config": {
+ "max_latency_ms": 100,
+ "quality_adaptation": True,
+ "model_switching": True
+ }
+}
+
+agent_api.configure_webrtc(webrtc_config)
+```
+
+## Contributing to Agent Features
+
+### Adding New MCP Tools
+
+1. Define tool in `backend/mcp_server.py`
+2. Implement core logic in agent API
+3. Add comprehensive documentation
+4. Include usage examples
+5. Write integration tests
+
+### Extending Agent API
+
+1. Add methods to `LabanAgentAPI` class (see the sketch after this list)
+2. Ensure compatibility with existing workflows
+3. Add structured output formats
+4. Include error handling and validation
+5. Update documentation
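+
+As a starting point, new capabilities can be layered on top of the existing `analyze()` method without touching the core engine. The sketch below adds one convenience method; the method name and threshold are illustrative, not part of the shipped API:
+
+```python
+from gradio_labanmovementanalysis.agent_api import LabanAgentAPI, PoseModel
+
+class ExtendedAgentAPI(LabanAgentAPI):
+    """Example extension: keep only videos whose movement is smoother than a threshold."""
+
+    def find_fluid_videos(self, video_paths, min_fluidity: float = 0.7):
+        # Reuse the structured AnalysisResult objects returned by the base class.
+        results = [self.analyze(path, model=PoseModel.MEDIAPIPE) for path in video_paths]
+        return [r for r in results if r.success and r.fluidity_score >= min_fluidity]
+```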
+
+### Enhancing SkateFormer Integration
+
+1. Extend action recognition types
+2. Add custom movement quality metrics
+3. Implement temporal analysis features
+4. Add visualization components
+5. Validate with research datasets
+
+## Resources & References
+
+- [MCP Specification](https://github.com/anthropics/mcp)
+- [SkateFormer Research Paper](https://kaist-viclab.github.io/SkateFormer_site/)
+- [Gradio 5 Documentation](https://www.gradio.app/docs)
+- [Unified Demo Application](demo/app.py)
+- [Core Component Code](backend/gradio_labanmovementanalysis/)
+
+## Production Deployment
+
+### Docker Deployment
+
+```dockerfile
+FROM python:3.9-slim
+
+COPY . /app
+WORKDIR /app
+
+RUN pip install -r backend/requirements.txt
+RUN pip install -r backend/requirements-mcp.txt
+
+EXPOSE 7860 8080
+
+CMD ["python", "-m", "backend.mcp_server"]
+```
+
+### Kubernetes Configuration
+
+```yaml
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: laban-mcp-server
+spec:
+ replicas: 3
+ selector:
+ matchLabels:
+ app: laban-mcp
+ template:
+ metadata:
+ labels:
+ app: laban-mcp
+ spec:
+ containers:
+ - name: mcp-server
+ image: laban-movement-analysis:latest
+ ports:
+ - containerPort: 8080
+ env:
+ - name: LABAN_MAX_WORKERS
+ value: "2"
+ - name: LABAN_ENABLE_SKATEFORMER
+ value: "true"
+```
+
+---
+
+**Transform your AI assistant into a movement analysis expert with comprehensive MCP integration and agent-ready automation.**
\ No newline at end of file
diff --git a/README.md b/README.md
index 59581e2ed393bd0a5cd44d73880baedbc8021025..bf4527c73b1b16d30e3ba31989693639dcab9f5e 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,1144 @@
---
-title: Laban Movement Analysis
-emoji: π»
+title: Laban Movement Analysis - Complete Suite
+emoji: 🎭
colorFrom: blue
-colorTo: blue
+colorTo: green
sdk: gradio
-sdk_version: 5.32.1
-app_file: app.py
+sdk_version: 5.0.0
+app_file: space.py
pinned: false
-license: apache-2.0
-short_description: Professional movement analysis from pose estimation
+license: mit
+tags:
+ - laban-movement-analysis
+ - pose-estimation
+ - movement-analysis
+ - video-analysis
+ - webrtc
+ - youtube
+ - vimeo
+ - mcp
+ - agent-ready
+ - computer-vision
+ - mediapipe
+ - yolo
+ - gradio
+short_description: Professional movement analysis with pose estimation and AI
---
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+# `gradio_labanmovementanalysis`
+
+
+A Gradio 5 component for video movement analysis using Laban Movement Analysis (LMA) with MCP support for AI agents
+
+## Installation
+
+```bash
+pip install gradio_labanmovementanalysis
+```
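+
+For a minimal programmatic quick start (outside the full Gradio demo below), the analyzer can be driven directly. This is a sketch that assumes the `process_video` signature used throughout this README:
+
+```python
+from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+analyzer = LabanMovementAnalysis()
+
+# Returns (analysis dict, optional path to the annotated video).
+json_output, viz_video = analyzer.process_video(
+    "dance_video.mp4",      # local file or YouTube/Vimeo URL
+    model="mediapipe",      # "mediapipe", "movenet", or "yolo"
+    enable_visualization=True,
+    include_keypoints=False,
+)
+print(json_output.get("movement_analysis", {}).get("summary", {}))
+```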
+
+## Usage
+
+```python
+"""
+Unified Laban Movement Analysis Demo
+Comprehensive interface combining all features:
+- Standard LMA analysis
+- Enhanced features (WebRTC, YouTube/Vimeo)
+- Agent API (batch processing, filtering)
+- Real-time analysis
+- Model comparison
+
+Created by: Csaba Bolyós (BladeSzaSza)
+Contact: bladeszasza@gmail.com
+GitHub: https://github.com/bladeszasza
+LinkedIn: https://www.linkedin.com/in/csaba-bolyós-00a11767/
+Hugging Face: https://huggingface.co/BladeSzaSza
+
+Heavy Beta Version - Under Active Development
+"""
+
+import gradio as gr
+import sys
+from pathlib import Path
+from typing import Dict, Any, List, Tuple
+
+# Add parent directory to path
+sys.path.insert(0, str(Path(__file__).parent.parent / "backend"))
+
+from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+# Import agent API if available
+try:
+ from gradio_labanmovementanalysis.agent_api import (
+ LabanAgentAPI,
+ PoseModel,
+ MovementDirection,
+ MovementIntensity
+ )
+ HAS_AGENT_API = True
+except ImportError:
+ HAS_AGENT_API = False
+
+# Import WebRTC components if available
+try:
+ from gradio_webrtc import WebRTC
+ from gradio_labanmovementanalysis.webrtc_handler import (
+ webrtc_detection,
+ get_rtc_configuration
+ )
+ HAS_WEBRTC = True
+except ImportError as e:
+ print(f"WebRTC import failed: {e}")
+ HAS_WEBRTC = False
+
+# Initialize components
+try:
+ # Initialize with WebRTC support
+ analyzer = LabanMovementAnalysis(
+ enable_webrtc=True,
+ enable_visualization=True
+ )
+ print("Core features initialized successfully")
+except Exception as e:
+ print(f"Warning: Some features may not be available: {e}")
+ analyzer = LabanMovementAnalysis(enable_webrtc=False)
+
+# Initialize agent API if available
+agent_api = None
+if HAS_AGENT_API:
+ try:
+ agent_api = LabanAgentAPI()
+ except Exception as e:
+ print(f"Warning: Agent API not available: {e}")
+ agent_api = None
+
+
+def process_video_standard(video, model, enable_viz, include_keypoints):
+ """Standard video processing function."""
+ if video is None:
+ return None, None
+
+ try:
+ json_output, video_output = analyzer.process_video(
+ video,
+ model=model,
+ enable_visualization=enable_viz,
+ include_keypoints=include_keypoints
+ )
+ return json_output, video_output
+ except Exception as e:
+ return {"error": str(e)}, None
+
+
+def process_video_enhanced(video_input, model, enable_viz, include_keypoints):
+ """Enhanced video processing with all new features."""
+ if not video_input:
+ return {"error": "No video provided"}, None
+
+ try:
+ # Handle both file upload and URL input
+ video_path = video_input.name if hasattr(video_input, 'name') else video_input
+
+ json_result, viz_result = analyzer.process_video(
+ video_path,
+ model=model,
+ enable_visualization=enable_viz,
+ include_keypoints=include_keypoints
+ )
+ return json_result, viz_result
+ except Exception as e:
+ error_result = {"error": str(e)}
+ return error_result, None
+
+
+def process_video_for_agent(video, model, output_format="summary"):
+ """Process video with agent-friendly output format."""
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not video:
+ return {"error": "No video provided"}
+
+ try:
+ model_enum = PoseModel(model)
+ result = agent_api.analyze(video, model=model_enum, generate_visualization=False)
+
+ if output_format == "summary":
+ return {"summary": agent_api.get_movement_summary(result)}
+ elif output_format == "structured":
+ return {
+ "success": result.success,
+ "direction": result.dominant_direction.value,
+ "intensity": result.dominant_intensity.value,
+ "speed": result.dominant_speed,
+ "fluidity": result.fluidity_score,
+ "expansion": result.expansion_score,
+ "segments": len(result.movement_segments)
+ }
+ else: # json
+ return result.raw_data
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def batch_process_videos(files, model):
+ """Process multiple videos in batch."""
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not files:
+ return {"error": "No videos provided"}
+
+ try:
+ video_paths = [f.name for f in files]
+ results = agent_api.batch_analyze(video_paths, model=PoseModel(model), parallel=True)
+
+ output = {
+ "total_videos": len(results),
+ "successful": sum(1 for r in results if r.success),
+ "failed": sum(1 for r in results if not r.success),
+ "results": []
+ }
+
+ for result in results:
+ output["results"].append({
+ "video": Path(result.video_path).name,
+ "success": result.success,
+ "summary": agent_api.get_movement_summary(result) if result.success else result.error
+ })
+
+ return output
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def filter_videos_by_movement(files, direction, intensity, min_fluidity, min_expansion):
+ """Filter videos based on movement characteristics."""
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not files:
+ return {"error": "No videos provided"}
+
+ try:
+ video_paths = [f.name for f in files]
+
+ dir_filter = MovementDirection(direction) if direction != "any" else None
+ int_filter = MovementIntensity(intensity) if intensity != "any" else None
+
+ filtered = agent_api.filter_by_movement(
+ video_paths,
+ direction=dir_filter,
+ intensity=int_filter,
+ min_fluidity=min_fluidity if min_fluidity > 0 else None,
+ min_expansion=min_expansion if min_expansion > 0 else None
+ )
+
+ return {
+ "total_analyzed": len(video_paths),
+ "matching_videos": len(filtered),
+ "matches": [
+ {
+ "video": Path(r.video_path).name,
+ "direction": r.dominant_direction.value,
+ "intensity": r.dominant_intensity.value,
+ "fluidity": r.fluidity_score,
+ "expansion": r.expansion_score
+ }
+ for r in filtered
+ ]
+ }
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def compare_models(video, model1, model2):
+ """Compare two different pose models on the same video."""
+ if not video:
+ return "No video provided"
+
+ try:
+ # Analyze with both models
+ result1, _ = analyzer.process_video(video, model=model1, enable_visualization=False)
+ result2, _ = analyzer.process_video(video, model=model2, enable_visualization=False)
+
+ # Extract key metrics for comparison
+ def extract_metrics(result):
+ summary = result.get("movement_analysis", {}).get("summary", {})
+ return {
+ "direction": summary.get("direction", {}).get("dominant", "unknown"),
+ "intensity": summary.get("intensity", {}).get("dominant", "unknown"),
+ "speed": summary.get("speed", {}).get("dominant", "unknown"),
+ "frame_count": result.get("video_info", {}).get("frame_count", 0)
+ }
+
+ metrics1 = extract_metrics(result1)
+ metrics2 = extract_metrics(result2)
+
+ # Create comparison table data
+ comparison_data = [
+ ["Direction", metrics1["direction"], metrics2["direction"],
+ "β" if metrics1["direction"] == metrics2["direction"] else "β"],
+ ["Intensity", metrics1["intensity"], metrics2["intensity"],
+ "β" if metrics1["intensity"] == metrics2["intensity"] else "β"],
+ ["Speed", metrics1["speed"], metrics2["speed"],
+ "β" if metrics1["speed"] == metrics2["speed"] else "β"],
+ ["Frames Processed", str(metrics1["frame_count"]), str(metrics2["frame_count"]),
+ "β" if metrics1["frame_count"] == metrics2["frame_count"] else "β"]
+ ]
+
+ return comparison_data
+
+ except Exception as e:
+ return [["Error", str(e), "", ""]]
+
+
+def start_webrtc_stream(model):
+ """Start WebRTC real-time analysis."""
+ try:
+ success = analyzer.start_webrtc_stream(model)
+ if success:
+ return "π’ Stream Active", {"status": "streaming", "model": model}
+ else:
+ return "π΄ Failed to start", {"status": "error"}
+ except Exception as e:
+ return f"π΄ Error: {str(e)}", {"status": "error"}
+
+
+def stop_webrtc_stream():
+ """Stop WebRTC real-time analysis."""
+ try:
+ success = analyzer.stop_webrtc_stream()
+ if success:
+ return "π‘ Stream Stopped", {"status": "stopped"}
+ else:
+ return "π΄ Failed to stop", {"status": "error"}
+ except Exception as e:
+ return f"π΄ Error: {str(e)}", {"status": "error"}
+
+
+def create_unified_demo():
+ """Create the unified comprehensive demo."""
+
+ with gr.Blocks(
+ title="Laban Movement Analysis - Complete Suite by Csaba BolyΓ³s",
+ theme=gr.themes.Soft(),
+ css="""
+ .main-header {
+ background: linear-gradient(135deg, #40826D 0%, #2E5E4A 50%, #1B3A2F 100%);
+ color: white;
+ padding: 30px;
+ border-radius: 10px;
+ margin-bottom: 20px;
+ text-align: center;
+ }
+ .feature-card {
+ border: 1px solid #e1e5e9;
+ border-radius: 8px;
+ padding: 16px;
+ margin: 8px 0;
+ background: #f8f9fa;
+ }
+ .json-output {
+ max-height: 600px;
+ overflow-y: auto;
+ font-family: monospace;
+ font-size: 12px;
+ }
+ .author-info {
+ background: linear-gradient(135deg, #40826D 0%, #2E5E4A 100%);
+ color: white;
+ padding: 15px;
+ border-radius: 8px;
+ margin: 10px 0;
+ text-align: center;
+ }
+ """
+ ) as demo:
+
+ # Main Header
+ gr.HTML("""
+ <div class="main-header">
+ <h1>Laban Movement Analysis - Complete Suite</h1>
+ <p>Professional movement analysis with pose estimation, AI action recognition,
+ real-time processing, and agent automation</p>
+ <p>Supports YouTube/Vimeo URLs • WebRTC Streaming • 20+ Pose Models • MCP Integration</p>
+ <p>Version 0.01-beta - Heavy Beta Under Active Development</p>
+ </div>
+ """)
+
+ with gr.Tabs():
+ # Tab 1: Standard Analysis
+ with gr.Tab("π¬ Standard Analysis"):
+ gr.Markdown("""
+ ### Classic Laban Movement Analysis
+ Upload a video file to analyze movement using traditional LMA metrics with pose estimation.
+ """)
+
+ with gr.Row():
+ with gr.Column(scale=1):
+ video_input_std = gr.Video(
+ label="Upload Video",
+ sources=["upload"],
+ format="mp4"
+ )
+
+ model_dropdown_std = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Pose Estimation Model"
+ )
+
+ with gr.Row():
+ enable_viz_std = gr.Checkbox(
+ value=True,
+ label="Generate Visualization"
+ )
+
+ include_keypoints_std = gr.Checkbox(
+ value=False,
+ label="Include Keypoints"
+ )
+
+ process_btn_std = gr.Button("Analyze Movement", variant="primary")
+
+ gr.Examples(
+ examples=[
+ ["examples/balette.mov"],
+ ["examples/balette.mp4"],
+ ],
+ inputs=video_input_std,
+ label="Example Videos"
+ )
+
+ with gr.Column(scale=2):
+ with gr.Tab("Analysis Results"):
+ json_output_std = gr.JSON(
+ label="Movement Analysis (JSON)",
+ elem_classes=["json-output"]
+ )
+
+ with gr.Tab("Visualization"):
+ video_output_std = gr.Video(
+ label="Annotated Video",
+ format="mp4"
+ )
+
+ gr.Markdown("""
+ **Visualization Guide:**
+ - **Skeleton**: Pose keypoints and connections
+ - **Trails**: Motion history (fading lines)
+ - **Arrows**: Movement direction indicators
+ - **Colors**: Green (low) → Orange (medium) → Red (high) intensity
+ """)
+
+ process_btn_std.click(
+ fn=process_video_standard,
+ inputs=[video_input_std, model_dropdown_std, enable_viz_std, include_keypoints_std],
+ outputs=[json_output_std, video_output_std],
+ api_name="analyze_standard"
+ )
+
+ # Tab 2: Enhanced Analysis
+ with gr.Tab("π Enhanced Analysis"):
+ gr.Markdown("""
+ ### Advanced Analysis with AI and URL Support
+ Analyze videos from URLs (YouTube/Vimeo), use advanced pose models, and get AI-powered insights.
+ """)
+
+ with gr.Row():
+ with gr.Column(scale=1):
+ gr.HTML('')
+ gr.Markdown("**Video Input**")
+
+ # Changed from textbox to file upload as requested
+ video_input_enh = gr.File(
+ label="Upload Video or Drop File",
+ file_types=["video"],
+ type="filepath"
+ )
+
+ # URL input option
+ url_input_enh = gr.Textbox(
+ label="Or Enter Video URL",
+ placeholder="YouTube URL, Vimeo URL, or direct video URL",
+ info="Leave file upload empty to use URL"
+ )
+
+ gr.Examples(
+ examples=[
+ ["examples/balette.mov"],
+ ["https://www.youtube.com/shorts/RX9kH2l3L8U"],
+ ["https://vimeo.com/815392738"]
+ ],
+ inputs=url_input_enh,
+ label="Example URLs"
+ )
+
+ gr.Markdown("**Model Selection**")
+
+ model_select_enh = gr.Dropdown(
+ choices=[
+ # MediaPipe variants
+ "mediapipe-lite", "mediapipe-full", "mediapipe-heavy",
+ # MoveNet variants
+ "movenet-lightning", "movenet-thunder",
+ # YOLO variants (added X models)
+ "yolo-v8-n", "yolo-v8-s", "yolo-v8-m", "yolo-v8-l", "yolo-v8-x",
+ # YOLO v11 variants
+ "yolo-v11-n", "yolo-v11-s", "yolo-v11-m", "yolo-v11-l", "yolo-v11-x"
+ ],
+ value="mediapipe-full",
+ label="Advanced Pose Models",
+ info="17+ model variants available"
+ )
+
+ gr.Markdown("**Analysis Options**")
+
+ with gr.Row():
+ enable_viz_enh = gr.Checkbox(value=True, label="Visualization")
+
+ with gr.Row():
+ include_keypoints_enh = gr.Checkbox(value=False, label="Raw Keypoints")
+
+ analyze_btn_enh = gr.Button("Enhanced Analysis", variant="primary", size="lg")
+ gr.HTML('')
+
+ with gr.Column(scale=2):
+ with gr.Tab("π Analysis"):
+ analysis_output_enh = gr.JSON(label="Enhanced Analysis Results")
+
+ with gr.Tab("π₯ Visualization"):
+ viz_output_enh = gr.Video(label="Annotated Video")
+
+ def process_enhanced_input(file_input, url_input, model, enable_viz, include_keypoints):
+ """Process either file upload or URL input."""
+ video_source = file_input if file_input else url_input
+ return process_video_enhanced(video_source, model, enable_viz, include_keypoints)
+
+ analyze_btn_enh.click(
+ fn=process_enhanced_input,
+ inputs=[video_input_enh, url_input_enh, model_select_enh, enable_viz_enh, include_keypoints_enh],
+ outputs=[analysis_output_enh, viz_output_enh],
+ api_name="analyze_enhanced"
+ )
+
+ # Tab 3: Agent API
+ with gr.Tab("π€ Agent API"):
+ gr.Markdown("""
+ ### AI Agent & Automation Features
+ Batch processing, filtering, and structured outputs designed for AI agents and automation.
+ """)
+
+ with gr.Tabs():
+ with gr.Tab("Single Analysis"):
+ with gr.Row():
+ with gr.Column():
+ video_input_agent = gr.Video(label="Upload Video", sources=["upload"])
+ model_select_agent = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Model"
+ )
+ output_format_agent = gr.Radio(
+ choices=["summary", "structured", "json"],
+ value="summary",
+ label="Output Format"
+ )
+ analyze_btn_agent = gr.Button("Analyze", variant="primary")
+
+ with gr.Column():
+ output_display_agent = gr.JSON(label="Agent Output")
+
+ analyze_btn_agent.click(
+ fn=process_video_for_agent,
+ inputs=[video_input_agent, model_select_agent, output_format_agent],
+ outputs=output_display_agent,
+ api_name="analyze_agent"
+ )
+
+ with gr.Tab("Batch Processing"):
+ with gr.Row():
+ with gr.Column():
+ batch_files = gr.File(
+ label="Upload Multiple Videos",
+ file_count="multiple",
+ file_types=["video"]
+ )
+ batch_model = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Model"
+ )
+ batch_btn = gr.Button("Process Batch", variant="primary")
+
+ with gr.Column():
+ batch_output = gr.JSON(label="Batch Results")
+
+ batch_btn.click(
+ fn=batch_process_videos,
+ inputs=[batch_files, batch_model],
+ outputs=batch_output,
+ api_name="batch_analyze"
+ )
+
+ with gr.Tab("Movement Filter"):
+ with gr.Row():
+ with gr.Column():
+ filter_files = gr.File(
+ label="Videos to Filter",
+ file_count="multiple",
+ file_types=["video"]
+ )
+
+ with gr.Group():
+ direction_filter = gr.Dropdown(
+ choices=["any", "up", "down", "left", "right", "stationary"],
+ value="any",
+ label="Direction Filter"
+ )
+ intensity_filter = gr.Dropdown(
+ choices=["any", "low", "medium", "high"],
+ value="any",
+ label="Intensity Filter"
+ )
+ fluidity_threshold = gr.Slider(0.0, 1.0, 0.0, label="Min Fluidity")
+ expansion_threshold = gr.Slider(0.0, 1.0, 0.0, label="Min Expansion")
+
+ filter_btn = gr.Button("Apply Filters", variant="primary")
+
+ with gr.Column():
+ filter_output = gr.JSON(label="Filtered Results")
+
+ filter_btn.click(
+ fn=filter_videos_by_movement,
+ inputs=[filter_files, direction_filter, intensity_filter,
+ fluidity_threshold, expansion_threshold],
+ outputs=filter_output,
+ api_name="filter_videos"
+ )
+
+ # Tab 4: Real-time WebRTC
+ with gr.Tab("πΉ Real-time Analysis"):
+ gr.Markdown("""
+ ### Live Camera Movement Analysis
+ Real-time pose detection and movement analysis from your webcam using WebRTC.
+ **Grant camera permissions when prompted for best experience.**
+ """)
+
+ # Official Gradio WebRTC approach (compatible with NumPy 1.x)
+ if HAS_WEBRTC:
+
+ # Get RTC configuration
+ rtc_config = get_rtc_configuration()
+
+ # Custom CSS following official guide
+ css_webrtc = """
+ .my-group {max-width: 480px !important; max-height: 480px !important;}
+ .my-column {display: flex !important; justify-content: center !important; align-items: center !important;}
+ """
+
+ with gr.Column(elem_classes=["my-column"]):
+ with gr.Group(elem_classes=["my-group"]):
+ # Official WebRTC Component
+ webrtc_stream = WebRTC(
+ label="π₯ Live Camera Stream",
+ rtc_configuration=rtc_config
+ )
+
+ webrtc_model = gr.Dropdown(
+ choices=["mediapipe-lite", "movenet-lightning", "yolo-v11-n"],
+ value="mediapipe-lite",
+ label="Pose Model",
+ info="Optimized for real-time processing"
+ )
+
+ confidence_slider = gr.Slider(
+ label="Detection Confidence",
+ minimum=0.0,
+ maximum=1.0,
+ step=0.05,
+ value=0.5,
+ info="Higher = fewer false positives"
+ )
+
+ # Official WebRTC streaming setup following Gradio guide
+ webrtc_stream.stream(
+ fn=webrtc_detection,
+ inputs=[webrtc_stream, webrtc_model, confidence_slider],
+ outputs=[webrtc_stream],
+ time_limit=10 # Following official guide: 10 seconds per user
+ )
+
+ # Info display
+ gr.HTML("""
+ <div>
+ <h4>WebRTC Pose Analysis</h4>
+ <p>Real-time movement analysis using your webcam</p>
+ <h4>Privacy</h4>
+ <p>Processing happens locally - no video data stored</p>
+ <h4>Usage</h4>
+ <ul>
+ <li>Grant camera permission when prompted</li>
+ <li>Move in front of camera to see pose detection</li>
+ <li>Adjust confidence threshold as needed</li>
+ </ul>
+ </div>
+ """)
+
+ else:
+ # Fallback if WebRTC component not available
+ gr.HTML("""
+ <div>
+ <h4>WebRTC Component Required</h4>
+ <p>To enable real-time camera analysis, install:</p>
+ <pre>pip install gradio-webrtc twilio</pre>
+ <p>Use the Enhanced Analysis tab for video files in the meantime</p>
+ </div>
+ """)
+
+ # Tab 5: Model Comparison
+ with gr.Tab("βοΈ Model Comparison"):
+ gr.Markdown("""
+ ### Compare Pose Estimation Models
+ Analyze the same video with different models to compare accuracy and results.
+ """)
+
+ with gr.Column():
+ comparison_video = gr.Video(
+ label="Video for Comparison",
+ sources=["upload"]
+ )
+
+ with gr.Row():
+ model1_comp = gr.Dropdown(
+ choices=["mediapipe-full", "movenet-thunder", "yolo-v11-s"],
+ value="mediapipe-full",
+ label="Model 1"
+ )
+
+ model2_comp = gr.Dropdown(
+ choices=["mediapipe-full", "movenet-thunder", "yolo-v11-s"],
+ value="yolo-v11-s",
+ label="Model 2"
+ )
+
+ compare_btn = gr.Button("Compare Models", variant="primary")
+
+ comparison_results = gr.DataFrame(
+ headers=["Metric", "Model 1", "Model 2", "Match"],
+ label="Comparison Results"
+ )
+
+ compare_btn.click(
+ fn=compare_models,
+ inputs=[comparison_video, model1_comp, model2_comp],
+ outputs=comparison_results,
+ api_name="compare_models"
+ )
+
+ # Tab 6: Documentation
+ with gr.Tab("π Documentation"):
+ gr.Markdown("""
+ # Complete Feature Documentation
+
+ ## Video Input Support
+ - **Local Files**: MP4, AVI, MOV, WebM formats
+ - **YouTube**: Automatic download from YouTube URLs
+ - **Vimeo**: Automatic download from Vimeo URLs
+ - **Direct URLs**: Any direct video file URL
+
+ ## Pose Estimation Models
+
+ ### MediaPipe (Google) - 33 3D Landmarks
+ - **Lite**: Fastest CPU performance
+ - **Full**: Balanced accuracy/speed (recommended)
+ - **Heavy**: Highest accuracy
+
+ ### MoveNet (Google) - 17 COCO Keypoints
+ - **Lightning**: Mobile-optimized, very fast
+ - **Thunder**: Higher accuracy variant
+
+ ### YOLO (Ultralytics) - 17 COCO Keypoints
+ - **v8 variants**: n/s/m/l/x sizes (nano to extra-large)
+ - **v11 variants**: Latest with improved accuracy (n/s/m/l/x)
+ - **Multi-person**: Supports multiple people in frame
+
+ ## Real-time WebRTC
+
+ - **Live Camera**: Direct webcam access via WebRTC
+ - **Low Latency**: Sub-100ms processing
+ - **Adaptive Quality**: Automatic performance optimization
+ - **Live Overlay**: Real-time pose and metrics display
+
+ ## Agent & MCP Integration
+
+ ### API Endpoints
+ - `/analyze_standard` - Basic LMA analysis
+ - `/analyze_enhanced` - Advanced analysis with all features
+ - `/analyze_agent` - Agent-optimized output
+ - `/batch_analyze` - Multiple video processing
+ - `/filter_videos` - Movement-based filtering
+ - `/compare_models` - Model comparison
+
+ ### MCP Server
+ ```bash
+ # Start MCP server for AI assistants
+ python -m backend.mcp_server
+ ```
+
+ ### Python API
+ ```python
+ from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+ # Initialize with all features
+ analyzer = LabanMovementAnalysis(
+ enable_webrtc=True
+ )
+
+ # Analyze YouTube video
+ result, viz = analyzer.process_video(
+ "https://youtube.com/watch?v=...",
+ model="yolo-v11-s"
+ )
+ ```
+
+ ## Output Formats
+
+ ### Summary Format
+ Human-readable movement analysis summary.
+
+ ### Structured Format
+ ```json
+ {
+ "success": true,
+ "direction": "up",
+ "intensity": "medium",
+ "fluidity": 0.85,
+ "expansion": 0.72
+ }
+ ```
+
+ ### Full JSON Format
+ Complete frame-by-frame analysis with all metrics.
+
+ ## Applications
+
+ - **Sports**: Technique analysis and performance tracking
+ - **Dance**: Choreography analysis and movement quality
+ - **Healthcare**: Physical therapy and rehabilitation
+ - **Research**: Large-scale movement pattern studies
+ - **Entertainment**: Interactive applications and games
+ - **Education**: Movement teaching and body awareness
+
+ ## Integration Examples
+
+ ### Gradio Client
+ ```python
+ from gradio_client import Client
+
+ client = Client("http://localhost:7860")
+ result = client.predict(
+ video="path/to/video.mp4",
+ model="mediapipe-full",
+ api_name="/analyze_enhanced"
+ )
+ ```
+
+ ### Batch Processing
+ ```python
+ results = client.predict(
+ files=["video1.mp4", "video2.mp4"],
+ model="yolo-v11-s",
+ api_name="/batch_analyze"
+ )
+ ```
+ """)
+ gr.HTML("""
+
+ """)
+
+ # Footer with proper attribution
+ gr.HTML("""
+ <div class="author-info">
+ <p>Laban Movement Analysis - Complete Suite | Heavy Beta Version</p>
+ <p>Created by Csaba Bolyós | Powered by MediaPipe, MoveNet & YOLO</p>
+ <p>
+ <a href="https://github.com/bladeszasza">GitHub</a> •
+ <a href="https://huggingface.co/BladeSzaSza">Hugging Face</a> •
+ <a href="https://www.linkedin.com/in/csaba-bolyós-00a11767/">LinkedIn</a>
+ </p>
+ </div>
+ """)
+
+ return demo
+
+
+if __name__ == "__main__":
+ demo = create_unified_demo()
+ demo.launch(
+ server_name="0.0.0.0",
+ server_port=7860,
+ share=False,
+ show_error=True,
+ favicon_path=None
+ )
+
+```
+
+## `LabanMovementAnalysis`
+
+### Initialization
+
+| name | type | default | description |
+| --- | --- | --- | --- |
+| `default_model` | `str` | `"mediapipe"` | Default pose estimation model ("mediapipe", "movenet", "yolo") |
+| `enable_visualization` | `bool` | `True` | Whether to generate visualization video by default |
+| `include_keypoints` | `bool` | `False` | Whether to include raw keypoints in JSON output |
+| `enable_webrtc` | `bool` | `False` | Whether to enable WebRTC real-time analysis |
+| `label` | `typing.Optional[str]` | `None` | Component label |
+| `every` | `typing.Optional[float]` | `None` | None |
+| `show_label` | `typing.Optional[bool]` | `None` | None |
+| `container` | `bool` | `True` | None |
+| `scale` | `typing.Optional[int]` | `None` | None |
+| `min_width` | `int` | `160` | None |
+| `interactive` | `typing.Optional[bool]` | `None` | None |
+| `visible` | `bool` | `True` | None |
+| `elem_id` | `typing.Optional[str]` | `None` | None |
+| `elem_classes` | `typing.Optional[typing.List[str]]` | `None` | None |
+| `render` | `bool` | `True` | None |
+
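+A typical initialization using the documented parameters (the values shown match the defaults from the table above, apart from `enable_webrtc`):
+
+```python
+from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+analyzer = LabanMovementAnalysis(
+    default_model="mediapipe",     # "mediapipe", "movenet", or "yolo"
+    enable_visualization=True,     # generate an annotated video by default
+    include_keypoints=False,       # keep JSON output compact
+    enable_webrtc=True,            # opt in to real-time WebRTC analysis
+)
+```
+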
+### User function
+
+The impact on the user's predict function varies depending on whether the component is used as an input or output for an event (or both).
+
+- When used as an Input, the component only impacts the input signature of the user function.
+- When used as an output, the component only impacts the return signature of the user function.
+
+The code snippet below is accurate in cases where the component is used as both an input and an output.
+
+- **As output:** Is passed, processed data for analysis.
+- **As input:** Should return, analysis results.
+
+ ```python
+ def predict(
+ value: typing.Dict[str, typing.Any]
+ ) -> typing.Any:
+ return value
+ ```
+
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd7f51815bfba32fbc968534efcf27762bbc33d3
--- /dev/null
+++ b/app.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+"""
+Laban Movement Analysis - Complete Suite
+Hugging Face Spaces Deployment
+
+Created by: Csaba Bolyós (BladeSzaSza)
+Contact: bladeszasza@gmail.com
+GitHub: https://github.com/bladeszasza
+LinkedIn: https://www.linkedin.com/in/csaba-bolyós-00a11767/
+Hugging Face: https://huggingface.co/BladeSzaSza
+
+Heavy Beta Version - Under Active Development
+"""
+
+import sys
+from pathlib import Path
+
+# Import version info
+try:
+ from version import __version__, __author__, get_version_info
+ print(f"π Laban Movement Analysis v{__version__} by {__author__}")
+except ImportError:
+ __version__ = "0.01-beta"
+ print("π Laban Movement Analysis - Version info not found")
+
+# Add demo directory to path
+sys.path.insert(0, str(Path(__file__).parent / "demo"))
+
+try:
+ from app import create_unified_demo
+
+ if __name__ == "__main__":
+ demo = create_unified_demo()
+ demo.launch(
+ server_name="0.0.0.0",
+ server_port=7860,
+ share=False,
+ show_error=True,
+ favicon_path=None,
+ show_api=True
+ )
+
+except ImportError as e:
+ print(f"Import error: {e}")
+ print("Make sure all dependencies are installed.")
+
+except Exception as e:
+ print(f"Error launching demo: {e}")
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/__init__.py b/backend/gradio_labanmovementanalysis/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..437eff1a7761da3b78d47b090acf49272b00e48b
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/__init__.py
@@ -0,0 +1,51 @@
+from .labanmovementanalysis import LabanMovementAnalysis
+from . import video_utils
+from . import pose_estimation
+from . import notation_engine
+from . import json_generator
+from . import visualizer
+
+# Import agent API if available
+try:
+ from . import agent_api
+ from .agent_api import LabanAgentAPI, quick_analyze, analyze_and_summarize
+ _has_agent_api = True
+except ImportError:
+ _has_agent_api = False
+
+__all__ = [
+ 'LabanMovementAnalysis',
+ 'video_utils',
+ 'pose_estimation',
+ 'notation_engine',
+ 'json_generator',
+ 'visualizer'
+]
+
+# Add agent API to exports if available
+if _has_agent_api:
+ __all__.extend(['agent_api', 'LabanAgentAPI', 'quick_analyze', 'analyze_and_summarize'])
+
+# Import enhanced features if available
+try:
+ from . import video_downloader
+ from .video_downloader import VideoDownloader, SmartVideoInput
+ __all__.extend(['video_downloader', 'VideoDownloader', 'SmartVideoInput'])
+except ImportError:
+ pass
+
+try:
+ from . import webrtc_handler
+ from .webrtc_handler import WebRTCMovementAnalyzer, WebRTCGradioInterface
+ __all__.extend(['webrtc_handler', 'WebRTCMovementAnalyzer', 'WebRTCGradioInterface'])
+except ImportError:
+ pass
+
+try:
+ # SkateFormer integration reserved for Version 2
+ # from . import skateformer_integration
+ # from .skateformer_integration import SkateFormerAnalyzer, SkateFormerConfig
+ # __all__.extend(['skateformer_integration', 'SkateFormerAnalyzer', 'SkateFormerConfig'])
+ pass
+except ImportError:
+ pass
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/__init__.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..89ffe115df7206e93b7820759cad29c50b6d9a4a
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/__init__.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/__init__.cpython-313.pyc b/backend/gradio_labanmovementanalysis/__pycache__/__init__.cpython-313.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a8c43f2dada7032ce4ab167d320dffb5766483b6
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/__init__.cpython-313.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/agent_api.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/agent_api.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b429a4f87837600fb96669fd281368ea3074a7e9
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/agent_api.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/json_generator.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/json_generator.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0ded7015588b5a4f61e37a8903f9312d3847bd03
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/json_generator.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/labanmovementanalysis.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/labanmovementanalysis.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1a1ff1dfdccf9d6b02c66a478cd8194aebdbb06d
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/labanmovementanalysis.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/labanmovementanalysis.cpython-313.pyc b/backend/gradio_labanmovementanalysis/__pycache__/labanmovementanalysis.cpython-313.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0ced717299013854b1956b19953444c864e54239
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/labanmovementanalysis.cpython-313.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/notation_engine.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/notation_engine.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8889ef70002c979f892f62c077c9c27c42e50ef1
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/notation_engine.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/pose_estimation.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/pose_estimation.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f42c10fb5a02496e4fe840464029a2dbf3dc5b3c
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/pose_estimation.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/skateformer_integration.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/skateformer_integration.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e7b2571c47d69b747221998639a2e22a2f00a3dc
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/skateformer_integration.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/video_downloader.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/video_downloader.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..780c88603872e38c6d599593994cc75da33df396
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/video_downloader.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/video_utils.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/video_utils.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2d11ff68a5f5b8c1eaa2c471ae93dfa5895cb984
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/video_utils.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/visualizer.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/visualizer.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6525e58c083c1d92875c4f1970436a2620d0ad82
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/visualizer.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/__pycache__/webrtc_handler.cpython-312.pyc b/backend/gradio_labanmovementanalysis/__pycache__/webrtc_handler.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..43702b26865e83f071358155eb4d347ad58b54d2
Binary files /dev/null and b/backend/gradio_labanmovementanalysis/__pycache__/webrtc_handler.cpython-312.pyc differ
diff --git a/backend/gradio_labanmovementanalysis/agent_api.py b/backend/gradio_labanmovementanalysis/agent_api.py
new file mode 100644
index 0000000000000000000000000000000000000000..abfc52a74ab6f38dc0753a9f4788d26236839254
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/agent_api.py
@@ -0,0 +1,434 @@
+"""
+Agent-friendly API for Laban Movement Analysis
+Provides simplified interfaces for AI agents and automation
+"""
+
+import asyncio
+import json
+import logging
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Union
+from dataclasses import dataclass, asdict
+from enum import Enum
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+# Import the main component
+from .labanmovementanalysis import LabanMovementAnalysis
+
+
+class PoseModel(str, Enum):
+ """Available pose estimation models"""
+ MEDIAPIPE = "mediapipe"
+ MOVENET = "movenet"
+ YOLO = "yolo"
+
+
+class MovementIntensity(str, Enum):
+ """Movement intensity levels"""
+ LOW = "low"
+ MEDIUM = "medium"
+ HIGH = "high"
+
+
+class MovementDirection(str, Enum):
+ """Movement direction categories"""
+ UP = "up"
+ DOWN = "down"
+ LEFT = "left"
+ RIGHT = "right"
+ STATIONARY = "stationary"
+
+
+@dataclass
+class AnalysisResult:
+ """Structured analysis result for agents"""
+ success: bool
+ video_path: str
+ duration_seconds: float
+ fps: float
+ dominant_direction: MovementDirection
+ dominant_intensity: MovementIntensity
+ dominant_speed: str
+ movement_segments: List[Dict[str, Any]]
+ fluidity_score: float
+ expansion_score: float
+ error: Optional[str] = None
+ raw_data: Optional[Dict[str, Any]] = None
+ visualization_path: Optional[str] = None
+
+
+class LabanAgentAPI:
+ """
+ Simplified API for AI agents to analyze movement in videos.
+ Provides high-level methods with structured outputs.
+ """
+
+ # Gradio component compatibility
+ events = {}
+
+ def __init__(self, default_model: PoseModel = PoseModel.MEDIAPIPE):
+ """
+ Initialize the agent API.
+
+ Args:
+ default_model: Default pose estimation model to use
+ """
+ self.analyzer = LabanMovementAnalysis(default_model=default_model.value)
+ self.default_model = default_model
+ self._analysis_cache = {}
+
+ def analyze(
+ self,
+ video_path: Union[str, Path],
+ model: Optional[PoseModel] = None,
+ generate_visualization: bool = False,
+ cache_results: bool = True
+ ) -> AnalysisResult:
+ """
+ Analyze a video and return structured results.
+
+ Args:
+ video_path: Path to video file
+ model: Pose estimation model to use (defaults to instance default)
+ generate_visualization: Whether to create annotated video
+ cache_results: Whether to cache results for later retrieval
+
+ Returns:
+ AnalysisResult with structured movement data
+ """
+ try:
+ # Convert path to string
+ video_path = str(video_path)
+
+ # Use default model if not specified
+ if model is None:
+ model = self.default_model
+
+ # Process video
+ json_output, viz_video = self.analyzer.process_video(
+ video_path,
+ model=model.value,
+ enable_visualization=generate_visualization,
+ include_keypoints=False
+ )
+
+ # Parse results
+ result = self._parse_analysis_output(
+ json_output,
+ video_path,
+ viz_video
+ )
+
+ # Cache if requested
+ if cache_results:
+ cache_key = f"{Path(video_path).stem}_{model.value}"
+ self._analysis_cache[cache_key] = result
+
+ return result
+
+ except Exception as e:
+ logger.error(f"Analysis failed: {str(e)}")
+ return AnalysisResult(
+ success=False,
+ video_path=str(video_path),
+ duration_seconds=0.0,
+ fps=0.0,
+ dominant_direction=MovementDirection.STATIONARY,
+ dominant_intensity=MovementIntensity.LOW,
+ dominant_speed="unknown",
+ movement_segments=[],
+ fluidity_score=0.0,
+ expansion_score=0.0,
+ error=str(e)
+ )
+
+ async def analyze_async(
+ self,
+ video_path: Union[str, Path],
+ model: Optional[PoseModel] = None,
+ generate_visualization: bool = False
+ ) -> AnalysisResult:
+ """
+ Asynchronously analyze a video.
+
+ Args:
+ video_path: Path to video file
+ model: Pose estimation model to use
+ generate_visualization: Whether to create annotated video
+
+ Returns:
+ AnalysisResult with structured movement data
+ """
+        loop = asyncio.get_running_loop()
+ return await loop.run_in_executor(
+ None,
+ self.analyze,
+ video_path,
+ model,
+ generate_visualization
+ )
+
+ def batch_analyze(
+ self,
+ video_paths: List[Union[str, Path]],
+ model: Optional[PoseModel] = None,
+ parallel: bool = True,
+ max_workers: int = 4
+ ) -> List[AnalysisResult]:
+ """
+ Analyze multiple videos in batch.
+
+ Args:
+ video_paths: List of video file paths
+ model: Pose estimation model to use
+ parallel: Whether to process in parallel
+ max_workers: Maximum parallel workers
+
+ Returns:
+ List of AnalysisResult objects
+ """
+ if parallel:
+ from concurrent.futures import ThreadPoolExecutor
+
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
+ futures = [
+ executor.submit(self.analyze, path, model, False)
+ for path in video_paths
+ ]
+ results = [future.result() for future in futures]
+ else:
+ results = [
+ self.analyze(path, model, False)
+ for path in video_paths
+ ]
+
+ return results
+
+ def get_movement_summary(self, analysis_result: AnalysisResult) -> str:
+ """
+ Generate a natural language summary of movement analysis.
+
+ Args:
+ analysis_result: Analysis result to summarize
+
+ Returns:
+ Human-readable summary string
+ """
+ if not analysis_result.success:
+ return f"Analysis failed: {analysis_result.error}"
+
+ summary_parts = [
+ f"Movement Analysis Summary for {Path(analysis_result.video_path).name}:",
+ f"- Duration: {analysis_result.duration_seconds:.1f} seconds",
+ f"- Primary movement direction: {analysis_result.dominant_direction.value}",
+ f"- Movement intensity: {analysis_result.dominant_intensity.value}",
+ f"- Movement speed: {analysis_result.dominant_speed}",
+ f"- Fluidity score: {analysis_result.fluidity_score:.2f}/1.00",
+ f"- Expansion score: {analysis_result.expansion_score:.2f}/1.00"
+ ]
+
+ if analysis_result.movement_segments:
+ summary_parts.append(f"- Detected {len(analysis_result.movement_segments)} movement segments")
+
+ return "\n".join(summary_parts)
+
+ def compare_videos(
+ self,
+ video_path1: Union[str, Path],
+ video_path2: Union[str, Path],
+ model: Optional[PoseModel] = None
+ ) -> Dict[str, Any]:
+ """
+ Compare movement patterns between two videos.
+
+ Args:
+ video_path1: First video path
+ video_path2: Second video path
+ model: Pose estimation model to use
+
+ Returns:
+ Comparison results dictionary
+ """
+ # Analyze both videos
+ result1 = self.analyze(video_path1, model, False)
+ result2 = self.analyze(video_path2, model, False)
+
+ if not result1.success or not result2.success:
+ return {
+ "success": False,
+ "error": "One or both analyses failed"
+ }
+
+ # Compare metrics
+ comparison = {
+ "success": True,
+ "video1": Path(video_path1).name,
+ "video2": Path(video_path2).name,
+ "metrics": {
+ "direction_match": result1.dominant_direction == result2.dominant_direction,
+ "intensity_match": result1.dominant_intensity == result2.dominant_intensity,
+ "speed_match": result1.dominant_speed == result2.dominant_speed,
+ "fluidity_difference": abs(result1.fluidity_score - result2.fluidity_score),
+ "expansion_difference": abs(result1.expansion_score - result2.expansion_score)
+ },
+ "details": {
+ "video1": {
+ "direction": result1.dominant_direction.value,
+ "intensity": result1.dominant_intensity.value,
+ "speed": result1.dominant_speed,
+ "fluidity": result1.fluidity_score,
+ "expansion": result1.expansion_score
+ },
+ "video2": {
+ "direction": result2.dominant_direction.value,
+ "intensity": result2.dominant_intensity.value,
+ "speed": result2.dominant_speed,
+ "fluidity": result2.fluidity_score,
+ "expansion": result2.expansion_score
+ }
+ }
+ }
+
+ return comparison
+
+ def filter_by_movement(
+ self,
+ video_paths: List[Union[str, Path]],
+ direction: Optional[MovementDirection] = None,
+ intensity: Optional[MovementIntensity] = None,
+ min_fluidity: Optional[float] = None,
+ min_expansion: Optional[float] = None
+ ) -> List[AnalysisResult]:
+ """
+ Filter videos based on movement characteristics.
+
+ Args:
+ video_paths: List of video paths to analyze
+ direction: Filter by movement direction
+ intensity: Filter by movement intensity
+ min_fluidity: Minimum fluidity score
+ min_expansion: Minimum expansion score
+
+ Returns:
+ List of AnalysisResults that match criteria
+ """
+ # Analyze all videos
+ results = self.batch_analyze(video_paths)
+
+ # Apply filters
+ filtered = []
+ for result in results:
+ if not result.success:
+ continue
+
+ if direction and result.dominant_direction != direction:
+ continue
+
+ if intensity and result.dominant_intensity != intensity:
+ continue
+
+            if min_fluidity is not None and result.fluidity_score < min_fluidity:
+                continue
+
+            if min_expansion is not None and result.expansion_score < min_expansion:
+ continue
+
+ filtered.append(result)
+
+ return filtered
+
+ def _parse_analysis_output(
+ self,
+ json_output: Dict[str, Any],
+ video_path: str,
+ viz_path: Optional[str]
+ ) -> AnalysisResult:
+ """Parse JSON output into structured result"""
+ try:
+ # Extract video info
+ video_info = json_output.get("video_info", {})
+ duration = video_info.get("duration_seconds", 0.0)
+ fps = video_info.get("fps", 0.0)
+
+ # Extract movement summary
+ movement_analysis = json_output.get("movement_analysis", {})
+ summary = movement_analysis.get("summary", {})
+
+ # Parse dominant metrics
+ direction_data = summary.get("direction", {})
+ dominant_direction = direction_data.get("dominant", "stationary")
+ dominant_direction = MovementDirection(dominant_direction.lower())
+
+ intensity_data = summary.get("intensity", {})
+ dominant_intensity = intensity_data.get("dominant", "low")
+ dominant_intensity = MovementIntensity(dominant_intensity.lower())
+
+ speed_data = summary.get("speed", {})
+ dominant_speed = speed_data.get("dominant", "unknown")
+
+ # Get segments
+ segments = summary.get("movement_segments", [])
+
+ # Calculate aggregate scores
+ frames = movement_analysis.get("frames", [])
+ fluidity_scores = [f.get("metrics", {}).get("fluidity", 0) for f in frames]
+ expansion_scores = [f.get("metrics", {}).get("expansion", 0) for f in frames]
+
+ avg_fluidity = sum(fluidity_scores) / len(fluidity_scores) if fluidity_scores else 0.0
+ avg_expansion = sum(expansion_scores) / len(expansion_scores) if expansion_scores else 0.0
+
+ return AnalysisResult(
+ success=True,
+ video_path=video_path,
+ duration_seconds=duration,
+ fps=fps,
+ dominant_direction=dominant_direction,
+ dominant_intensity=dominant_intensity,
+ dominant_speed=dominant_speed,
+ movement_segments=segments,
+ fluidity_score=avg_fluidity,
+ expansion_score=avg_expansion,
+ raw_data=json_output,
+ visualization_path=viz_path
+ )
+
+ except Exception as e:
+ logger.error(f"Failed to parse analysis output: {str(e)}")
+ return AnalysisResult(
+ success=False,
+ video_path=video_path,
+ duration_seconds=0.0,
+ fps=0.0,
+ dominant_direction=MovementDirection.STATIONARY,
+ dominant_intensity=MovementIntensity.LOW,
+ dominant_speed="unknown",
+ movement_segments=[],
+ fluidity_score=0.0,
+ expansion_score=0.0,
+ error=f"Parse error: {str(e)}",
+ raw_data=json_output
+ )
+
+
+# Convenience functions for quick analysis
+def quick_analyze(video_path: Union[str, Path]) -> Dict[str, Any]:
+ """Quick analysis with default settings, returns dict"""
+ api = LabanAgentAPI()
+ result = api.analyze(video_path)
+ return asdict(result)
+
+# Gradio component compatibility
+quick_analyze.events = {}
+
+
+def analyze_and_summarize(video_path: Union[str, Path]) -> str:
+ """Analyze video and return natural language summary"""
+ api = LabanAgentAPI()
+ result = api.analyze(video_path)
+ return api.get_movement_summary(result)
+
+# Gradio component compatibility
+analyze_and_summarize.events = {}
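+
+
+if __name__ == "__main__":
+    # Hedged usage sketch, not part of the shipped API: shows how an agent might
+    # drive LabanAgentAPI. Run as a module (python -m
+    # backend.gradio_labanmovementanalysis.agent_api) so the relative import
+    # resolves; the clip paths below are placeholders, not bundled files.
+    api = LabanAgentAPI(default_model=PoseModel.MEDIAPIPE)
+
+    # Single clip: structured result plus a natural-language summary.
+    result = api.analyze("clips/solo_dance.mp4", generate_visualization=False)
+    print(api.get_movement_summary(result))
+
+    # Batch + filtering: keep only fluid, high-intensity clips.
+    matches = api.filter_by_movement(
+        ["clips/solo_dance.mp4", "clips/warmup.mp4"],
+        intensity=MovementIntensity.HIGH,
+        min_fluidity=0.6,
+    )
+    print(f"{len(matches)} clip(s) matched the filter")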
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/json_generator.py b/backend/gradio_labanmovementanalysis/json_generator.py
new file mode 100644
index 0000000000000000000000000000000000000000..7272aa1fd03851eec23a82cba2f6c9b89c7d0617
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/json_generator.py
@@ -0,0 +1,250 @@
+"""
+JSON generator for converting movement annotations and keypoint data to structured JSON format.
+"""
+
+import json
+from typing import List, Dict, Any, Optional
+from datetime import datetime
+
+from .pose_estimation import PoseResult, Keypoint
+from .notation_engine import MovementMetrics, Direction, Intensity, Speed
+
+
+def generate_json(
+ movement_metrics: List[MovementMetrics],
+ pose_results: Optional[List[List[PoseResult]]] = None,
+ video_metadata: Optional[Dict[str, Any]] = None,
+ include_keypoints: bool = False
+) -> Dict[str, Any]:
+ """
+ Generate structured JSON output from movement analysis results.
+
+ Args:
+ movement_metrics: List of movement metrics per frame
+ pose_results: Optional pose keypoints per frame
+ video_metadata: Optional video metadata (fps, dimensions, etc.)
+ include_keypoints: Whether to include raw keypoint data
+
+ Returns:
+ Dictionary containing formatted analysis results
+ """
+ output = {
+ "analysis_metadata": {
+ "timestamp": datetime.now().isoformat(),
+ "version": "1.0.0",
+ "model_info": video_metadata.get("model_info", {}) if video_metadata else {}
+ },
+ "video_info": {},
+ "movement_analysis": {
+ "frame_count": len(movement_metrics),
+ "frames": []
+ }
+ }
+
+ # Add video metadata if provided
+ if video_metadata:
+ output["video_info"] = {
+ "fps": video_metadata.get("fps", 30.0),
+ "duration_seconds": len(movement_metrics) / video_metadata.get("fps", 30.0),
+ "width": video_metadata.get("width"),
+ "height": video_metadata.get("height"),
+ "frame_count": video_metadata.get("frame_count", len(movement_metrics))
+ }
+
+ # Process each frame's metrics
+ for i, metrics in enumerate(movement_metrics):
+ frame_data = {
+ "frame_index": metrics.frame_index,
+ "timestamp": metrics.timestamp,
+ "metrics": {
+ "direction": metrics.direction.value,
+ "intensity": metrics.intensity.value,
+ "speed": metrics.speed.value,
+ "velocity": round(metrics.velocity, 4),
+ "acceleration": round(metrics.acceleration, 4),
+ "fluidity": round(metrics.fluidity, 3),
+ "expansion": round(metrics.expansion, 3),
+ "total_displacement": round(metrics.total_displacement, 4)
+ }
+ }
+
+ # Add displacement if available
+ if metrics.center_displacement:
+ frame_data["metrics"]["center_displacement"] = {
+ "x": round(metrics.center_displacement[0], 4),
+ "y": round(metrics.center_displacement[1], 4)
+ }
+
+ # Add keypoints if requested and available
+ if include_keypoints and pose_results and i < len(pose_results):
+ frame_poses = pose_results[i]
+ if frame_poses:
+ frame_data["keypoints"] = []
+ for pose in frame_poses:
+ keypoint_data = {
+ "person_id": pose.person_id,
+ "points": []
+ }
+ for kp in pose.keypoints:
+ keypoint_data["points"].append({
+ "name": kp.name,
+ "x": round(kp.x, 4),
+ "y": round(kp.y, 4),
+ "confidence": round(kp.confidence, 3)
+ })
+ frame_data["keypoints"].append(keypoint_data)
+
+ output["movement_analysis"]["frames"].append(frame_data)
+
+ # Add summary statistics
+ output["movement_analysis"]["summary"] = _generate_summary(movement_metrics)
+
+ return output
+
+
+def _generate_summary(metrics: List[MovementMetrics]) -> Dict[str, Any]:
+ """Generate summary statistics from movement metrics."""
+ if not metrics:
+ return {}
+
+ # Count occurrences of each category
+ direction_counts = {}
+ intensity_counts = {}
+ speed_counts = {}
+
+ velocities = []
+ accelerations = []
+ fluidities = []
+ expansions = []
+
+ for m in metrics:
+ # Count categories
+ direction_counts[m.direction.value] = direction_counts.get(m.direction.value, 0) + 1
+ intensity_counts[m.intensity.value] = intensity_counts.get(m.intensity.value, 0) + 1
+ speed_counts[m.speed.value] = speed_counts.get(m.speed.value, 0) + 1
+
+ # Collect numeric values
+ velocities.append(m.velocity)
+ accelerations.append(m.acceleration)
+ fluidities.append(m.fluidity)
+ expansions.append(m.expansion)
+
+ # Calculate statistics
+ import numpy as np
+
+ summary = {
+ "direction": {
+ "distribution": direction_counts,
+ "dominant": max(direction_counts, key=direction_counts.get)
+ },
+ "intensity": {
+ "distribution": intensity_counts,
+ "dominant": max(intensity_counts, key=intensity_counts.get)
+ },
+ "speed": {
+ "distribution": speed_counts,
+ "dominant": max(speed_counts, key=speed_counts.get)
+ },
+ "velocity": {
+ "mean": round(float(np.mean(velocities)), 4),
+ "std": round(float(np.std(velocities)), 4),
+ "min": round(float(np.min(velocities)), 4),
+ "max": round(float(np.max(velocities)), 4)
+ },
+ "acceleration": {
+ "mean": round(float(np.mean(accelerations)), 4),
+ "std": round(float(np.std(accelerations)), 4),
+ "min": round(float(np.min(accelerations)), 4),
+ "max": round(float(np.max(accelerations)), 4)
+ },
+ "fluidity": {
+ "mean": round(float(np.mean(fluidities)), 3),
+ "std": round(float(np.std(fluidities)), 3)
+ },
+ "expansion": {
+ "mean": round(float(np.mean(expansions)), 3),
+ "std": round(float(np.std(expansions)), 3)
+ }
+ }
+
+ # Identify significant movement segments
+ summary["movement_segments"] = _identify_movement_segments(metrics)
+
+ return summary
+
+
+def _identify_movement_segments(metrics: List[MovementMetrics]) -> List[Dict[str, Any]]:
+ """Identify significant movement segments (e.g., bursts of activity)."""
+ segments = []
+
+    # Simple segmentation based on intensity changes. Intensities are ranked
+    # explicitly; comparing the enum string values would be lexicographic and wrong.
+    intensity_rank = {Intensity.LOW: 0, Intensity.MEDIUM: 1, Intensity.HIGH: 2}
+    current_segment = None
+    intensity_threshold = Intensity.MEDIUM
+
+    for i, m in enumerate(metrics):
+        if intensity_rank[m.intensity] >= intensity_rank[intensity_threshold]:
+ if current_segment is None:
+ # Start new segment
+ current_segment = {
+ "start_frame": i,
+ "start_time": m.timestamp,
+ "peak_velocity": m.velocity,
+ "dominant_direction": m.direction.value
+ }
+ else:
+ # Update segment
+ if m.velocity > current_segment["peak_velocity"]:
+ current_segment["peak_velocity"] = m.velocity
+ current_segment["dominant_direction"] = m.direction.value
+ else:
+ if current_segment is not None:
+ # End segment
+ current_segment["end_frame"] = i - 1
+ current_segment["end_time"] = metrics[i-1].timestamp if i > 0 else 0
+ current_segment["duration"] = (
+ current_segment["end_time"] - current_segment["start_time"]
+ )
+ current_segment["peak_velocity"] = round(current_segment["peak_velocity"], 4)
+ segments.append(current_segment)
+ current_segment = None
+
+ # Handle segment that extends to end
+ if current_segment is not None:
+ current_segment["end_frame"] = len(metrics) - 1
+ current_segment["end_time"] = metrics[-1].timestamp
+ current_segment["duration"] = (
+ current_segment["end_time"] - current_segment["start_time"]
+ )
+ current_segment["peak_velocity"] = round(current_segment["peak_velocity"], 4)
+ segments.append(current_segment)
+
+ return segments
+
+
+def save_json(data: Dict[str, Any], output_path: str, pretty: bool = True) -> None:
+ """
+ Save JSON data to file.
+
+ Args:
+ data: Dictionary to save
+ output_path: Path to output file
+ pretty: Whether to format JSON with indentation
+ """
+ with open(output_path, 'w') as f:
+ if pretty:
+ json.dump(data, f, indent=2, sort_keys=False)
+ else:
+ json.dump(data, f)
+
+
+def format_for_display(data: Dict[str, Any]) -> str:
+ """
+ Format JSON data for display in Gradio.
+
+ Args:
+ data: Dictionary to format
+
+ Returns:
+ Formatted JSON string
+ """
+ return json.dumps(data, indent=2, sort_keys=False)
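+
+
+if __name__ == "__main__":
+    # Hedged usage sketch: feeds two synthetic MovementMetrics frames through
+    # generate_json/save_json. Run as a module (python -m
+    # backend.gradio_labanmovementanalysis.json_generator) so the relative
+    # imports resolve; the values and output filename are illustrative only.
+    demo_metrics = [
+        MovementMetrics(frame_index=0, timestamp=0.0),
+        MovementMetrics(
+            frame_index=1,
+            timestamp=1 / 30.0,
+            direction=Direction.UP,
+            intensity=Intensity.MEDIUM,
+            speed=Speed.MODERATE,
+            velocity=0.05,
+            fluidity=0.8,
+            expansion=0.6,
+        ),
+    ]
+    report = generate_json(demo_metrics, video_metadata={"fps": 30.0})
+    save_json(report, "demo_analysis.json")
+    print(format_for_display(report["movement_analysis"]["summary"]))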
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/labanmovementanalysis.py b/backend/gradio_labanmovementanalysis/labanmovementanalysis.py
new file mode 100644
index 0000000000000000000000000000000000000000..6fc66d04b0d063b33ca41dec08899e843450d8a7
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/labanmovementanalysis.py
@@ -0,0 +1,442 @@
+"""
+Custom Gradio v5 component for video-based pose analysis with LMA-inspired metrics.
+"""
+
+import gradio as gr
+from gradio.components.base import Component
+from typing import Dict, Any, Optional, Tuple, List, Union
+import tempfile
+import os
+import numpy as np
+
+from .video_utils import extract_frames, get_video_info
+from .pose_estimation import get_pose_estimator
+from .notation_engine import analyze_pose_sequence
+from .json_generator import generate_json, format_for_display
+from .visualizer import PoseVisualizer
+from .video_downloader import SmartVideoInput
+
+# Advanced features reserved for Version 2
+# SkateFormer AI integration will be available in future release
+
+try:
+ from .webrtc_handler import WebRTCMovementAnalyzer, WebRTCGradioInterface
+ HAS_WEBRTC = True
+except ImportError:
+ HAS_WEBRTC = False
+
+
+# SkateFormerCompatibility class removed for Version 1 stability
+# Will be reimplemented in Version 2 with enhanced AI features
+
+
+class LabanMovementAnalysis(Component):
+ """
+ Gradio component for video-based pose analysis with Laban Movement Analysis metrics.
+ """
+
+ # Component metadata
+ COMPONENT_TYPE = "composite"
+ DEFAULT_MODEL = "mediapipe"
+
+ def __init__(self,
+ default_model: str = DEFAULT_MODEL,
+ enable_visualization: bool = True,
+ include_keypoints: bool = False,
+ enable_webrtc: bool = False,
+ label: Optional[str] = None,
+ every: Optional[float] = None,
+ show_label: Optional[bool] = None,
+ container: bool = True,
+ scale: Optional[int] = None,
+ min_width: int = 160,
+ interactive: Optional[bool] = None,
+ visible: bool = True,
+ elem_id: Optional[str] = None,
+ elem_classes: Optional[List[str]] = None,
+ render: bool = True,
+ **kwargs):
+ """
+ Initialize the Laban Movement Analysis component.
+
+ Args:
+ default_model: Default pose estimation model ("mediapipe", "movenet", "yolo")
+ enable_visualization: Whether to generate visualization video by default
+ include_keypoints: Whether to include raw keypoints in JSON output
+ enable_webrtc: Whether to enable WebRTC real-time analysis
+ label: Component label
+ ... (other standard Gradio component args)
+ """
+ super().__init__(
+ label=label,
+ every=every,
+ show_label=show_label,
+ container=container,
+ scale=scale,
+ min_width=min_width,
+ interactive=interactive,
+ visible=visible,
+ elem_id=elem_id,
+ elem_classes=elem_classes,
+ render=render,
+ **kwargs
+ )
+
+ self.default_model = default_model
+ self.enable_visualization = enable_visualization
+ self.include_keypoints = include_keypoints
+ self.enable_webrtc = enable_webrtc and HAS_WEBRTC
+
+ # Cache for pose estimators
+ self._estimators = {}
+
+ # Video input handler for URLs
+ self.video_input = SmartVideoInput()
+
+ # SkateFormer features reserved for Version 2
+
+ self.webrtc_analyzer = None
+ if self.enable_webrtc:
+ try:
+ self.webrtc_analyzer = WebRTCMovementAnalyzer(model=default_model)
+ except Exception as e:
+ print(f"Warning: Failed to initialize WebRTC: {e}")
+ self.enable_webrtc = False
+
+ def preprocess(self, payload: Dict[str, Any]) -> Dict[str, Any]:
+ """
+ Preprocess input from the frontend.
+
+ Args:
+ payload: Input data containing video file and options
+
+ Returns:
+ Processed data for analysis
+ """
+ if not payload:
+ return None
+
+ # Extract video file path
+ video_data = payload.get("video")
+ if not video_data:
+ return None
+
+ # Handle different input formats
+ if isinstance(video_data, str):
+ video_path = video_data
+ elif isinstance(video_data, dict):
+ video_path = video_data.get("path") or video_data.get("name")
+ else:
+ # Assume it's a file object
+ video_path = video_data.name if hasattr(video_data, "name") else str(video_data)
+
+ # Extract options
+ options = {
+ "video_path": video_path,
+ "model": payload.get("model", self.default_model),
+ "enable_visualization": payload.get("enable_visualization", self.enable_visualization),
+ "include_keypoints": payload.get("include_keypoints", self.include_keypoints)
+ }
+
+ return options
+
+ def postprocess(self, value: Any) -> Dict[str, Any]:
+ """
+ Postprocess analysis results for the frontend.
+
+ Args:
+ value: Analysis results
+
+ Returns:
+ Formatted output for display
+ """
+ if value is None:
+ return {"json_output": {}, "video_output": None}
+
+ # Ensure we have the expected format
+ if isinstance(value, tuple) and len(value) == 2:
+ json_data, video_path = value
+ else:
+ json_data = value
+ video_path = None
+
+ return {
+ "json_output": json_data,
+ "video_output": video_path
+ }
+
+ def process_video(self, video_input: Union[str, os.PathLike], model: str = DEFAULT_MODEL,
+ enable_visualization: bool = True,
+ include_keypoints: bool = False) -> Tuple[Dict[str, Any], Optional[str]]:
+ """
+ Main processing function that performs pose analysis on a video.
+
+ Args:
+ video_input: Path to input video, video URL (YouTube/Vimeo), or file object
+ model: Pose estimation model to use (supports enhanced syntax like "yolo-v11-s")
+ enable_visualization: Whether to generate visualization video
+ include_keypoints: Whether to include keypoints in JSON
+
+ Returns:
+ Tuple of (analysis_json, visualization_video_path)
+ """
+ # Handle video input (local file, URL, etc.)
+ try:
+ video_path, video_metadata = self.video_input.process_input(str(video_input))
+ print(f"Processing video: {video_metadata.get('title', 'Unknown')}")
+ if video_metadata.get('platform') in ['youtube', 'vimeo']:
+ print(f"Downloaded from {video_metadata['platform']}")
+ except Exception as e:
+            raise ValueError(f"Failed to process video input: {str(e)}")
+
+        # Get video metadata
+ frame_count, fps, (width, height) = get_video_info(video_path)
+
+ # Create or get pose estimator
+ if model not in self._estimators:
+ self._estimators[model] = get_pose_estimator(model)
+ estimator = self._estimators[model]
+
+ # Process video frame by frame
+ print(f"Processing {frame_count} frames with {model} model...")
+
+ all_frames = []
+ all_pose_results = []
+
+ for i, frame in enumerate(extract_frames(video_path)):
+ # Store frame if visualization is needed
+ if enable_visualization:
+ all_frames.append(frame)
+
+ # Detect poses
+ pose_results = estimator.detect(frame)
+
+ # Update frame indices
+ for result in pose_results:
+ result.frame_index = i
+
+ all_pose_results.append(pose_results)
+
+ # Progress indicator
+ if i % 30 == 0:
+ print(f"Processed {i}/{frame_count} frames...")
+
+ print("Analyzing movement patterns...")
+
+ # Analyze movement
+ movement_metrics = analyze_pose_sequence(all_pose_results, fps=fps)
+
+ # Enhanced AI analysis reserved for Version 2
+ print("LMA analysis complete - advanced AI features coming in Version 2!")
+
+ # Generate JSON output
+ video_metadata = {
+ "fps": fps,
+ "width": width,
+ "height": height,
+ "frame_count": frame_count,
+ "model_info": {
+ "name": model,
+ "type": "pose_estimation"
+ },
+ "input_metadata": video_metadata # Include video source metadata
+ }
+
+ json_output = generate_json(
+ movement_metrics,
+ all_pose_results if include_keypoints else None,
+ video_metadata,
+ include_keypoints=include_keypoints
+ )
+
+ # Enhanced AI analysis will be added in Version 2
+
+ # Generate visualization if requested
+ visualization_path = None
+ if enable_visualization:
+ print("Generating visualization video...")
+
+ # Create temporary output file
+ with tempfile.NamedTemporaryFile(suffix='.mp4', delete=False) as tmp:
+ visualization_path = tmp.name
+
+ # Create visualizer
+ visualizer = PoseVisualizer(
+ show_trails=True,
+ show_skeleton=True,
+ show_direction_arrows=True,
+ show_metrics=True
+ )
+
+ # Generate overlay video
+ visualization_path = visualizer.generate_overlay_video(
+ all_frames,
+ all_pose_results,
+ movement_metrics,
+ visualization_path,
+ fps
+ )
+
+ print(f"Visualization saved to: {visualization_path}")
+
+ return json_output, visualization_path
+
+ def __call__(self, video_path: str, **kwargs) -> Tuple[Dict[str, Any], Optional[str]]:
+ """
+ Make the component callable for easy use.
+
+ Args:
+ video_path: Path to video file
+ **kwargs: Additional options
+
+ Returns:
+ Analysis results
+ """
+ return self.process_video(video_path, **kwargs)
+
+ def start_webrtc_stream(self, model: str = None) -> bool:
+ """
+ Start WebRTC real-time analysis stream.
+
+ Args:
+ model: Pose model to use for real-time analysis
+
+ Returns:
+ True if stream started successfully
+ """
+ if not self.enable_webrtc or not self.webrtc_analyzer:
+ print("WebRTC not enabled or available")
+ return False
+
+ try:
+ if model:
+ self.webrtc_analyzer.model = model
+ self.webrtc_analyzer.pose_estimator = get_pose_estimator(model)
+
+ self.webrtc_analyzer.start_stream()
+ print(f"WebRTC stream started with {self.webrtc_analyzer.model} model")
+ return True
+ except Exception as e:
+ print(f"Failed to start WebRTC stream: {e}")
+ return False
+
+ def stop_webrtc_stream(self) -> bool:
+ """
+ Stop WebRTC real-time analysis stream.
+
+ Returns:
+ True if stream stopped successfully
+ """
+ if not self.webrtc_analyzer:
+ return False
+
+ try:
+ self.webrtc_analyzer.stop_stream()
+ print("WebRTC stream stopped")
+ return True
+ except Exception as e:
+ print(f"Failed to stop WebRTC stream: {e}")
+ return False
+
+ def get_webrtc_interface(self):
+ """
+ Get WebRTC Gradio interface for real-time streaming.
+
+ Returns:
+ WebRTCGradioInterface instance or None
+ """
+ if not self.enable_webrtc or not self.webrtc_analyzer:
+ return None
+
+ return WebRTCGradioInterface(self.webrtc_analyzer)
+
+ # SkateFormer methods moved to Version 2 development
+ # get_skateformer_compatibility() and get_skateformer_status_report()
+ # will be available in the next major release
+
+ def cleanup(self):
+ """Clean up temporary files and resources."""
+ # Clean up video input handler
+ if hasattr(self, 'video_input'):
+ self.video_input.cleanup()
+
+ # Stop WebRTC if running
+ if self.webrtc_analyzer and self.webrtc_analyzer.is_running:
+ self.stop_webrtc_stream()
+
+ def example_payload(self) -> Dict[str, Any]:
+ """Example input payload for documentation."""
+ return {
+ "video": {"path": "/path/to/video.mp4"},
+ "model": "mediapipe",
+ "enable_visualization": True,
+ "include_keypoints": False
+ }
+
+ def example_value(self) -> Dict[str, Any]:
+ """Example output value for documentation."""
+ return {
+ "json_output": {
+ "analysis_metadata": {
+ "timestamp": "2024-01-01T00:00:00",
+ "version": "1.0.0",
+ "model_info": {"name": "mediapipe", "type": "pose_estimation"}
+ },
+ "video_info": {
+ "fps": 30.0,
+ "duration_seconds": 5.0,
+ "width": 1920,
+ "height": 1080,
+ "frame_count": 150
+ },
+ "movement_analysis": {
+ "frame_count": 150,
+ "frames": [
+ {
+ "frame_index": 0,
+ "timestamp": 0.0,
+ "metrics": {
+ "direction": "stationary",
+ "intensity": "low",
+ "speed": "slow",
+ "velocity": 0.0,
+ "acceleration": 0.0,
+ "fluidity": 1.0,
+ "expansion": 0.5
+ }
+ }
+ ],
+ "summary": {
+ "direction": {
+ "distribution": {"stationary": 50, "up": 30, "down": 20},
+ "dominant": "stationary"
+ },
+ "intensity": {
+ "distribution": {"low": 80, "medium": 15, "high": 5},
+ "dominant": "low"
+ }
+ }
+ }
+ },
+ "video_output": "/tmp/visualization.mp4"
+ }
+
+ def api_info(self) -> Dict[str, Any]:
+ """API information for the component."""
+ return {
+ "type": "composite",
+ "description": "Video-based pose analysis with Laban Movement Analysis metrics",
+ "parameters": {
+ "video": {"type": "file", "description": "Input video file or URL (YouTube/Vimeo)"},
+ "model": {"type": "string", "description": "Pose model: mediapipe, movenet, or yolo variants"},
+ "enable_visualization": {"type": "boolean", "description": "Generate visualization video"},
+ "include_keypoints": {"type": "boolean", "description": "Include keypoints in JSON"}
+ },
+ "returns": {
+ "json_output": {"type": "object", "description": "LMA analysis results"},
+ "video_output": {"type": "file", "description": "Visualization video (optional)"}
+ },
+ "version_2_preview": {
+ "planned_features": ["SkateFormer AI integration", "Enhanced movement recognition", "Real-time analysis"],
+ "note": "Advanced AI features coming in Version 2!"
+ }
+ }
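+
+
+if __name__ == "__main__":
+    # Hedged usage sketch: exercises the component outside a Gradio app. Run as a
+    # module (python -m backend.gradio_labanmovementanalysis.labanmovementanalysis)
+    # so the relative imports resolve; "sample_clip.mp4" is a placeholder path.
+    analyzer = LabanMovementAnalysis(default_model="mediapipe")
+    try:
+        report, overlay = analyzer.process_video(
+            "sample_clip.mp4",
+            model="mediapipe",
+            enable_visualization=False,
+        )
+        summary = report["movement_analysis"]["summary"]
+        print("Dominant direction:", summary["direction"]["dominant"])
+        print("Dominant intensity:", summary["intensity"]["dominant"])
+    finally:
+        analyzer.cleanup()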
diff --git a/backend/gradio_labanmovementanalysis/labanmovementanalysis.pyi b/backend/gradio_labanmovementanalysis/labanmovementanalysis.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..656c55f038c5f93a177b5d1ce954a4e8a70418c6
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/labanmovementanalysis.pyi
@@ -0,0 +1,448 @@
+"""
+Custom Gradio v5 component for video-based pose analysis with LMA-inspired metrics.
+"""
+
+import gradio as gr
+from gradio.components.base import Component
+from typing import Dict, Any, Optional, Tuple, List, Union
+import tempfile
+import os
+import numpy as np
+
+from .video_utils import extract_frames, get_video_info
+from .pose_estimation import get_pose_estimator
+from .notation_engine import analyze_pose_sequence
+from .json_generator import generate_json, format_for_display
+from .visualizer import PoseVisualizer
+from .video_downloader import SmartVideoInput
+
+# Optional advanced features
+try:
+ from .skateformer_integration import SkateFormerAnalyzer
+ HAS_SKATEFORMER = True
+except ImportError:
+ HAS_SKATEFORMER = False
+
+try:
+ from .webrtc_handler import WebRTCMovementAnalyzer, WebRTCGradioInterface
+ HAS_WEBRTC = True
+except ImportError:
+ HAS_WEBRTC = False
+
+from gradio.events import Dependency
+
+class LabanMovementAnalysis(Component):
+ """
+ Gradio component for video-based pose analysis with Laban Movement Analysis metrics.
+ """
+
+ # Component metadata
+ COMPONENT_TYPE = "composite"
+ DEFAULT_MODEL = "mediapipe"
+
+ def __init__(self,
+ default_model: str = DEFAULT_MODEL,
+ enable_visualization: bool = True,
+ include_keypoints: bool = False,
+ enable_webrtc: bool = False,
+ label: Optional[str] = None,
+ every: Optional[float] = None,
+ show_label: Optional[bool] = None,
+ container: bool = True,
+ scale: Optional[int] = None,
+ min_width: int = 160,
+ interactive: Optional[bool] = None,
+ visible: bool = True,
+ elem_id: Optional[str] = None,
+ elem_classes: Optional[List[str]] = None,
+ render: bool = True,
+ **kwargs):
+ """
+ Initialize the Laban Movement Analysis component.
+
+ Args:
+ default_model: Default pose estimation model ("mediapipe", "movenet", "yolo")
+ enable_visualization: Whether to generate visualization video by default
+ include_keypoints: Whether to include raw keypoints in JSON output
+ enable_webrtc: Whether to enable WebRTC real-time analysis
+ label: Component label
+ ... (other standard Gradio component args)
+ """
+ super().__init__(
+ label=label,
+ every=every,
+ show_label=show_label,
+ container=container,
+ scale=scale,
+ min_width=min_width,
+ interactive=interactive,
+ visible=visible,
+ elem_id=elem_id,
+ elem_classes=elem_classes,
+ render=render,
+ **kwargs
+ )
+
+ self.default_model = default_model
+ self.enable_visualization = enable_visualization
+ self.include_keypoints = include_keypoints
+ self.enable_webrtc = enable_webrtc and HAS_WEBRTC
+
+ # Cache for pose estimators
+ self._estimators = {}
+
+ # Video input handler for URLs
+ self.video_input = SmartVideoInput()
+
+ # SkateFormer features reserved for Version 2
+
+ self.webrtc_analyzer = None
+ if self.enable_webrtc:
+ try:
+ self.webrtc_analyzer = WebRTCMovementAnalyzer(model=default_model)
+ except Exception as e:
+ print(f"Warning: Failed to initialize WebRTC: {e}")
+ self.enable_webrtc = False
+
+ def preprocess(self, payload: Dict[str, Any]) -> Dict[str, Any]:
+ """
+ Preprocess input from the frontend.
+
+ Args:
+ payload: Input data containing video file and options
+
+ Returns:
+ Processed data for analysis
+ """
+ if not payload:
+ return None
+
+ # Extract video file path
+ video_data = payload.get("video")
+ if not video_data:
+ return None
+
+ # Handle different input formats
+ if isinstance(video_data, str):
+ video_path = video_data
+ elif isinstance(video_data, dict):
+ video_path = video_data.get("path") or video_data.get("name")
+ else:
+ # Assume it's a file object
+ video_path = video_data.name if hasattr(video_data, "name") else str(video_data)
+
+ # Extract options
+ options = {
+ "video_path": video_path,
+ "model": payload.get("model", self.default_model),
+ "enable_visualization": payload.get("enable_visualization", self.enable_visualization),
+ "include_keypoints": payload.get("include_keypoints", self.include_keypoints)
+ }
+
+ return options
+
+ def postprocess(self, value: Any) -> Dict[str, Any]:
+ """
+ Postprocess analysis results for the frontend.
+
+ Args:
+ value: Analysis results
+
+ Returns:
+ Formatted output for display
+ """
+ if value is None:
+ return {"json_output": {}, "video_output": None}
+
+ # Ensure we have the expected format
+ if isinstance(value, tuple) and len(value) == 2:
+ json_data, video_path = value
+ else:
+ json_data = value
+ video_path = None
+
+ return {
+ "json_output": json_data,
+ "video_output": video_path
+ }
+
+ def process_video(self, video_input: Union[str, os.PathLike], model: str = DEFAULT_MODEL,
+ enable_visualization: bool = True,
+ include_keypoints: bool = False) -> Tuple[Dict[str, Any], Optional[str]]:
+ """
+ Main processing function that performs pose analysis on a video.
+
+ Args:
+ video_input: Path to input video, video URL (YouTube/Vimeo), or file object
+ model: Pose estimation model to use (supports enhanced syntax like "yolo-v11-s")
+ enable_visualization: Whether to generate visualization video
+ include_keypoints: Whether to include keypoints in JSON
+
+ Returns:
+ Tuple of (analysis_json, visualization_video_path)
+ """
+ # Handle video input (local file, URL, etc.)
+ try:
+ video_path, video_metadata = self.video_input.process_input(str(video_input))
+ print(f"Processing video: {video_metadata.get('title', 'Unknown')}")
+ if video_metadata.get('platform') in ['youtube', 'vimeo']:
+ print(f"Downloaded from {video_metadata['platform']}")
+ except Exception as e:
+            raise ValueError(f"Failed to process video input: {str(e)}")
+
+        # Get video metadata
+ frame_count, fps, (width, height) = get_video_info(video_path)
+
+ # Create or get pose estimator
+ if model not in self._estimators:
+ self._estimators[model] = get_pose_estimator(model)
+ estimator = self._estimators[model]
+
+ # Process video frame by frame
+ print(f"Processing {frame_count} frames with {model} model...")
+
+ all_frames = []
+ all_pose_results = []
+
+ for i, frame in enumerate(extract_frames(video_path)):
+ # Store frame if visualization is needed
+ if enable_visualization:
+ all_frames.append(frame)
+
+ # Detect poses
+ pose_results = estimator.detect(frame)
+
+ # Update frame indices
+ for result in pose_results:
+ result.frame_index = i
+
+ all_pose_results.append(pose_results)
+
+ # Progress indicator
+ if i % 30 == 0:
+ print(f"Processed {i}/{frame_count} frames...")
+
+ print("Analyzing movement patterns...")
+
+ # Analyze movement
+ movement_metrics = analyze_pose_sequence(all_pose_results, fps=fps)
+
+ # Enhanced AI analysis reserved for Version 2
+ print("LMA analysis complete - advanced AI features coming in Version 2!")
+
+ # Generate JSON output
+ video_metadata = {
+ "fps": fps,
+ "width": width,
+ "height": height,
+ "frame_count": frame_count,
+ "model_info": {
+ "name": model,
+ "type": "pose_estimation"
+ },
+ "input_metadata": video_metadata # Include video source metadata
+ }
+
+ json_output = generate_json(
+ movement_metrics,
+ all_pose_results if include_keypoints else None,
+ video_metadata,
+ include_keypoints=include_keypoints
+ )
+
+ # Enhanced AI analysis will be added in Version 2
+
+ # Generate visualization if requested
+ visualization_path = None
+ if enable_visualization:
+ print("Generating visualization video...")
+
+ # Create temporary output file
+ with tempfile.NamedTemporaryFile(suffix='.mp4', delete=False) as tmp:
+ visualization_path = tmp.name
+
+ # Create visualizer
+ visualizer = PoseVisualizer(
+ show_trails=True,
+ show_skeleton=True,
+ show_direction_arrows=True,
+ show_metrics=True
+ )
+
+ # Generate overlay video
+ visualization_path = visualizer.generate_overlay_video(
+ all_frames,
+ all_pose_results,
+ movement_metrics,
+ visualization_path,
+ fps
+ )
+
+ print(f"Visualization saved to: {visualization_path}")
+
+ return json_output, visualization_path
+
+ def __call__(self, video_path: str, **kwargs) -> Tuple[Dict[str, Any], Optional[str]]:
+ """
+ Make the component callable for easy use.
+
+ Args:
+ video_path: Path to video file
+ **kwargs: Additional options
+
+ Returns:
+ Analysis results
+ """
+ return self.process_video(video_path, **kwargs)
+
+ def start_webrtc_stream(self, model: str = None) -> bool:
+ """
+ Start WebRTC real-time analysis stream.
+
+ Args:
+ model: Pose model to use for real-time analysis
+
+ Returns:
+ True if stream started successfully
+ """
+ if not self.enable_webrtc or not self.webrtc_analyzer:
+ print("WebRTC not enabled or available")
+ return False
+
+ try:
+ if model:
+ self.webrtc_analyzer.model = model
+ self.webrtc_analyzer.pose_estimator = get_pose_estimator(model)
+
+ self.webrtc_analyzer.start_stream()
+ print(f"WebRTC stream started with {self.webrtc_analyzer.model} model")
+ return True
+ except Exception as e:
+ print(f"Failed to start WebRTC stream: {e}")
+ return False
+
+ def stop_webrtc_stream(self) -> bool:
+ """
+ Stop WebRTC real-time analysis stream.
+
+ Returns:
+ True if stream stopped successfully
+ """
+ if not self.webrtc_analyzer:
+ return False
+
+ try:
+ self.webrtc_analyzer.stop_stream()
+ print("WebRTC stream stopped")
+ return True
+ except Exception as e:
+ print(f"Failed to stop WebRTC stream: {e}")
+ return False
+
+ def get_webrtc_interface(self):
+ """
+ Get WebRTC Gradio interface for real-time streaming.
+
+ Returns:
+ WebRTCGradioInterface instance or None
+ """
+ if not self.enable_webrtc or not self.webrtc_analyzer:
+ return None
+
+ return WebRTCGradioInterface(self.webrtc_analyzer)
+
+ # SkateFormer methods moved to Version 2 development
+ # get_skateformer_compatibility() and get_skateformer_status_report()
+ # will be available in the next major release
+
+ def cleanup(self):
+ """Clean up temporary files and resources."""
+ # Clean up video input handler
+ if hasattr(self, 'video_input'):
+ self.video_input.cleanup()
+
+ # Stop WebRTC if running
+ if self.webrtc_analyzer and self.webrtc_analyzer.is_running:
+ self.stop_webrtc_stream()
+
+ def example_payload(self) -> Dict[str, Any]:
+ """Example input payload for documentation."""
+ return {
+ "video": {"path": "/path/to/video.mp4"},
+ "model": "mediapipe",
+ "enable_visualization": True,
+ "include_keypoints": False
+ }
+
+ def example_value(self) -> Dict[str, Any]:
+ """Example output value for documentation."""
+ return {
+ "json_output": {
+ "analysis_metadata": {
+ "timestamp": "2024-01-01T00:00:00",
+ "version": "1.0.0",
+ "model_info": {"name": "mediapipe", "type": "pose_estimation"}
+ },
+ "video_info": {
+ "fps": 30.0,
+ "duration_seconds": 5.0,
+ "width": 1920,
+ "height": 1080,
+ "frame_count": 150
+ },
+ "movement_analysis": {
+ "frame_count": 150,
+ "frames": [
+ {
+ "frame_index": 0,
+ "timestamp": 0.0,
+ "metrics": {
+ "direction": "stationary",
+ "intensity": "low",
+ "speed": "slow",
+ "velocity": 0.0,
+ "acceleration": 0.0,
+ "fluidity": 1.0,
+ "expansion": 0.5
+ }
+ }
+ ],
+ "summary": {
+ "direction": {
+ "distribution": {"stationary": 50, "up": 30, "down": 20},
+ "dominant": "stationary"
+ },
+ "intensity": {
+ "distribution": {"low": 80, "medium": 15, "high": 5},
+ "dominant": "low"
+ }
+ }
+ }
+ },
+ "video_output": "/tmp/visualization.mp4"
+ }
+
+ def api_info(self) -> Dict[str, Any]:
+ """API information for the component."""
+ return {
+ "type": "composite",
+ "description": "Video-based pose analysis with Laban Movement Analysis metrics",
+ "parameters": {
+ "video": {"type": "file", "description": "Input video file or URL (YouTube/Vimeo)"},
+ "model": {"type": "string", "description": "Pose model: mediapipe, movenet, or yolo variants"},
+ "enable_visualization": {"type": "boolean", "description": "Generate visualization video"},
+ "include_keypoints": {"type": "boolean", "description": "Include keypoints in JSON"}
+ },
+ "returns": {
+ "json_output": {"type": "object", "description": "LMA analysis results"},
+ "video_output": {"type": "file", "description": "Visualization video (optional)"}
+ },
+ "version_2_preview": {
+ "planned_features": ["SkateFormer AI integration", "Enhanced movement recognition", "Real-time analysis"],
+ "note": "Advanced AI features coming in Version 2!"
+ }
+ }
+ from typing import Callable, Literal, Sequence, Any, TYPE_CHECKING
+ from gradio.blocks import Block
+ if TYPE_CHECKING:
+ from gradio.components import Timer
+ from gradio.components.base import Component
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/notation_engine.py b/backend/gradio_labanmovementanalysis/notation_engine.py
new file mode 100644
index 0000000000000000000000000000000000000000..663e373851f26c3192a864a0fbcc473b97f48b26
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/notation_engine.py
@@ -0,0 +1,317 @@
+"""
+Laban Movement Analysis (LMA) inspired notation engine.
+Computes movement metrics like direction, intensity, and speed from pose keypoints.
+"""
+
+import numpy as np
+from typing import List, Dict, Optional, Tuple, Any
+from dataclasses import dataclass
+from enum import Enum
+
+from .pose_estimation import PoseResult, Keypoint
+
+
+class Direction(Enum):
+ """Movement direction categories."""
+ UP = "up"
+ DOWN = "down"
+ LEFT = "left"
+ RIGHT = "right"
+ FORWARD = "forward"
+ BACKWARD = "backward"
+ STATIONARY = "stationary"
+
+
+class Intensity(Enum):
+ """Movement intensity levels."""
+ LOW = "low"
+ MEDIUM = "medium"
+ HIGH = "high"
+
+
+class Speed(Enum):
+ """Movement speed categories."""
+ SLOW = "slow"
+ MODERATE = "moderate"
+ FAST = "fast"
+
+
+@dataclass
+class MovementMetrics:
+ """LMA-inspired movement metrics for a frame or segment."""
+ frame_index: int
+ timestamp: Optional[float] = None
+
+ # Primary metrics
+ direction: Direction = Direction.STATIONARY
+ intensity: Intensity = Intensity.LOW
+ speed: Speed = Speed.SLOW
+
+ # Numeric values
+ velocity: float = 0.0 # pixels/second or normalized units
+ acceleration: float = 0.0
+
+ # Additional qualities
+ fluidity: float = 0.0 # 0-1, smoothness of movement
+ expansion: float = 0.0 # 0-1, how spread out the pose is
+
+ # Raw displacement data
+ center_displacement: Optional[Tuple[float, float]] = None
+ total_displacement: float = 0.0
+
+
+class MovementAnalyzer:
+ """Analyzes pose sequences to extract LMA-style movement metrics."""
+
+ def __init__(self, fps: float = 30.0,
+ velocity_threshold_slow: float = 0.01,
+ velocity_threshold_fast: float = 0.1,
+ intensity_accel_threshold: float = 0.05):
+ """
+ Initialize movement analyzer.
+
+ Args:
+ fps: Frames per second of the video
+ velocity_threshold_slow: Threshold for slow movement (normalized)
+ velocity_threshold_fast: Threshold for fast movement (normalized)
+ intensity_accel_threshold: Acceleration threshold for intensity
+ """
+ self.fps = fps
+ self.frame_duration = 1.0 / fps
+ self.velocity_threshold_slow = velocity_threshold_slow
+ self.velocity_threshold_fast = velocity_threshold_fast
+ self.intensity_accel_threshold = intensity_accel_threshold
+
+ def analyze_movement(self, pose_sequence: List[List[PoseResult]]) -> List[MovementMetrics]:
+ """
+ Analyze a sequence of poses to compute movement metrics.
+
+ Args:
+ pose_sequence: List of pose results per frame
+
+ Returns:
+ List of movement metrics per frame
+ """
+ if not pose_sequence:
+ return []
+
+ metrics = []
+ prev_centers = None
+ prev_velocity = None
+
+ for frame_idx, frame_poses in enumerate(pose_sequence):
+ if not frame_poses:
+ # No pose detected in this frame
+ metrics.append(MovementMetrics(
+ frame_index=frame_idx,
+ timestamp=frame_idx * self.frame_duration
+ ))
+ continue
+
+ # For now, analyze first person only
+ # TODO: Extend to multi-person analysis
+ pose = frame_poses[0]
+
+ # Compute body center and limb positions
+ center = self._compute_body_center(pose.keypoints)
+ limb_positions = self._get_limb_positions(pose.keypoints)
+
+ # Initialize metrics for this frame
+ frame_metrics = MovementMetrics(
+ frame_index=frame_idx,
+ timestamp=frame_idx * self.frame_duration
+ )
+
+ if prev_centers is not None and frame_idx > 0:
+ # Compute displacement and velocity
+ displacement = (
+ center[0] - prev_centers[0],
+ center[1] - prev_centers[1]
+ )
+ frame_metrics.center_displacement = displacement
+ frame_metrics.total_displacement = np.sqrt(
+ displacement[0]**2 + displacement[1]**2
+ )
+
+ # Velocity (normalized units per second)
+ frame_metrics.velocity = frame_metrics.total_displacement * self.fps
+
+ # Direction
+ frame_metrics.direction = self._compute_direction(displacement)
+
+ # Speed category
+ frame_metrics.speed = self._categorize_speed(frame_metrics.velocity)
+
+ # Acceleration and intensity
+ if prev_velocity is not None:
+ frame_metrics.acceleration = abs(
+ frame_metrics.velocity - prev_velocity
+ ) * self.fps
+ frame_metrics.intensity = self._compute_intensity(
+ frame_metrics.acceleration,
+ frame_metrics.velocity
+ )
+
+ # Fluidity (based on acceleration smoothness)
+ frame_metrics.fluidity = self._compute_fluidity(
+ frame_metrics.acceleration
+ )
+
+ # Expansion (how spread out the pose is)
+ frame_metrics.expansion = self._compute_expansion(pose.keypoints)
+
+ metrics.append(frame_metrics)
+
+ # Update previous values
+ prev_centers = center
+ prev_velocity = frame_metrics.velocity
+
+ # Post-process to smooth metrics if needed
+ metrics = self._smooth_metrics(metrics)
+
+ return metrics
+
+ def _compute_body_center(self, keypoints: List[Keypoint]) -> Tuple[float, float]:
+ """Compute the center of mass of the body."""
+ # Use major body joints for center calculation
+ major_joints = ["left_hip", "right_hip", "left_shoulder", "right_shoulder"]
+
+ x_coords = []
+ y_coords = []
+
+ for kp in keypoints:
+ if kp.name in major_joints and kp.confidence > 0.5:
+ x_coords.append(kp.x)
+ y_coords.append(kp.y)
+
+ if not x_coords:
+ # Fallback to all keypoints
+ x_coords = [kp.x for kp in keypoints if kp.confidence > 0.3]
+ y_coords = [kp.y for kp in keypoints if kp.confidence > 0.3]
+
+ if x_coords:
+ return (np.mean(x_coords), np.mean(y_coords))
+ return (0.5, 0.5) # Default center
+
+ def _get_limb_positions(self, keypoints: List[Keypoint]) -> Dict[str, Tuple[float, float]]:
+ """Get positions of major limbs."""
+ positions = {}
+ for kp in keypoints:
+ if kp.confidence > 0.3:
+ positions[kp.name] = (kp.x, kp.y)
+ return positions
+
+ def _compute_direction(self, displacement: Tuple[float, float]) -> Direction:
+ """Compute movement direction from displacement vector."""
+ dx, dy = displacement
+
+ # Threshold for considering movement
+ threshold = 0.005
+
+ if abs(dx) < threshold and abs(dy) < threshold:
+ return Direction.STATIONARY
+
+ # Determine primary direction
+ if abs(dx) > abs(dy):
+ return Direction.RIGHT if dx > 0 else Direction.LEFT
+ else:
+ return Direction.DOWN if dy > 0 else Direction.UP
+
+ def _categorize_speed(self, velocity: float) -> Speed:
+ """Categorize velocity into speed levels."""
+ if velocity < self.velocity_threshold_slow:
+ return Speed.SLOW
+ elif velocity < self.velocity_threshold_fast:
+            return Speed.MODERATE
+ else:
+ return Speed.FAST
+
+ def _compute_intensity(self, acceleration: float, velocity: float) -> Intensity:
+ """Compute movement intensity based on acceleration and velocity."""
+ # High acceleration or high velocity indicates high intensity
+ if acceleration > self.intensity_accel_threshold * 2 or velocity > self.velocity_threshold_fast:
+ return Intensity.HIGH
+ elif acceleration > self.intensity_accel_threshold or velocity > self.velocity_threshold_slow:
+ return Intensity.MEDIUM
+ else:
+ return Intensity.LOW
+
+ def _compute_fluidity(self, acceleration: float) -> float:
+ """
+ Compute fluidity score (0-1) based on acceleration.
+ Lower acceleration = higher fluidity (smoother movement).
+ """
+ # Normalize acceleration to 0-1 range
+ max_accel = 0.2 # Expected maximum acceleration
+ norm_accel = min(acceleration / max_accel, 1.0)
+
+ # Invert so low acceleration = high fluidity
+ return 1.0 - norm_accel
+
+ def _compute_expansion(self, keypoints: List[Keypoint]) -> float:
+ """
+ Compute how expanded/contracted the pose is.
+ Returns 0-1 where 1 is fully expanded.
+ """
+ # Calculate distances between opposite limbs
+ limb_pairs = [
+ ("left_wrist", "right_wrist"),
+ ("left_ankle", "right_ankle"),
+ ("left_wrist", "left_ankle"),
+ ("right_wrist", "right_ankle")
+ ]
+
+ kp_dict = {kp.name: kp for kp in keypoints if kp.confidence > 0.3}
+
+ distances = []
+ for limb1, limb2 in limb_pairs:
+ if limb1 in kp_dict and limb2 in kp_dict:
+ kp1 = kp_dict[limb1]
+ kp2 = kp_dict[limb2]
+ dist = np.sqrt((kp1.x - kp2.x)**2 + (kp1.y - kp2.y)**2)
+ distances.append(dist)
+
+ if distances:
+ # Normalize by expected maximum distance
+ avg_dist = np.mean(distances)
+ max_expected = 1.4 # Diagonal of normalized space
+ return min(avg_dist / max_expected, 1.0)
+
+ return 0.5 # Default neutral expansion
+
+ def _smooth_metrics(self, metrics: List[MovementMetrics]) -> List[MovementMetrics]:
+ """Apply smoothing to reduce noise in metrics."""
+ # Simple moving average for numeric values
+ window_size = 3
+
+ if len(metrics) <= window_size:
+ return metrics
+
+ # Smooth velocity and acceleration
+ for i in range(window_size, len(metrics)):
+ velocities = [m.velocity for m in metrics[i-window_size:i+1]]
+ metrics[i].velocity = np.mean(velocities)
+
+ accels = [m.acceleration for m in metrics[i-window_size:i+1]]
+ metrics[i].acceleration = np.mean(accels)
+
+ fluidities = [m.fluidity for m in metrics[i-window_size:i+1]]
+ metrics[i].fluidity = np.mean(fluidities)
+
+ return metrics
+
+
+def analyze_pose_sequence(pose_sequence: List[List[PoseResult]],
+ fps: float = 30.0) -> List[MovementMetrics]:
+ """
+ Convenience function to analyze a pose sequence.
+
+ Args:
+ pose_sequence: List of pose results per frame
+ fps: Video frame rate
+
+ Returns:
+ List of movement metrics
+ """
+ analyzer = MovementAnalyzer(fps=fps)
+ return analyzer.analyze_movement(pose_sequence)
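+
+
+if __name__ == "__main__":
+    # Hedged usage sketch: a tiny synthetic pose sequence (torso drifting upward)
+    # run through analyze_pose_sequence. Run as a module so the relative import of
+    # pose_estimation resolves; the coordinates are arbitrary normalized values.
+    def _fake_frame(frame_idx: int, y: float) -> List[PoseResult]:
+        torso = ("left_hip", "right_hip", "left_shoulder", "right_shoulder")
+        keypoints = [Keypoint(x=0.5, y=y, confidence=0.9, name=name) for name in torso]
+        return [PoseResult(keypoints=keypoints, frame_index=frame_idx)]
+
+    sequence = [_fake_frame(i, 0.6 - 0.02 * i) for i in range(10)]
+    for m in analyze_pose_sequence(sequence, fps=30.0):
+        print(m.frame_index, m.direction.value, m.speed.value, round(m.velocity, 3))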
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/pose_estimation.py b/backend/gradio_labanmovementanalysis/pose_estimation.py
new file mode 100644
index 0000000000000000000000000000000000000000..4793813e697a57a7e143c262f4423b9f0f356169
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/pose_estimation.py
@@ -0,0 +1,380 @@
+"""
+Model-agnostic pose estimation interface with adapters for various pose detection models.
+Each adapter provides a uniform interface for pose estimation, abstracting model-specific details.
+"""
+
+from abc import ABC, abstractmethod
+import numpy as np
+from typing import List, Dict, Optional, Tuple, Any
+from dataclasses import dataclass
+
+
+@dataclass
+class Keypoint:
+ """Represents a single pose keypoint."""
+ x: float # Normalized x coordinate (0-1)
+ y: float # Normalized y coordinate (0-1)
+ confidence: float # Confidence score (0-1)
+ name: Optional[str] = None # Joint name (e.g., "left_shoulder")
+
+
+@dataclass
+class PoseResult:
+ """Result of pose estimation for a single frame."""
+ keypoints: List[Keypoint]
+ frame_index: int
+ timestamp: Optional[float] = None
+ person_id: Optional[int] = None # For multi-person tracking
+
+
+class PoseEstimator(ABC):
+ """Abstract base class for pose estimation models."""
+
+ @abstractmethod
+ def detect(self, frame: np.ndarray) -> List[PoseResult]:
+ """
+ Detect poses in a single frame.
+
+ Args:
+ frame: Input frame as numpy array (BGR format)
+
+ Returns:
+ List of PoseResult objects (one per detected person)
+ """
+ pass
+
+ @abstractmethod
+ def get_keypoint_names(self) -> List[str]:
+ """Get the list of keypoint names this model provides."""
+ pass
+
+ def detect_batch(self, frames: List[np.ndarray]) -> List[List[PoseResult]]:
+ """
+ Process multiple frames (default implementation processes sequentially).
+
+ Args:
+ frames: List of frames
+
+ Returns:
+ List of results per frame
+ """
+ results = []
+ for i, frame in enumerate(frames):
+ frame_results = self.detect(frame)
+ # Update frame indices
+ for result in frame_results:
+ result.frame_index = i
+ results.append(frame_results)
+ return results
+
+
+class MoveNetPoseEstimator(PoseEstimator):
+ """MoveNet pose estimation adapter (TensorFlow-based)."""
+
+ # COCO keypoint names used by MoveNet
+ KEYPOINT_NAMES = [
+ "nose", "left_eye", "right_eye", "left_ear", "right_ear",
+ "left_shoulder", "right_shoulder", "left_elbow", "right_elbow",
+ "left_wrist", "right_wrist", "left_hip", "right_hip",
+ "left_knee", "right_knee", "left_ankle", "right_ankle"
+ ]
+
+ def __init__(self, model_variant: str = "lightning"):
+ """
+ Initialize MoveNet model.
+
+ Args:
+ model_variant: "lightning" (faster) or "thunder" (more accurate)
+ """
+ self.model_variant = model_variant
+ self.model = None
+ self._load_model()
+
+ def _load_model(self):
+ """Load MoveNet model using TensorFlow."""
+ try:
+ import tensorflow as tf
+ import tensorflow_hub as hub
+
+ # Model URLs for different variants
+ model_urls = {
+ "lightning": "https://tfhub.dev/google/movenet/singlepose/lightning/4",
+ "thunder": "https://tfhub.dev/google/movenet/singlepose/thunder/4"
+ }
+
+ self.model = hub.load(model_urls[self.model_variant])
+ self.movenet = self.model.signatures['serving_default']
+
+ except ImportError:
+ raise ImportError("TensorFlow and tensorflow_hub required for MoveNet. "
+ "Install with: pip install tensorflow tensorflow_hub")
+
+ def detect(self, frame: np.ndarray) -> List[PoseResult]:
+ """Detect pose using MoveNet."""
+ if self.model is None:
+ self._load_model()
+
+ import tensorflow as tf
+
+ # Prepare input
+ height, width = frame.shape[:2]
+
+ # MoveNet expects RGB
+ rgb_frame = frame[:, :, ::-1] # BGR to RGB
+
+ # Resize and normalize
+ input_size = 192 if self.model_variant == "lightning" else 256
+ input_image = tf.image.resize_with_pad(
+ tf.expand_dims(rgb_frame, axis=0), input_size, input_size
+ )
+ input_image = tf.cast(input_image, dtype=tf.int32)
+
+ # Run inference
+ outputs = self.movenet(input_image)
+ keypoints_with_scores = outputs['output_0'].numpy()[0, 0, :, :]
+
+ # Convert to our format
+ keypoints = []
+ for i, (y, x, score) in enumerate(keypoints_with_scores):
+ keypoints.append(Keypoint(
+ x=float(x),
+ y=float(y),
+ confidence=float(score),
+ name=self.KEYPOINT_NAMES[i]
+ ))
+
+ return [PoseResult(keypoints=keypoints, frame_index=0)]
+
+ def get_keypoint_names(self) -> List[str]:
+ return self.KEYPOINT_NAMES.copy()
+
+
+class MediaPipePoseEstimator(PoseEstimator):
+ """MediaPipe Pose (BlazePose) estimation adapter."""
+
+ # MediaPipe landmark names
+ LANDMARK_NAMES = [
+ "nose", "left_eye_inner", "left_eye", "left_eye_outer",
+ "right_eye_inner", "right_eye", "right_eye_outer",
+ "left_ear", "right_ear", "mouth_left", "mouth_right",
+ "left_shoulder", "right_shoulder", "left_elbow", "right_elbow",
+ "left_wrist", "right_wrist", "left_pinky", "right_pinky",
+ "left_index", "right_index", "left_thumb", "right_thumb",
+ "left_hip", "right_hip", "left_knee", "right_knee",
+ "left_ankle", "right_ankle", "left_heel", "right_heel",
+ "left_foot_index", "right_foot_index"
+ ]
+
+ def __init__(self, model_complexity: int = 1, min_detection_confidence: float = 0.5):
+ """
+ Initialize MediaPipe Pose.
+
+ Args:
+ model_complexity: 0 (lite), 1 (full), or 2 (heavy)
+ min_detection_confidence: Minimum confidence for detection
+ """
+ self.model_complexity = model_complexity
+ self.min_detection_confidence = min_detection_confidence
+ self.pose = None
+ self._initialize()
+
+ def _initialize(self):
+ """Initialize MediaPipe Pose."""
+ try:
+ import mediapipe as mp
+ self.mp_pose = mp.solutions.pose
+ self.pose = self.mp_pose.Pose(
+ static_image_mode=False,
+ model_complexity=self.model_complexity,
+ min_detection_confidence=self.min_detection_confidence,
+ min_tracking_confidence=0.5
+ )
+ except ImportError:
+ raise ImportError("MediaPipe required. Install with: pip install mediapipe")
+
+ def detect(self, frame: np.ndarray) -> List[PoseResult]:
+ """Detect pose using MediaPipe."""
+ if self.pose is None:
+ self._initialize()
+
+        # MediaPipe expects a contiguous RGB array (a reversed BGR view is not C-contiguous)
+        rgb_frame = np.ascontiguousarray(frame[:, :, ::-1])  # BGR to RGB
+
+ # Process frame
+ results = self.pose.process(rgb_frame)
+
+ if not results.pose_landmarks:
+ return []
+
+ # Convert landmarks to keypoints
+ keypoints = []
+ for i, landmark in enumerate(results.pose_landmarks.landmark):
+ keypoints.append(Keypoint(
+ x=landmark.x,
+ y=landmark.y,
+ confidence=landmark.visibility if hasattr(landmark, 'visibility') else 1.0,
+ name=self.LANDMARK_NAMES[i] if i < len(self.LANDMARK_NAMES) else f"landmark_{i}"
+ ))
+
+ return [PoseResult(keypoints=keypoints, frame_index=0)]
+
+ def get_keypoint_names(self) -> List[str]:
+ return self.LANDMARK_NAMES.copy()
+
+ def __del__(self):
+ """Clean up MediaPipe resources."""
+ if self.pose:
+ self.pose.close()
+
+
+class YOLOPoseEstimator(PoseEstimator):
+ """YOLO-based pose estimation adapter (supports YOLOv8 and YOLOv11)."""
+
+ # COCO keypoint format used by YOLO
+ KEYPOINT_NAMES = [
+ "nose", "left_eye", "right_eye", "left_ear", "right_ear",
+ "left_shoulder", "right_shoulder", "left_elbow", "right_elbow",
+ "left_wrist", "right_wrist", "left_hip", "right_hip",
+ "left_knee", "right_knee", "left_ankle", "right_ankle"
+ ]
+
+ def __init__(self, model_version: str = "v11", model_size: str = "n", confidence_threshold: float = 0.25):
+ """
+ Initialize YOLO pose model.
+
+ Args:
+ model_version: "v8" or "v11"
+ model_size: Model size - "n" (nano), "s" (small), "m" (medium), "l" (large), "x" (xlarge)
+ confidence_threshold: Minimum confidence for detections
+ """
+ self.model_version = model_version
+ self.model_size = model_size
+ self.confidence_threshold = confidence_threshold
+ self.model = None
+
+ # Determine model path
+ if model_version == "v8":
+ self.model_path = f"yolov8{model_size}-pose.pt"
+ else: # v11
+ self.model_path = f"yolo11{model_size}-pose.pt"
+
+ self._load_model()
+
+ def _load_model(self):
+ """Load YOLO model."""
+ try:
+ from ultralytics import YOLO
+ self.model = YOLO(self.model_path)
+ except ImportError:
+ raise ImportError("Ultralytics required for YOLO. "
+ "Install with: pip install ultralytics")
+
+ def detect(self, frame: np.ndarray) -> List[PoseResult]:
+ """Detect poses using YOLO."""
+ if self.model is None:
+ self._load_model()
+
+ # Run inference
+ results = self.model(frame, conf=self.confidence_threshold)
+
+ pose_results = []
+
+ # Process each detection
+ for r in results:
+ if r.keypoints is not None:
+ for person_idx, keypoints_data in enumerate(r.keypoints.data):
+ keypoints = []
+
+ # YOLO returns keypoints as [x, y, conf]
+ height, width = frame.shape[:2]
+ for i, (x, y, conf) in enumerate(keypoints_data):
+ keypoints.append(Keypoint(
+ x=float(x) / width, # Normalize to 0-1
+ y=float(y) / height, # Normalize to 0-1
+ confidence=float(conf),
+ name=self.KEYPOINT_NAMES[i] if i < len(self.KEYPOINT_NAMES) else f"joint_{i}"
+ ))
+
+ pose_results.append(PoseResult(
+ keypoints=keypoints,
+ frame_index=0,
+ person_id=person_idx
+ ))
+
+ return pose_results
+
+ def get_keypoint_names(self) -> List[str]:
+ return self.KEYPOINT_NAMES.copy()
+
+
+# Note: Sapiens models removed due to complex setup requirements
+# They require the official repository and cannot be integrated cleanly
+# with the agent/MCP pipeline without significant complexity
+
+
+def create_pose_estimator(model_type: str, **kwargs) -> PoseEstimator:
+ """
+ Factory function to create pose estimator instances.
+
+ Args:
+ model_type: One of "movenet", "mediapipe", "yolo"
+ **kwargs: Model-specific parameters
+
+ Returns:
+ PoseEstimator instance
+ """
+ estimators = {
+ "movenet": MoveNetPoseEstimator,
+ "mediapipe": MediaPipePoseEstimator,
+ "yolo": YOLOPoseEstimator,
+ }
+
+ if model_type not in estimators:
+ raise ValueError(f"Unknown model type: {model_type}. "
+ f"Available: {list(estimators.keys())}")
+
+ return estimators[model_type](**kwargs)
+
+
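+# Illustrative usage of the factory (a sketch, not executed on import). It assumes a
+# local file "frame.jpg" and that the chosen backend (MediaPipe here) is installed:
+#
+#   import cv2
+#   estimator = create_pose_estimator("mediapipe", model_complexity=0)
+#   frame = cv2.imread("frame.jpg")                 # BGR, as detect() expects
+#   for pose in estimator.detect(frame):
+#       confident = [kp.name for kp in pose.keypoints if kp.confidence > 0.5]
+#       print(pose.person_id, confident)
+
+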
+def get_pose_estimator(model_spec: str) -> PoseEstimator:
+ """
+ Get pose estimator from model specification string.
+
+ Args:
+ model_spec: Model specification string, e.g.:
+ - "mediapipe" or "mediapipe-lite" or "mediapipe-full" or "mediapipe-heavy"
+ - "movenet-lightning" or "movenet-thunder"
+ - "yolo-v8-n" or "yolo-v11-s" etc.
+
+ Returns:
+ PoseEstimator instance
+ """
+ model_spec = model_spec.lower()
+
+ # MediaPipe variants
+ if model_spec.startswith("mediapipe"):
+ complexity_map = {
+ "mediapipe-lite": 0,
+ "mediapipe-full": 1,
+ "mediapipe-heavy": 2,
+ "mediapipe": 1 # default
+ }
+ complexity = complexity_map.get(model_spec, 1)
+ return create_pose_estimator("mediapipe", model_complexity=complexity)
+
+ # MoveNet variants
+ elif model_spec.startswith("movenet"):
+ variant = "lightning" if "lightning" in model_spec else "thunder"
+ return create_pose_estimator("movenet", model_variant=variant)
+
+ # YOLO variants
+ elif model_spec.startswith("yolo"):
+ parts = model_spec.split("-")
+ version = "v8" if "v8" in model_spec else "v11"
+ size = parts[-1] if len(parts) > 2 else "n"
+ return create_pose_estimator("yolo", model_version=version, model_size=size)
+
+ # Legacy format support
+ else:
+ return create_pose_estimator(model_spec)
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/video_downloader.py b/backend/gradio_labanmovementanalysis/video_downloader.py
new file mode 100644
index 0000000000000000000000000000000000000000..07eeb1f4d59483199c6246f5d6a981384b4ad346
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/video_downloader.py
@@ -0,0 +1,295 @@
+"""
+Video downloader for YouTube, Vimeo and other platforms
+"""
+
+import os
+import re
+import tempfile
+import logging
+from typing import Optional, Tuple, Dict, Any
+from urllib.parse import urlparse, parse_qs
+import subprocess
+
+logger = logging.getLogger(__name__)
+
+
+class VideoDownloader:
+ """Download videos from various platforms"""
+
+ # Gradio component compatibility
+ events = {}
+
+ def __init__(self, temp_dir: Optional[str] = None):
+ """
+ Initialize video downloader.
+
+ Args:
+ temp_dir: Directory for temporary files
+ """
+ self.temp_dir = temp_dir or tempfile.mkdtemp(prefix="laban_video_")
+ self.supported_platforms = {
+ 'youtube': self._download_youtube,
+ 'vimeo': self._download_vimeo,
+ 'direct': self._download_direct
+ }
+
+ def download(self, url: str) -> Tuple[str, Dict[str, Any]]:
+ """
+ Download video from URL.
+
+ Args:
+ url: Video URL (YouTube, Vimeo, or direct video link)
+
+ Returns:
+ Tuple of (local_path, metadata)
+ """
+ platform = self._detect_platform(url)
+
+ if platform not in self.supported_platforms:
+ raise ValueError(f"Unsupported platform: {platform}")
+
+ logger.info(f"Downloading video from {platform}: {url}")
+
+ try:
+ return self.supported_platforms[platform](url)
+ except Exception as e:
+ logger.error(f"Failed to download video: {str(e)}")
+ raise
+
+ def _detect_platform(self, url: str) -> str:
+ """Detect video platform from URL"""
+ domain = urlparse(url).netloc.lower()
+
+ if 'youtube.com' in domain or 'youtu.be' in domain:
+ return 'youtube'
+ elif 'vimeo.com' in domain:
+ return 'vimeo'
+ elif url.endswith(('.mp4', '.avi', '.mov', '.webm')):
+ return 'direct'
+ else:
+            # Fall back to treating the URL as a direct video link
+            return 'direct'
+
+ def _download_youtube(self, url: str) -> Tuple[str, Dict[str, Any]]:
+ """Download video from YouTube using yt-dlp"""
+ try:
+ import yt_dlp
+ except ImportError:
+ raise ImportError("yt-dlp is required for YouTube downloads. Install with: pip install yt-dlp")
+
+ # Extract video ID
+ video_id = self._extract_youtube_id(url)
+ output_path = os.path.join(self.temp_dir, f"youtube_{video_id}.mp4")
+
+ # yt-dlp options
+ ydl_opts = {
+ 'format': 'best[height<=720][ext=mp4]/best[height<=720]/best', # Limit to 720p for performance
+ 'outtmpl': output_path,
+ 'quiet': True,
+ 'no_warnings': True,
+ 'extract_flat': False,
+ }
+
+ metadata = {}
+
+ with yt_dlp.YoutubeDL(ydl_opts) as ydl:
+ try:
+ # Extract info
+ info = ydl.extract_info(url, download=True)
+
+ # Store metadata
+ metadata = {
+ 'title': info.get('title', 'Unknown'),
+ 'duration': info.get('duration', 0),
+ 'uploader': info.get('uploader', 'Unknown'),
+ 'view_count': info.get('view_count', 0),
+ 'description': info.get('description', ''),
+ 'platform': 'youtube',
+ 'video_id': video_id
+ }
+
+ logger.info(f"Downloaded YouTube video: {metadata['title']}")
+
+ except Exception as e:
+ raise Exception(f"Failed to download YouTube video: {str(e)}")
+
+ return output_path, metadata
+
+ def _download_vimeo(self, url: str) -> Tuple[str, Dict[str, Any]]:
+ """Download video from Vimeo using yt-dlp"""
+ try:
+ import yt_dlp
+ except ImportError:
+ raise ImportError("yt-dlp is required for Vimeo downloads. Install with: pip install yt-dlp")
+
+ # Extract video ID
+ video_id = self._extract_vimeo_id(url)
+ output_path = os.path.join(self.temp_dir, f"vimeo_{video_id}.mp4")
+
+ # yt-dlp options
+ ydl_opts = {
+ 'format': 'best[height<=720][ext=mp4]/best[height<=720]/best',
+ 'outtmpl': output_path,
+ 'quiet': True,
+ 'no_warnings': True,
+ }
+
+ metadata = {}
+
+ with yt_dlp.YoutubeDL(ydl_opts) as ydl:
+ try:
+ # Extract info
+ info = ydl.extract_info(url, download=True)
+
+ # Store metadata
+ metadata = {
+ 'title': info.get('title', 'Unknown'),
+ 'duration': info.get('duration', 0),
+ 'uploader': info.get('uploader', 'Unknown'),
+ 'description': info.get('description', ''),
+ 'platform': 'vimeo',
+ 'video_id': video_id
+ }
+
+ logger.info(f"Downloaded Vimeo video: {metadata['title']}")
+
+ except Exception as e:
+ raise Exception(f"Failed to download Vimeo video: {str(e)}")
+
+ return output_path, metadata
+
+ def _download_direct(self, url: str) -> Tuple[str, Dict[str, Any]]:
+ """Download video from direct URL"""
+ import requests
+
+ # Generate filename from URL
+ filename = os.path.basename(urlparse(url).path) or "video.mp4"
+ output_path = os.path.join(self.temp_dir, filename)
+
+ try:
+ # Download with streaming
+ response = requests.get(url, stream=True)
+ response.raise_for_status()
+
+ # Get content length
+ total_size = int(response.headers.get('content-length', 0))
+
+ # Write to file
+ with open(output_path, 'wb') as f:
+ downloaded = 0
+ for chunk in response.iter_content(chunk_size=8192):
+ if chunk:
+ f.write(chunk)
+ downloaded += len(chunk)
+
+ # Progress logging
+ if total_size > 0:
+ progress = (downloaded / total_size) * 100
+ if int(progress) % 10 == 0:
+ logger.debug(f"Download progress: {progress:.1f}%")
+
+ metadata = {
+ 'title': filename,
+ 'platform': 'direct',
+ 'url': url,
+ 'size': total_size
+ }
+
+ logger.info(f"Downloaded direct video: {filename}")
+
+ except Exception as e:
+ raise Exception(f"Failed to download direct video: {str(e)}")
+
+ return output_path, metadata
+
+ def _extract_youtube_id(self, url: str) -> str:
+ """Extract YouTube video ID from URL"""
+ patterns = [
+ r'(?:v=|\/)([0-9A-Za-z_-]{11}).*',
+ r'(?:embed\/)([0-9A-Za-z_-]{11})',
+ r'(?:watch\?v=)([0-9A-Za-z_-]{11})',
+ r'youtu\.be\/([0-9A-Za-z_-]{11})'
+ ]
+
+ for pattern in patterns:
+ match = re.search(pattern, url)
+ if match:
+ return match.group(1)
+
+ raise ValueError(f"Could not extract YouTube video ID from: {url}")
+
+ def _extract_vimeo_id(self, url: str) -> str:
+ """Extract Vimeo video ID from URL"""
+ patterns = [
+ r'vimeo\.com\/(\d+)',
+ r'player\.vimeo\.com\/video\/(\d+)'
+ ]
+
+ for pattern in patterns:
+ match = re.search(pattern, url)
+ if match:
+ return match.group(1)
+
+ raise ValueError(f"Could not extract Vimeo video ID from: {url}")
+
+ def cleanup(self):
+ """Clean up temporary files"""
+ import shutil
+ if os.path.exists(self.temp_dir):
+ try:
+ shutil.rmtree(self.temp_dir)
+ logger.info(f"Cleaned up temporary directory: {self.temp_dir}")
+ except Exception as e:
+ logger.warning(f"Failed to clean up temporary directory: {str(e)}")
+
+
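+# Illustrative usage of VideoDownloader (a sketch; the URL is a placeholder and
+# yt-dlp must be installed for YouTube downloads):
+#
+#   downloader = VideoDownloader()
+#   local_path, meta = downloader.download("https://www.youtube.com/watch?v=VIDEO_ID_HERE")
+#   print(meta["title"], meta["duration"])
+#   downloader.cleanup()
+
+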
+class SmartVideoInput:
+ """Smart video input handler that supports URLs and local files"""
+
+ events = {} # Gradio component compatibility
+
+ def __init__(self):
+ self.downloader = VideoDownloader()
+ self._temp_files = []
+
+ def process_input(self, input_path: str) -> Tuple[str, Dict[str, Any]]:
+ """
+ Process video input - can be local file or URL.
+
+ Args:
+ input_path: Local file path or video URL
+
+ Returns:
+ Tuple of (local_path, metadata)
+ """
+ # Check if it's a URL
+ if input_path.startswith(('http://', 'https://', 'www.')):
+ # Download video
+ local_path, metadata = self.downloader.download(input_path)
+ self._temp_files.append(local_path)
+ return local_path, metadata
+ else:
+ # Local file
+ if not os.path.exists(input_path):
+ raise FileNotFoundError(f"Video file not found: {input_path}")
+
+ metadata = {
+ 'title': os.path.basename(input_path),
+ 'platform': 'local',
+ 'path': input_path
+ }
+
+ return input_path, metadata
+
+ def cleanup(self):
+ """Clean up temporary files"""
+ for temp_file in self._temp_files:
+ try:
+ if os.path.exists(temp_file):
+ os.remove(temp_file)
+ logger.debug(f"Removed temporary file: {temp_file}")
+ except Exception as e:
+ logger.warning(f"Failed to remove temporary file: {str(e)}")
+
+ self._temp_files.clear()
+ self.downloader.cleanup()
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/video_utils.py b/backend/gradio_labanmovementanalysis/video_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..9671d653b896423afc9601a5d70b68692d6fe409
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/video_utils.py
@@ -0,0 +1,150 @@
+"""
+Video utilities for reading and writing video files, extracting frames, and assembling videos.
+This module isolates video I/O logic from the rest of the pipeline.
+"""
+
+import cv2
+import numpy as np
+from typing import Generator, List, Tuple, Optional
+from pathlib import Path
+
+
+def extract_frames(video_path: str) -> Generator[np.ndarray, None, None]:
+ """
+ Extract frames from a video file.
+
+ Args:
+ video_path: Path to the input video file
+
+ Yields:
+ numpy arrays representing each frame (BGR format)
+ """
+ cap = cv2.VideoCapture(video_path)
+ if not cap.isOpened():
+ raise ValueError(f"Could not open video file: {video_path}")
+
+ try:
+ while True:
+ ret, frame = cap.read()
+ if not ret:
+ break
+ yield frame
+ finally:
+ cap.release()
+
+
+def get_video_info(video_path: str) -> Tuple[int, float, Tuple[int, int]]:
+ """
+ Get video metadata.
+
+ Args:
+ video_path: Path to the video file
+
+ Returns:
+ Tuple of (frame_count, fps, (width, height))
+ """
+ cap = cv2.VideoCapture(video_path)
+ if not cap.isOpened():
+ raise ValueError(f"Could not open video file: {video_path}")
+
+ try:
+ frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+ fps = cap.get(cv2.CAP_PROP_FPS)
+ width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
+ height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
+ return frame_count, fps, (width, height)
+ finally:
+ cap.release()
+
+
+def assemble_video(frames: List[np.ndarray], output_path: str, fps: float) -> str:
+ """
+ Assemble frames into a video file.
+
+ Args:
+ frames: List of frame arrays (BGR format)
+ output_path: Path for the output video file
+ fps: Frames per second for the output video
+
+ Returns:
+ Path to the created video file
+ """
+ if not frames:
+ raise ValueError("No frames provided for video assembly")
+
+ # Get frame dimensions from first frame
+ height, width = frames[0].shape[:2]
+
+ # Create video writer
+ fourcc = cv2.VideoWriter_fourcc(*'mp4v')
+ out = cv2.VideoWriter(output_path, fourcc, fps, (width, height))
+
+ if not out.isOpened():
+ raise ValueError(f"Could not create video writer for: {output_path}")
+
+ try:
+ for frame in frames:
+ out.write(frame)
+ return output_path
+ finally:
+ out.release()
+
+
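+# Illustrative round trip through these helpers (a sketch, assuming "input.mp4" exists):
+#
+#   frames = list(extract_frames("input.mp4"))
+#   frame_count, fps, (w, h) = get_video_info("input.mp4")
+#   assemble_video(frames, "copy.mp4", fps)
+
+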
+def resize_frame(frame: np.ndarray, size: Optional[Tuple[int, int]] = None,
+ max_dimension: Optional[int] = None) -> np.ndarray:
+ """
+ Resize a frame to specified dimensions.
+
+ Args:
+ frame: Input frame array
+ size: Target (width, height) if provided
+ max_dimension: Max dimension to constrain to while maintaining aspect ratio
+
+ Returns:
+ Resized frame
+ """
+ if size is not None:
+ return cv2.resize(frame, size)
+
+ if max_dimension is not None:
+ h, w = frame.shape[:2]
+ if max(h, w) > max_dimension:
+ scale = max_dimension / max(h, w)
+ new_w = int(w * scale)
+ new_h = int(h * scale)
+ return cv2.resize(frame, (new_w, new_h))
+
+ return frame
+
+
+def frames_to_video_buffer(frames: List[np.ndarray], fps: float) -> bytes:
+ """
+ Convert frames to video buffer in memory (useful for Gradio).
+
+ Args:
+ frames: List of frame arrays
+ fps: Frames per second
+
+ Returns:
+ Video data as bytes
+ """
+ import tempfile
+ import os
+
+ # Create temporary file
+ with tempfile.NamedTemporaryFile(suffix='.mp4', delete=False) as tmp:
+ tmp_path = tmp.name
+
+ try:
+ # Write video to temp file
+ assemble_video(frames, tmp_path, fps)
+
+ # Read back as bytes
+ with open(tmp_path, 'rb') as f:
+ video_data = f.read()
+
+ return video_data
+ finally:
+ # Clean up temp file
+ if os.path.exists(tmp_path):
+ os.unlink(tmp_path)
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/visualizer.py b/backend/gradio_labanmovementanalysis/visualizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..a236b1a956e6b8fc7260b48cd057ba0830fd2a24
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/visualizer.py
@@ -0,0 +1,402 @@
+"""
+Visualizer for creating annotated videos with pose overlays and movement indicators.
+"""
+
+import cv2
+import numpy as np
+from typing import List, Tuple, Optional, Dict, Any
+from collections import deque
+import colorsys
+
+from .pose_estimation import PoseResult, Keypoint
+from .notation_engine import MovementMetrics, Direction, Intensity, Speed
+
+
+class PoseVisualizer:
+ """Creates visual overlays for pose and movement analysis."""
+
+ # COCO skeleton connections for visualization
+ COCO_SKELETON = [
+ # Face
+ (0, 1), (0, 2), (1, 3), (2, 4), # nose to eyes, eyes to ears
+ # Upper body
+ (5, 6), # shoulders
+ (5, 7), (7, 9), # left arm
+ (6, 8), (8, 10), # right arm
+ (5, 11), (6, 12), # shoulders to hips
+ # Lower body
+ (11, 12), # hips
+ (11, 13), (13, 15), # left leg
+ (12, 14), (14, 16), # right leg
+ ]
+
+ # MediaPipe skeleton connections (33 landmarks)
+ MEDIAPIPE_SKELETON = [
+ # Face connections
+ (0, 1), (1, 2), (2, 3), (3, 7), # left eye region
+ (0, 4), (4, 5), (5, 6), (6, 8), # right eye region
+ (9, 10), # mouth
+ # Upper body
+ (11, 12), # shoulders
+ (11, 13), (13, 15), # left arm
+ (12, 14), (14, 16), # right arm
+ (11, 23), (12, 24), # shoulders to hips
+ (23, 24), # hips
+ # Lower body
+ (23, 25), (25, 27), (27, 29), (27, 31), # left leg
+ (24, 26), (26, 28), (28, 30), (28, 32), # right leg
+ # Hands
+ (15, 17), (15, 19), (15, 21), # left hand
+ (16, 18), (16, 20), (16, 22), # right hand
+ ]
+
+ def __init__(self,
+ trail_length: int = 10,
+ show_skeleton: bool = True,
+ show_trails: bool = True,
+ show_direction_arrows: bool = True,
+ show_metrics: bool = True):
+ """
+ Initialize visualizer.
+
+ Args:
+ trail_length: Number of previous frames to show in motion trail
+ show_skeleton: Whether to draw pose skeleton
+ show_trails: Whether to draw motion trails
+ show_direction_arrows: Whether to show movement direction arrows
+ show_metrics: Whether to display text metrics on frame
+ """
+ self.trail_length = trail_length
+ self.show_skeleton = show_skeleton
+ self.show_trails = show_trails
+ self.show_direction_arrows = show_direction_arrows
+ self.show_metrics = show_metrics
+
+ # Trail history for each keypoint
+ self.trails = {}
+
+ # Color mapping for intensity
+ self.intensity_colors = {
+ Intensity.LOW: (0, 255, 0), # Green
+ Intensity.MEDIUM: (0, 165, 255), # Orange
+ Intensity.HIGH: (0, 0, 255) # Red
+ }
+
+ def visualize_frame(self,
+ frame: np.ndarray,
+ pose_results: List[PoseResult],
+ movement_metrics: Optional[MovementMetrics] = None,
+ frame_index: int = 0) -> np.ndarray:
+ """
+ Add visual annotations to a single frame.
+
+ Args:
+ frame: Input frame
+ pose_results: Pose detection results for this frame
+ movement_metrics: Movement analysis metrics for this frame
+ frame_index: Current frame index
+
+ Returns:
+ Annotated frame
+ """
+ # Create a copy to avoid modifying original
+ vis_frame = frame.copy()
+
+ # Draw for each detected person
+ for person_idx, pose in enumerate(pose_results):
+ # Update trails
+ if self.show_trails:
+ self._update_trails(pose, person_idx)
+ self._draw_trails(vis_frame, person_idx)
+
+ # Draw skeleton
+ if self.show_skeleton:
+ color = self._get_color_for_metrics(movement_metrics)
+ self._draw_skeleton(vis_frame, pose, color)
+
+ # Draw keypoints
+ self._draw_keypoints(vis_frame, pose, movement_metrics)
+
+ # Draw direction arrow
+ if self.show_direction_arrows and movement_metrics:
+ self._draw_direction_arrow(vis_frame, pose, movement_metrics)
+
+ # Draw metrics overlay
+ if self.show_metrics and movement_metrics:
+ self._draw_metrics_overlay(vis_frame, movement_metrics)
+
+ return vis_frame
+
+ def generate_overlay_video(self,
+ frames: List[np.ndarray],
+ all_pose_results: List[List[PoseResult]],
+ all_movement_metrics: List[MovementMetrics],
+ output_path: str,
+ fps: float) -> str:
+ """
+ Generate complete video with overlays.
+
+ Args:
+ frames: List of video frames
+ all_pose_results: Pose results for each frame
+ all_movement_metrics: Movement metrics for each frame
+ output_path: Path for output video
+ fps: Frames per second
+
+ Returns:
+ Path to created video
+ """
+ if len(frames) != len(all_pose_results) or len(frames) != len(all_movement_metrics):
+ raise ValueError("Mismatched lengths between frames, poses, and metrics")
+
+ # Reset trails
+ self.trails = {}
+
+ # Process each frame
+ annotated_frames = []
+ for i, (frame, poses, metrics) in enumerate(
+ zip(frames, all_pose_results, all_movement_metrics)
+ ):
+ annotated_frame = self.visualize_frame(frame, poses, metrics, i)
+ annotated_frames.append(annotated_frame)
+
+ # Import video_utils locally to avoid circular import
+ from . import video_utils
+ return video_utils.assemble_video(annotated_frames, output_path, fps)
+
+ def _update_trails(self, pose: PoseResult, person_id: int):
+ """Update motion trails for a person."""
+ if person_id not in self.trails:
+ self.trails[person_id] = {}
+
+ for kp in pose.keypoints:
+ if kp.confidence < 0.3:
+ continue
+
+ if kp.name not in self.trails[person_id]:
+ self.trails[person_id][kp.name] = deque(maxlen=self.trail_length)
+
+            # Store normalized coordinates; they are scaled to pixels at draw time
+            self.trails[person_id][kp.name].append((kp.x, kp.y))
+
+ def _draw_trails(self, frame: np.ndarray, person_id: int):
+ """Draw motion trails for a person."""
+ if person_id not in self.trails:
+ return
+
+ h, w = frame.shape[:2]
+
+ for joint_name, trail in self.trails[person_id].items():
+ if len(trail) < 2:
+ continue
+
+ # Draw trail with fading effect
+ for i in range(1, len(trail)):
+ # Calculate opacity based on position in trail
+ alpha = i / len(trail)
+ color = tuple(int(c * alpha) for c in (255, 255, 255))
+
+ # Convert normalized to pixel coordinates
+ pt1 = (int(trail[i-1][0] * w), int(trail[i-1][1] * h))
+ pt2 = (int(trail[i][0] * w), int(trail[i][1] * h))
+
+ # Draw trail segment
+ cv2.line(frame, pt1, pt2, color, thickness=max(1, int(3 * alpha)))
+
+ def _draw_skeleton(self, frame: np.ndarray, pose: PoseResult, color: Tuple[int, int, int]):
+ """Draw pose skeleton."""
+ h, w = frame.shape[:2]
+
+ # Create keypoint lookup
+ kp_dict = {kp.name: kp for kp in pose.keypoints if kp.confidence > 0.3}
+
+ # Determine which skeleton to use based on available keypoints
+ skeleton = self._get_skeleton_for_model(pose.keypoints)
+
+        # Map skeleton indices to keypoint names
+        keypoint_names = self._get_keypoint_names_for_model(pose.keypoints)
+
+ # Draw skeleton connections
+ for connection in skeleton:
+ idx1, idx2 = connection
+ if idx1 < len(keypoint_names) and idx2 < len(keypoint_names):
+ name1 = keypoint_names[idx1]
+ name2 = keypoint_names[idx2]
+
+ if name1 in kp_dict and name2 in kp_dict:
+ kp1 = kp_dict[name1]
+ kp2 = kp_dict[name2]
+
+ # Convert to pixel coordinates
+ pt1 = (int(kp1.x * w), int(kp1.y * h))
+ pt2 = (int(kp2.x * w), int(kp2.y * h))
+
+ # Draw line
+ cv2.line(frame, pt1, pt2, color, thickness=2)
+
+ def _draw_keypoints(self, frame: np.ndarray, pose: PoseResult,
+ metrics: Optional[MovementMetrics] = None):
+ """Draw individual keypoints."""
+ h, w = frame.shape[:2]
+
+ for kp in pose.keypoints:
+ if kp.confidence < 0.3:
+ continue
+
+ # Convert to pixel coordinates
+ pt = (int(kp.x * w), int(kp.y * h))
+
+ # Color based on confidence
+ color = self._confidence_to_color(kp.confidence)
+
+ # Draw keypoint
+ cv2.circle(frame, pt, 4, color, -1)
+ cv2.circle(frame, pt, 5, (255, 255, 255), 1) # White border
+
+ def _draw_direction_arrow(self, frame: np.ndarray, pose: PoseResult,
+ metrics: MovementMetrics):
+ """Draw arrow indicating movement direction."""
+ if metrics.direction == Direction.STATIONARY:
+ return
+
+ h, w = frame.shape[:2]
+
+ # Get body center
+ center_x = np.mean([kp.x for kp in pose.keypoints if kp.confidence > 0.3])
+ center_y = np.mean([kp.y for kp in pose.keypoints if kp.confidence > 0.3])
+
+ # Convert to pixel coordinates
+ center = (int(center_x * w), int(center_y * h))
+
+ # Calculate arrow endpoint based on direction
+ arrow_length = 50
+ direction_vectors = {
+ Direction.UP: (0, -1),
+ Direction.DOWN: (0, 1),
+ Direction.LEFT: (-1, 0),
+ Direction.RIGHT: (1, 0),
+ }
+
+ if metrics.direction in direction_vectors:
+ dx, dy = direction_vectors[metrics.direction]
+ end_point = (
+ center[0] + int(dx * arrow_length),
+ center[1] + int(dy * arrow_length)
+ )
+
+ # Color based on speed
+ color = self._get_color_for_metrics(metrics)
+
+ # Draw arrow
+ cv2.arrowedLine(frame, center, end_point, color, thickness=3, tipLength=0.3)
+
+ def _draw_metrics_overlay(self, frame: np.ndarray, metrics: MovementMetrics):
+ """Draw text overlay with movement metrics."""
+ # Define text properties
+ font = cv2.FONT_HERSHEY_SIMPLEX
+ font_scale = 0.6
+ thickness = 2
+
+ # Create text lines
+ lines = [
+ f"Direction: {metrics.direction.value}",
+ f"Speed: {metrics.speed.value} ({metrics.velocity:.2f})",
+ f"Intensity: {metrics.intensity.value}",
+ f"Fluidity: {metrics.fluidity:.2f}",
+ f"Expansion: {metrics.expansion:.2f}"
+ ]
+
+ # Draw background rectangle
+ y_offset = 30
+ max_width = max([cv2.getTextSize(line, font, font_scale, thickness)[0][0]
+ for line in lines])
+ bg_height = len(lines) * 25 + 10
+
+ cv2.rectangle(frame, (10, 10), (20 + max_width, 10 + bg_height),
+ (0, 0, 0), -1)
+ cv2.rectangle(frame, (10, 10), (20 + max_width, 10 + bg_height),
+ (255, 255, 255), 1)
+
+ # Draw text
+ for i, line in enumerate(lines):
+ color = (255, 255, 255)
+ if i == 2: # Intensity line
+ color = self.intensity_colors.get(metrics.intensity, (255, 255, 255))
+
+ cv2.putText(frame, line, (15, y_offset + i * 25),
+ font, font_scale, color, thickness)
+
+ def _get_color_for_metrics(self, metrics: Optional[MovementMetrics]) -> Tuple[int, int, int]:
+ """Get color based on movement metrics."""
+ if metrics is None:
+ return (255, 255, 255) # White default
+
+ return self.intensity_colors.get(metrics.intensity, (255, 255, 255))
+
+ def _confidence_to_color(self, confidence: float) -> Tuple[int, int, int]:
+ """Convert confidence score to color (green=high, red=low)."""
+ # Use HSV color space for smooth gradient
+ hue = confidence * 120 # 0=red, 120=green
+ rgb = colorsys.hsv_to_rgb(hue / 360, 1.0, 1.0)
+ return tuple(int(c * 255) for c in reversed(rgb)) # BGR for OpenCV
+
+ def _get_skeleton_for_model(self, keypoints: List[Keypoint]) -> List[Tuple[int, int]]:
+ """Determine which skeleton definition to use based on keypoints."""
+ # Simple heuristic: if we have more than 20 keypoints, use MediaPipe skeleton
+ if len(keypoints) > 20:
+ return self.MEDIAPIPE_SKELETON
+ return self.COCO_SKELETON
+
+ def _get_keypoint_names_for_model(self, keypoints: List[Keypoint]) -> List[str]:
+ """Get ordered list of keypoint names for the model."""
+ # If keypoints have names, use them
+ if keypoints and keypoints[0].name:
+ return [kp.name for kp in keypoints]
+
+ # Otherwise, use default COCO names
+ from .pose_estimation import MoveNetPoseEstimator
+ return MoveNetPoseEstimator.KEYPOINT_NAMES
+
+
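+# Illustrative single-frame usage (a sketch; `frame` and `poses` are assumed to come
+# from video_utils.extract_frames and a pose_estimation.PoseEstimator respectively):
+#
+#   visualizer = PoseVisualizer(show_metrics=False)   # no MovementMetrics needed
+#   annotated = visualizer.visualize_frame(frame, poses, movement_metrics=None)
+
+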
+def create_visualization(
+ video_path: str,
+ pose_results: List[List[PoseResult]],
+ movement_metrics: List[MovementMetrics],
+ output_path: str,
+ show_trails: bool = True,
+ show_metrics: bool = True
+) -> str:
+ """
+ Convenience function to create a visualization from a video file.
+
+ Args:
+ video_path: Path to input video
+ pose_results: Pose detection results
+ movement_metrics: Movement analysis results
+ output_path: Path for output video
+ show_trails: Whether to show motion trails
+ show_metrics: Whether to show metrics overlay
+
+ Returns:
+ Path to created video
+ """
+ from . import video_utils
+
+ # Extract frames
+ frames = list(video_utils.extract_frames(video_path))
+
+ # Get video info
+ _, fps, _ = video_utils.get_video_info(video_path)
+
+ # Create visualizer
+ visualizer = PoseVisualizer(
+ show_trails=show_trails,
+ show_metrics=show_metrics
+ )
+
+ # Generate overlay video
+ return visualizer.generate_overlay_video(
+ frames, pose_results, movement_metrics, output_path, fps
+ )
\ No newline at end of file
diff --git a/backend/gradio_labanmovementanalysis/webrtc_handler.py b/backend/gradio_labanmovementanalysis/webrtc_handler.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f55da3c6f16f2aa3d9c4a74cc793804d266f1ad
--- /dev/null
+++ b/backend/gradio_labanmovementanalysis/webrtc_handler.py
@@ -0,0 +1,293 @@
+"""
+Professional WebRTC handler for real-time video streaming and movement analysis
+Uses the gradio-webrtc component (FastRTC's predecessor, kept for NumPy 1.x compatibility)
+Based on: https://fastrtc.org and https://www.gradio.app/guides/object-detection-from-webcam-with-webrtc
+"""
+
+import cv2
+import numpy as np
+from typing import Optional, Dict, Any, Tuple
+from collections import deque
+import time
+import logging
+import os
+
+from .pose_estimation import get_pose_estimator
+from .notation_engine import MovementAnalyzer
+from .visualizer import PoseVisualizer
+
+logger = logging.getLogger(__name__)
+
+# Official Gradio WebRTC approach (compatible with NumPy 1.x)
+try:
+ from gradio_webrtc import WebRTC
+ HAS_WEBRTC_COMPONENT = True
+except ImportError:
+ HAS_WEBRTC_COMPONENT = False
+
+
+class RealtimeMovementAnalyzer:
+ """Real-time movement analyzer for WebRTC streams following Gradio 5 best practices"""
+
+ # Gradio component compatibility
+ events = {}
+
+ def __init__(self, model: str = "mediapipe-lite", buffer_size: int = 30):
+ """
+ Initialize real-time movement analyzer.
+
+ Args:
+ model: Pose estimation model optimized for real-time processing
+ buffer_size: Number of frames to buffer for analysis
+ """
+ self.model = model
+ self.pose_estimator = get_pose_estimator(model)
+ self.movement_analyzer = MovementAnalyzer(fps=30.0)
+ self.visualizer = PoseVisualizer(
+ trail_length=10,
+ show_skeleton=True,
+ show_trails=True,
+ show_direction_arrows=True,
+ show_metrics=True
+ )
+
+ # Real-time buffers
+ self.pose_buffer = deque(maxlen=buffer_size)
+ self.metrics_buffer = deque(maxlen=buffer_size)
+
+ # Performance tracking
+ self.frame_count = 0
+ self.last_fps_update = time.time()
+ self.current_fps = 0.0
+
+ # Current metrics for display
+ self.current_metrics = {
+ "direction": "stationary",
+ "intensity": "low",
+ "fluidity": 0.0,
+ "expansion": 0.5,
+ "fps": 0.0
+ }
+
+ def process_frame(self, image: np.ndarray, conf_threshold: float = 0.5) -> np.ndarray:
+ """
+ Process a single frame from WebRTC stream for real-time movement analysis.
+
+ Args:
+ image: Input frame from webcam as numpy array (RGB format from WebRTC)
+ conf_threshold: Confidence threshold for pose detection
+
+ Returns:
+ Processed frame with pose overlay and movement metrics
+ """
+ if image is None:
+ return None
+
+ # Convert RGB to BGR for OpenCV processing
+ frame_bgr = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
+
+ # Update frame count and FPS
+ self.frame_count += 1
+ current_time = time.time()
+ if current_time - self.last_fps_update >= 1.0:
+ self.current_fps = self.frame_count / (current_time - self.last_fps_update)
+ self.frame_count = 0
+ self.last_fps_update = current_time
+ self.current_metrics["fps"] = self.current_fps
+
+ # Pose detection
+ pose_results = self.pose_estimator.detect(frame_bgr)
+
+ # Store pose data
+ self.pose_buffer.append(pose_results)
+
+ # Calculate movement metrics if we have enough frames
+ if len(self.pose_buffer) >= 2:
+ recent_poses = list(self.pose_buffer)[-10:] # Last 10 frames for analysis
+
+ try:
+ # Analyze movement from recent poses
+ movement_metrics = self.movement_analyzer.analyze_movement(recent_poses)
+
+ if movement_metrics:
+ latest_metrics = movement_metrics[-1]
+ self.current_metrics.update({
+ "direction": latest_metrics.direction.value if latest_metrics.direction else "stationary",
+ "intensity": latest_metrics.intensity.value if latest_metrics.intensity else "low",
+ "fluidity": latest_metrics.fluidity if latest_metrics.fluidity is not None else 0.0,
+ "expansion": latest_metrics.expansion if latest_metrics.expansion is not None else 0.5
+ })
+
+ self.metrics_buffer.append(self.current_metrics.copy())
+
+ except Exception as e:
+ logger.warning(f"Movement analysis error: {e}")
+
+ # Apply visualization overlays
+ output_frame = self._apply_visualization(frame_bgr, pose_results, self.current_metrics)
+
+ # Convert back to RGB for WebRTC output
+ output_rgb = cv2.cvtColor(output_frame, cv2.COLOR_BGR2RGB)
+
+ return output_rgb
+
+ def _apply_visualization(self, frame: np.ndarray, pose_results: list, metrics: dict) -> np.ndarray:
+ """Apply pose and movement visualization overlays"""
+ output_frame = frame.copy()
+
+ # Draw pose skeleton if detected
+ if pose_results:
+ for pose_result in pose_results:
+                # Draw skeleton (PoseVisualizer draws in place on the frame)
+                self.visualizer._draw_skeleton(output_frame, pose_result, (0, 255, 0))
+
+ # Draw keypoints
+ for keypoint in pose_result.keypoints:
+ if keypoint.confidence > 0.5:
+ x = int(keypoint.x * frame.shape[1])
+ y = int(keypoint.y * frame.shape[0])
+ cv2.circle(output_frame, (x, y), 5, (0, 255, 0), -1)
+
+ # Draw real-time metrics overlay
+ self._draw_metrics_overlay(output_frame, metrics)
+
+ return output_frame
+
+ def _draw_metrics_overlay(self, frame: np.ndarray, metrics: dict):
+ """Draw real-time metrics overlay following professional UI standards"""
+ h, w = frame.shape[:2]
+
+ # Semi-transparent background
+ overlay = frame.copy()
+ cv2.rectangle(overlay, (10, 10), (320, 160), (0, 0, 0), -1)
+ cv2.addWeighted(overlay, 0.3, frame, 0.7, 0, frame)
+
+ # Header
+ cv2.putText(frame, "Real-time Movement Analysis", (20, 35),
+ cv2.FONT_HERSHEY_SIMPLEX, 0.6, (255, 255, 255), 2)
+
+ # Metrics
+ y_offset = 60
+ spacing = 22
+
+ cv2.putText(frame, f"Direction: {metrics['direction']}",
+ (20, y_offset), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
+ y_offset += spacing
+
+ cv2.putText(frame, f"Intensity: {metrics['intensity']}",
+ (20, y_offset), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
+ y_offset += spacing
+
+ cv2.putText(frame, f"Fluidity: {metrics['fluidity']:.2f}",
+ (20, y_offset), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
+ y_offset += spacing
+
+ cv2.putText(frame, f"FPS: {metrics['fps']:.1f}",
+ (20, y_offset), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 0), 1)
+
+ def get_current_metrics(self) -> dict:
+ """Get current movement metrics for external display"""
+ return self.current_metrics.copy()
+
+
+def get_rtc_configuration():
+ """
+ Get RTC configuration for WebRTC.
+ Uses Twilio TURN servers if credentials are available, otherwise uses default.
+ """
+ # For local development, no TURN servers needed
+ # For cloud deployment, set TWILIO_ACCOUNT_SID and TWILIO_AUTH_TOKEN
+
+ twilio_account_sid = os.getenv("TWILIO_ACCOUNT_SID")
+ twilio_auth_token = os.getenv("TWILIO_AUTH_TOKEN")
+
+ if twilio_account_sid and twilio_auth_token:
+ # Use Twilio TURN servers for cloud deployment
+ return {
+ "iceServers": [
+ {"urls": ["stun:global.stun.twilio.com:3478"]},
+ {
+ "urls": ["turn:global.turn.twilio.com:3478?transport=udp"],
+ "username": twilio_account_sid,
+ "credential": twilio_auth_token,
+ },
+ {
+ "urls": ["turn:global.turn.twilio.com:3478?transport=tcp"],
+ "username": twilio_account_sid,
+ "credential": twilio_auth_token,
+ },
+ ]
+ }
+ else:
+ # Default configuration for local development
+ return {
+ "iceServers": [
+ {"urls": ["stun:stun.l.google.com:19302"]}
+ ]
+ }
+
+
+# Global analyzer instance for demo
+_analyzer = None
+
+def get_analyzer(model: str = "mediapipe-lite") -> RealtimeMovementAnalyzer:
+ """Get or create analyzer instance"""
+ global _analyzer
+ if _analyzer is None or _analyzer.model != model:
+ _analyzer = RealtimeMovementAnalyzer(model)
+ return _analyzer
+
+
+def webrtc_detection(image: np.ndarray, model: str, conf_threshold: float = 0.5) -> np.ndarray:
+ """
+ Main detection function for WebRTC streaming.
+ Compatible with Gradio 5 WebRTC streaming API.
+
+ Args:
+ image: Input frame from webcam (RGB format)
+ model: Pose estimation model name
+ conf_threshold: Confidence threshold for pose detection
+
+ Returns:
+ Processed frame with pose overlay and metrics
+ """
+ analyzer = get_analyzer(model)
+ return analyzer.process_frame(image, conf_threshold)
+
+
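+# Illustrative direct call (a sketch; in practice the frame comes from the WebRTC
+# stream, here it is a synthetic RGB image, and MediaPipe must be installed):
+#
+#   dummy_rgb = np.zeros((480, 640, 3), dtype=np.uint8)
+#   annotated = webrtc_detection(dummy_rgb, model="mediapipe-lite", conf_threshold=0.5)
+
+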
+def get_webrtc_interface():
+ """
+ Create streaming interface using built-in Gradio components.
+ Avoids NumPy 2.x dependency conflicts with FastRTC.
+
+ Returns:
+ Tuple of (streaming_config, rtc_configuration)
+ """
+ rtc_config = get_rtc_configuration()
+
+ # Use built-in Gradio streaming capabilities
+ streaming_config = {
+ "sources": ["webcam"],
+ "streaming": True,
+ "mirror_webcam": False
+ }
+
+ return streaming_config, rtc_config
+
+
+# Compatibility exports with Gradio component attributes
+class WebRTCMovementAnalyzer(RealtimeMovementAnalyzer):
+ """Real-time movement analyzer for WebRTC streams following Gradio 5 best practices"""
+ events = {} # Gradio component compatibility
+
+
+class WebRTCGradioInterface:
+ """Create streaming interface using built-in Gradio components.
+ Avoids NumPy 2.x dependency conflicts with FastRTC."""
+
+ events = {} # Gradio component compatibility
+
+ @staticmethod
+ def get_config():
+ return get_webrtc_interface()
\ No newline at end of file
diff --git a/backend/mcp_server.py b/backend/mcp_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..0027d3c210c582e146e60eaa55d8606f3d1843fa
--- /dev/null
+++ b/backend/mcp_server.py
@@ -0,0 +1,413 @@
+"""
+MCP (Model Context Protocol) Server for Laban Movement Analysis
+Provides tools for video movement analysis accessible to AI agents
+"""
+
+import asyncio
+import json
+import os
+import tempfile
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple
+from urllib.parse import urlparse
+import aiofiles
+import httpx
+
+from mcp.server import Server
+from mcp.server.stdio import stdio_server
+from mcp.types import (
+ Tool,
+ TextContent,
+ ImageContent,
+ EmbeddedResource,
+ ToolParameterType,
+ ToolResponse,
+ ToolResult,
+ ToolError
+)
+
+# Add parent directory to path for imports
+import sys
+sys.path.insert(0, str(Path(__file__).parent))
+
+from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+
+class LabanMCPServer:
+ """MCP Server for Laban Movement Analysis"""
+
+ def __init__(self):
+ self.server = Server("laban-movement-analysis")
+ self.analyzer = LabanMovementAnalysis()
+ self.analysis_cache = {}
+ self.temp_dir = tempfile.mkdtemp(prefix="laban_mcp_")
+
+ # Register tools
+ self._register_tools()
+
+ def _register_tools(self):
+ """Register all available tools"""
+
+ @self.server.tool()
+ async def analyze_video(
+ video_path: str,
+ model: str = "mediapipe",
+ enable_visualization: bool = False,
+ include_keypoints: bool = False
+ ) -> ToolResult:
+ """
+ Analyze movement in a video file using Laban Movement Analysis.
+
+ Args:
+ video_path: Path or URL to video file
+ model: Pose estimation model ('mediapipe', 'movenet', 'yolo')
+ enable_visualization: Generate annotated video output
+ include_keypoints: Include raw keypoint data in JSON
+
+ Returns:
+ Movement analysis results and optional visualization
+ """
+ try:
+ # Handle URL vs local path
+ if video_path.startswith(('http://', 'https://')):
+ video_path = await self._download_video(video_path)
+
+ # Process video
+ json_output, viz_video = await asyncio.to_thread(
+ self.analyzer.process_video,
+ video_path,
+ model=model,
+ enable_visualization=enable_visualization,
+ include_keypoints=include_keypoints
+ )
+
+ # Store in cache
+ analysis_id = f"{Path(video_path).stem}_{datetime.now().isoformat()}"
+ self.analysis_cache[analysis_id] = {
+ "json_output": json_output,
+ "viz_video": viz_video,
+ "timestamp": datetime.now().isoformat()
+ }
+
+ # Format response
+ response_data = {
+ "analysis_id": analysis_id,
+ "analysis": json_output,
+ "visualization_path": viz_video if viz_video else None
+ }
+
+ return ToolResult(
+ success=True,
+ content=[TextContent(text=json.dumps(response_data, indent=2))]
+ )
+
+ except Exception as e:
+ return ToolResult(
+ success=False,
+ error=ToolError(message=f"Analysis failed: {str(e)}")
+ )
+
+ @self.server.tool()
+ async def get_analysis_summary(
+ analysis_id: str
+ ) -> ToolResult:
+ """
+ Get a human-readable summary of a previous analysis.
+
+ Args:
+ analysis_id: ID of the analysis to summarize
+
+ Returns:
+ Summary of movement analysis
+ """
+ try:
+ if analysis_id not in self.analysis_cache:
+ return ToolResult(
+ success=False,
+ error=ToolError(message=f"Analysis ID '{analysis_id}' not found")
+ )
+
+ analysis_data = self.analysis_cache[analysis_id]["json_output"]
+
+ # Extract key information
+ summary = self._generate_summary(analysis_data)
+
+ return ToolResult(
+ success=True,
+ content=[TextContent(text=summary)]
+ )
+
+ except Exception as e:
+ return ToolResult(
+ success=False,
+ error=ToolError(message=f"Summary generation failed: {str(e)}")
+ )
+
+ @self.server.tool()
+ async def list_available_models() -> ToolResult:
+ """
+ List available pose estimation models with their characteristics.
+
+ Returns:
+ Information about available models
+ """
+ models_info = {
+ "mediapipe": {
+ "name": "MediaPipe Pose",
+ "keypoints": 33,
+ "dimensions": "3D",
+ "optimization": "CPU",
+ "best_for": "Single person, detailed analysis",
+ "speed": "Fast"
+ },
+ "movenet": {
+ "name": "MoveNet",
+ "keypoints": 17,
+ "dimensions": "2D",
+ "optimization": "Mobile/Edge",
+ "best_for": "Real-time applications, mobile devices",
+ "speed": "Very Fast"
+ },
+ "yolo": {
+ "name": "YOLO Pose",
+ "keypoints": 17,
+ "dimensions": "2D",
+ "optimization": "GPU",
+ "best_for": "Multi-person detection",
+ "speed": "Fast (with GPU)"
+ }
+ }
+
+ return ToolResult(
+ success=True,
+ content=[TextContent(text=json.dumps(models_info, indent=2))]
+ )
+
+ @self.server.tool()
+ async def batch_analyze(
+ video_paths: List[str],
+ model: str = "mediapipe",
+ parallel: bool = True
+ ) -> ToolResult:
+ """
+ Analyze multiple videos in batch.
+
+ Args:
+ video_paths: List of video paths or URLs
+ model: Pose estimation model to use
+ parallel: Process videos in parallel
+
+ Returns:
+ Batch analysis results
+ """
+ try:
+ results = {}
+
+ if parallel:
+ # Process in parallel
+ tasks = []
+ for path in video_paths:
+ task = self._analyze_single_video(path, model)
+ tasks.append(task)
+
+ analyses = await asyncio.gather(*tasks)
+
+ for path, analysis in zip(video_paths, analyses):
+ results[path] = analysis
+ else:
+ # Process sequentially
+ for path in video_paths:
+ results[path] = await self._analyze_single_video(path, model)
+
+ return ToolResult(
+ success=True,
+ content=[TextContent(text=json.dumps(results, indent=2))]
+ )
+
+ except Exception as e:
+ return ToolResult(
+ success=False,
+ error=ToolError(message=f"Batch analysis failed: {str(e)}")
+ )
+
+ @self.server.tool()
+ async def compare_movements(
+ analysis_id1: str,
+ analysis_id2: str
+ ) -> ToolResult:
+ """
+ Compare movement patterns between two analyzed videos.
+
+ Args:
+ analysis_id1: First analysis ID
+ analysis_id2: Second analysis ID
+
+ Returns:
+ Comparison of movement metrics
+ """
+ try:
+ if analysis_id1 not in self.analysis_cache:
+ return ToolResult(
+ success=False,
+ error=ToolError(message=f"Analysis ID '{analysis_id1}' not found")
+ )
+
+ if analysis_id2 not in self.analysis_cache:
+ return ToolResult(
+ success=False,
+ error=ToolError(message=f"Analysis ID '{analysis_id2}' not found")
+ )
+
+ # Get analyses
+ analysis1 = self.analysis_cache[analysis_id1]["json_output"]
+ analysis2 = self.analysis_cache[analysis_id2]["json_output"]
+
+ # Compare metrics
+ comparison = self._compare_analyses(analysis1, analysis2)
+
+ return ToolResult(
+ success=True,
+ content=[TextContent(text=json.dumps(comparison, indent=2))]
+ )
+
+ except Exception as e:
+ return ToolResult(
+ success=False,
+ error=ToolError(message=f"Comparison failed: {str(e)}")
+ )
+
+ async def _download_video(self, url: str) -> str:
+ """Download video from URL to temporary file"""
+ async with httpx.AsyncClient() as client:
+ response = await client.get(url)
+ response.raise_for_status()
+
+ # Save to temp file
+ filename = Path(urlparse(url).path).name or "video.mp4"
+ temp_path = os.path.join(self.temp_dir, filename)
+
+ async with aiofiles.open(temp_path, 'wb') as f:
+ await f.write(response.content)
+
+ return temp_path
+
+ async def _analyze_single_video(self, path: str, model: str) -> Dict[str, Any]:
+ """Analyze a single video"""
+ try:
+ if path.startswith(('http://', 'https://')):
+ path = await self._download_video(path)
+
+ json_output, _ = await asyncio.to_thread(
+ self.analyzer.process_video,
+ path,
+ model=model,
+ enable_visualization=False
+ )
+
+ return {
+ "status": "success",
+ "analysis": json_output
+ }
+ except Exception as e:
+ return {
+ "status": "error",
+ "error": str(e)
+ }
+
+ def _generate_summary(self, analysis_data: Dict[str, Any]) -> str:
+ """Generate human-readable summary from analysis data"""
+ summary_parts = []
+
+ # Video info
+ video_info = analysis_data.get("video_info", {})
+ summary_parts.append(f"Video Analysis Summary")
+ summary_parts.append(f"Duration: {video_info.get('duration_seconds', 0):.1f} seconds")
+ summary_parts.append(f"Resolution: {video_info.get('width', 0)}x{video_info.get('height', 0)}")
+ summary_parts.append("")
+
+ # Movement summary
+ movement_summary = analysis_data.get("movement_analysis", {}).get("summary", {})
+
+ # Direction analysis
+ direction_data = movement_summary.get("direction", {})
+ dominant_direction = direction_data.get("dominant", "unknown")
+ summary_parts.append(f"Dominant Movement Direction: {dominant_direction}")
+
+ # Intensity analysis
+ intensity_data = movement_summary.get("intensity", {})
+ dominant_intensity = intensity_data.get("dominant", "unknown")
+ summary_parts.append(f"Movement Intensity: {dominant_intensity}")
+
+ # Speed analysis
+ speed_data = movement_summary.get("speed", {})
+ dominant_speed = speed_data.get("dominant", "unknown")
+ summary_parts.append(f"Movement Speed: {dominant_speed}")
+
+ # Segments
+ segments = movement_summary.get("movement_segments", [])
+ if segments:
+ summary_parts.append(f"\nMovement Segments: {len(segments)}")
+ for i, segment in enumerate(segments[:3]): # Show first 3
+ start_time = segment.get("start_time", 0)
+ end_time = segment.get("end_time", 0)
+ movement_type = segment.get("movement_type", "unknown")
+ summary_parts.append(f" Segment {i+1}: {movement_type} ({start_time:.1f}s - {end_time:.1f}s)")
+
+ return "\n".join(summary_parts)
+
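+    # Illustrative output of _generate_summary (format only; the values are placeholders):
+    #
+    #   Video Analysis Summary
+    #   Duration: 12.5 seconds
+    #   Resolution: 1280x720
+    #
+    #   Dominant Movement Direction: up
+    #   Movement Intensity: medium
+    #   Movement Speed: fast
+    #
+    #   Movement Segments: 2
+    #     Segment 1: sustained (0.0s - 4.2s)
+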
+ def _compare_analyses(self, analysis1: Dict, analysis2: Dict) -> Dict[str, Any]:
+ """Compare two movement analyses"""
+ comparison = {
+ "video1_info": analysis1.get("video_info", {}),
+ "video2_info": analysis2.get("video_info", {}),
+ "metric_comparison": {}
+ }
+
+ # Compare summaries
+ summary1 = analysis1.get("movement_analysis", {}).get("summary", {})
+ summary2 = analysis2.get("movement_analysis", {}).get("summary", {})
+
+ # Compare directions
+ dir1 = summary1.get("direction", {})
+ dir2 = summary2.get("direction", {})
+ comparison["metric_comparison"]["direction"] = {
+ "video1_dominant": dir1.get("dominant", "unknown"),
+ "video2_dominant": dir2.get("dominant", "unknown"),
+ "match": dir1.get("dominant") == dir2.get("dominant")
+ }
+
+ # Compare intensity
+ int1 = summary1.get("intensity", {})
+ int2 = summary2.get("intensity", {})
+ comparison["metric_comparison"]["intensity"] = {
+ "video1_dominant": int1.get("dominant", "unknown"),
+ "video2_dominant": int2.get("dominant", "unknown"),
+ "match": int1.get("dominant") == int2.get("dominant")
+ }
+
+ # Compare speed
+ speed1 = summary1.get("speed", {})
+ speed2 = summary2.get("speed", {})
+ comparison["metric_comparison"]["speed"] = {
+ "video1_dominant": speed1.get("dominant", "unknown"),
+ "video2_dominant": speed2.get("dominant", "unknown"),
+ "match": speed1.get("dominant") == speed2.get("dominant")
+ }
+
+ return comparison
+
+ async def run(self):
+ """Run the MCP server"""
+ async with stdio_server() as (read_stream, write_stream):
+ await self.server.run(read_stream, write_stream)
+
+
+async def main():
+ """Main entry point"""
+ server = LabanMCPServer()
+ await server.run()
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/backend/requirements-mcp.txt b/backend/requirements-mcp.txt
new file mode 100644
index 0000000000000000000000000000000000000000..3a5716a0172d1be127e97594a588bd6cbf96a3a8
--- /dev/null
+++ b/backend/requirements-mcp.txt
@@ -0,0 +1,27 @@
+# MCP Server Dependencies
+mcp>=1.0.0
+aiofiles>=23.0.0
+httpx>=0.24.0
+
+# Core dependencies (include from main requirements)
+gradio>=5.0,<6.0
+opencv-python>=4.8.0
+numpy>=1.24.0,<2.0.0 # Pin to 1.x for compatibility with MediaPipe/pandas
+mediapipe>=0.10.0
+tensorflow>=2.13.0 # For MoveNet
+tensorflow-hub>=0.14.0 # For MoveNet models
+ultralytics>=8.0.0 # For YOLO v8/v11
+torch>=2.0.0
+torchvision>=0.15.0
+
+# Video platform support
+yt-dlp>=2023.7.6 # YouTube/Vimeo downloads
+requests>=2.31.0 # Direct video downloads
+
+# Enhanced model support
+transformers>=4.35.0
+accelerate>=0.24.0 # For model optimization
+
+# WebRTC support (Official Gradio approach)
+gradio-webrtc # Official WebRTC component for Gradio
+twilio>=8.2.0 # TURN servers for cloud deployment (optional)
\ No newline at end of file
diff --git a/backend/requirements.txt b/backend/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..43b772d4a28987f9e69e2b3635535e00b6a1961a
--- /dev/null
+++ b/backend/requirements.txt
@@ -0,0 +1,20 @@
+# Core dependencies
+gradio>=5.0.0
+numpy>=1.20.0
+opencv-python>=4.5.0
+
+# Pose estimation model dependencies (install based on your choice)
+# For MediaPipe:
+mediapipe>=0.10.21
+
+# For MoveNet (TensorFlow):
+tensorflow>=2.8.0
+tensorflow-hub>=0.12.0
+
+# For YOLO:
+ultralytics>=8.0.0
+
+# Optional dependencies for development
+# pytest>=7.0.0
+# black>=22.0.0
+# flake8>=4.0.0
\ No newline at end of file
diff --git a/demo/__init__.py b/demo/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/demo/app.py b/demo/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..11947dc3c0f22aaaf76e8b1586639cf91fa07ab5
--- /dev/null
+++ b/demo/app.py
@@ -0,0 +1,866 @@
+"""
+Unified Laban Movement Analysis Demo
+Comprehensive interface combining all features:
+- Standard LMA analysis
+- Enhanced features (WebRTC, YouTube/Vimeo)
+- Agent API (batch processing, filtering)
+- Real-time analysis
+- Model comparison
+
+Created by: Csaba Bolyós (BladeSzaSza)
+Contact: bladeszasza@gmail.com
+GitHub: https://github.com/bladeszasza
+LinkedIn: https://www.linkedin.com/in/csaba-bolyΓ³s-00a11767/
+Hugging Face: https://huggingface.co/BladeSzaSza
+
+Heavy Beta Version - Under Active Development
+"""
+
+import gradio as gr
+import sys
+from pathlib import Path
+from typing import Dict, Any, List, Tuple
+
+# Add parent directory to path
+sys.path.insert(0, str(Path(__file__).parent.parent / "backend"))
+
+from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+# Import agent API if available
+try:
+ from gradio_labanmovementanalysis.agent_api import (
+ LabanAgentAPI,
+ PoseModel,
+ MovementDirection,
+ MovementIntensity
+ )
+ HAS_AGENT_API = True
+except ImportError:
+ HAS_AGENT_API = False
+
+# Import WebRTC components if available
+try:
+ from gradio_webrtc import WebRTC
+ from gradio_labanmovementanalysis.webrtc_handler import (
+ webrtc_detection,
+ get_rtc_configuration
+ )
+ HAS_WEBRTC = True
+except ImportError as e:
+ print(f"WebRTC import failed: {e}")
+ HAS_WEBRTC = False
+
+# Initialize components
+try:
+ # Initialize with WebRTC support
+ analyzer = LabanMovementAnalysis(
+ enable_webrtc=True,
+ enable_visualization=True
+ )
+    print("Core features initialized successfully")
+except Exception as e:
+ print(f"Warning: Some features may not be available: {e}")
+ analyzer = LabanMovementAnalysis(enable_webrtc=False)
+
+# Initialize agent API if available
+agent_api = None
+if HAS_AGENT_API:
+ try:
+ agent_api = LabanAgentAPI()
+ except Exception as e:
+ print(f"Warning: Agent API not available: {e}")
+ agent_api = None
+
+
+def process_video_standard(video, model, enable_viz, include_keypoints):
+ """Standard video processing function."""
+ if video is None:
+ return None, None
+
+ try:
+ json_output, video_output = analyzer.process_video(
+ video,
+ model=model,
+ enable_visualization=enable_viz,
+ include_keypoints=include_keypoints
+ )
+ return json_output, video_output
+ except Exception as e:
+ return {"error": str(e)}, None
+
+
+def process_video_enhanced(video_input, model, enable_viz, include_keypoints):
+ """Enhanced video processing with all new features."""
+ if not video_input:
+ return {"error": "No video provided"}, None
+
+ try:
+ # Handle both file upload and URL input
+ video_path = video_input.name if hasattr(video_input, 'name') else video_input
+
+ json_result, viz_result = analyzer.process_video(
+ video_path,
+ model=model,
+ enable_visualization=enable_viz,
+ include_keypoints=include_keypoints
+ )
+ return json_result, viz_result
+ except Exception as e:
+ error_result = {"error": str(e)}
+ return error_result, None
+
+
+def process_video_for_agent(video, model, output_format="summary"):
+ """Process video with agent-friendly output format."""
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not video:
+ return {"error": "No video provided"}
+
+ try:
+ model_enum = PoseModel(model)
+ result = agent_api.analyze(video, model=model_enum, generate_visualization=False)
+
+ if output_format == "summary":
+ return {"summary": agent_api.get_movement_summary(result)}
+ elif output_format == "structured":
+ return {
+ "success": result.success,
+ "direction": result.dominant_direction.value,
+ "intensity": result.dominant_intensity.value,
+ "speed": result.dominant_speed,
+ "fluidity": result.fluidity_score,
+ "expansion": result.expansion_score,
+ "segments": len(result.movement_segments)
+ }
+ else: # json
+ return result.raw_data
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def batch_process_videos(files, model):
+ """Process multiple videos in batch."""
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not files:
+ return {"error": "No videos provided"}
+
+ try:
+ video_paths = [f.name for f in files]
+ results = agent_api.batch_analyze(video_paths, model=PoseModel(model), parallel=True)
+
+ output = {
+ "total_videos": len(results),
+ "successful": sum(1 for r in results if r.success),
+ "failed": sum(1 for r in results if not r.success),
+ "results": []
+ }
+
+ for result in results:
+ output["results"].append({
+ "video": Path(result.video_path).name,
+ "success": result.success,
+ "summary": agent_api.get_movement_summary(result) if result.success else result.error
+ })
+
+ return output
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def filter_videos_by_movement(files, direction, intensity, min_fluidity, min_expansion):
+ """Filter videos based on movement characteristics."""
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not files:
+ return {"error": "No videos provided"}
+
+ try:
+ video_paths = [f.name for f in files]
+
+ dir_filter = MovementDirection(direction) if direction != "any" else None
+ int_filter = MovementIntensity(intensity) if intensity != "any" else None
+
+ filtered = agent_api.filter_by_movement(
+ video_paths,
+ direction=dir_filter,
+ intensity=int_filter,
+ min_fluidity=min_fluidity if min_fluidity > 0 else None,
+ min_expansion=min_expansion if min_expansion > 0 else None
+ )
+
+ return {
+ "total_analyzed": len(video_paths),
+ "matching_videos": len(filtered),
+ "matches": [
+ {
+ "video": Path(r.video_path).name,
+ "direction": r.dominant_direction.value,
+ "intensity": r.dominant_intensity.value,
+ "fluidity": r.fluidity_score,
+ "expansion": r.expansion_score
+ }
+ for r in filtered
+ ]
+ }
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def compare_models(video, model1, model2):
+ """Compare two different pose models on the same video."""
+ if not video:
+        return [["Error", "No video provided", "", ""]]
+
+ try:
+ # Analyze with both models
+ result1, _ = analyzer.process_video(video, model=model1, enable_visualization=False)
+ result2, _ = analyzer.process_video(video, model=model2, enable_visualization=False)
+
+ # Extract key metrics for comparison
+ def extract_metrics(result):
+ summary = result.get("movement_analysis", {}).get("summary", {})
+ return {
+ "direction": summary.get("direction", {}).get("dominant", "unknown"),
+ "intensity": summary.get("intensity", {}).get("dominant", "unknown"),
+ "speed": summary.get("speed", {}).get("dominant", "unknown"),
+ "frame_count": result.get("video_info", {}).get("frame_count", 0)
+ }
+
+ metrics1 = extract_metrics(result1)
+ metrics2 = extract_metrics(result2)
+
+ # Create comparison table data
+ comparison_data = [
+            ["Direction", metrics1["direction"], metrics2["direction"],
+             "✓" if metrics1["direction"] == metrics2["direction"] else "✗"],
+            ["Intensity", metrics1["intensity"], metrics2["intensity"],
+             "✓" if metrics1["intensity"] == metrics2["intensity"] else "✗"],
+            ["Speed", metrics1["speed"], metrics2["speed"],
+             "✓" if metrics1["speed"] == metrics2["speed"] else "✗"],
+            ["Frames Processed", str(metrics1["frame_count"]), str(metrics2["frame_count"]),
+             "✓" if metrics1["frame_count"] == metrics2["frame_count"] else "✗"]
+ ]
+
+ return comparison_data
+
+ except Exception as e:
+ return [["Error", str(e), "", ""]]
+
+
+def start_webrtc_stream(model):
+ """Start WebRTC real-time analysis."""
+ try:
+ success = analyzer.start_webrtc_stream(model)
+ if success:
+ return "π’ Stream Active", {"status": "streaming", "model": model}
+ else:
+ return "π΄ Failed to start", {"status": "error"}
+ except Exception as e:
+ return f"π΄ Error: {str(e)}", {"status": "error"}
+
+
+def stop_webrtc_stream():
+ """Stop WebRTC real-time analysis."""
+ try:
+ success = analyzer.stop_webrtc_stream()
+ if success:
+ return "π‘ Stream Stopped", {"status": "stopped"}
+ else:
+ return "π΄ Failed to stop", {"status": "error"}
+ except Exception as e:
+ return f"π΄ Error: {str(e)}", {"status": "error"}
+
+
+def create_unified_demo():
+ """Create the unified comprehensive demo."""
+
+ with gr.Blocks(
+ title="Laban Movement Analysis - Complete Suite by Csaba BolyΓ³s",
+ theme=gr.themes.Soft(),
+ css="""
+ .main-header {
+ background: linear-gradient(135deg, #40826D 0%, #2E5E4A 50%, #1B3A2F 100%);
+ color: white;
+ padding: 30px;
+ border-radius: 10px;
+ margin-bottom: 20px;
+ text-align: center;
+ }
+ .feature-card {
+ border: 1px solid #e1e5e9;
+ border-radius: 8px;
+ padding: 16px;
+ margin: 8px 0;
+ background: #f8f9fa;
+ }
+ .json-output {
+ max-height: 600px;
+ overflow-y: auto;
+ font-family: monospace;
+ font-size: 12px;
+ }
+ .author-info {
+ background: linear-gradient(135deg, #40826D 0%, #2E5E4A 100%);
+ color: white;
+ padding: 15px;
+ border-radius: 8px;
+ margin: 10px 0;
+ text-align: center;
+ }
+ """
+ ) as demo:
+
+ # Main Header
+        gr.HTML("""
+        <div class="main-header">
+            <h1>Laban Movement Analysis - Complete Suite</h1>
+            <p>
+                Professional movement analysis with pose estimation, AI action recognition,
+                real-time processing, and agent automation
+            </p>
+            <p>
+                Supports YouTube/Vimeo URLs • WebRTC Streaming • 20+ Pose Models • MCP Integration
+            </p>
+            <p>
+                Version 0.01-beta - Heavy Beta Under Active Development
+            </p>
+        </div>
+        """)
+
+ with gr.Tabs():
+ # Tab 1: Standard Analysis
+ with gr.Tab("π¬ Standard Analysis"):
+ gr.Markdown("""
+ ### Classic Laban Movement Analysis
+ Upload a video file to analyze movement using traditional LMA metrics with pose estimation.
+ """)
+
+ with gr.Row():
+ with gr.Column(scale=1):
+ video_input_std = gr.Video(
+ label="Upload Video",
+ sources=["upload"],
+ format="mp4"
+ )
+
+ model_dropdown_std = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Pose Estimation Model"
+ )
+
+ with gr.Row():
+ enable_viz_std = gr.Checkbox(
+ value=True,
+ label="Generate Visualization"
+ )
+
+ include_keypoints_std = gr.Checkbox(
+ value=False,
+ label="Include Keypoints"
+ )
+
+ process_btn_std = gr.Button("Analyze Movement", variant="primary")
+
+ gr.Examples(
+ examples=[
+ ["examples/balette.mov"],
+ ["examples/balette.mp4"],
+ ],
+ inputs=video_input_std,
+ label="Example Videos"
+ )
+
+ with gr.Column(scale=2):
+ with gr.Tab("Analysis Results"):
+ json_output_std = gr.JSON(
+ label="Movement Analysis (JSON)",
+ elem_classes=["json-output"]
+ )
+
+ with gr.Tab("Visualization"):
+ video_output_std = gr.Video(
+ label="Annotated Video",
+ format="mp4"
+ )
+
+ gr.Markdown("""
+ **Visualization Guide:**
+ - 𦴠**Skeleton**: Pose keypoints and connections
+ - π **Trails**: Motion history (fading lines)
+ - β‘οΈ **Arrows**: Movement direction indicators
+ - π¨ **Colors**: Green (low) β Orange (medium) β Red (high) intensity
+ """)
+
+ process_btn_std.click(
+ fn=process_video_standard,
+ inputs=[video_input_std, model_dropdown_std, enable_viz_std, include_keypoints_std],
+ outputs=[json_output_std, video_output_std],
+ api_name="analyze_standard"
+ )
+
+ # Tab 2: Enhanced Analysis
+ with gr.Tab("π Enhanced Analysis"):
+ gr.Markdown("""
+ ### Advanced Analysis with AI and URL Support
+ Analyze videos from URLs (YouTube/Vimeo), use advanced pose models, and get AI-powered insights.
+ """)
+
+ with gr.Row():
+ with gr.Column(scale=1):
+ gr.HTML('')
+ gr.Markdown("**Video Input**")
+
+ # Changed from textbox to file upload as requested
+ video_input_enh = gr.File(
+ label="Upload Video or Drop File",
+ file_types=["video"],
+ type="filepath"
+ )
+
+ # URL input option
+ url_input_enh = gr.Textbox(
+ label="Or Enter Video URL",
+ placeholder="YouTube URL, Vimeo URL, or direct video URL",
+ info="Leave file upload empty to use URL"
+ )
+
+ gr.Examples(
+ examples=[
+ ["examples/balette.mov"],
+ ["https://www.youtube.com/shorts/RX9kH2l3L8U"],
+ ["https://vimeo.com/815392738"]
+ ],
+ inputs=url_input_enh,
+ label="Example URLs"
+ )
+
+ gr.Markdown("**Model Selection**")
+
+ model_select_enh = gr.Dropdown(
+ choices=[
+ # MediaPipe variants
+ "mediapipe-lite", "mediapipe-full", "mediapipe-heavy",
+ # MoveNet variants
+ "movenet-lightning", "movenet-thunder",
+ # YOLO variants (added X models)
+ "yolo-v8-n", "yolo-v8-s", "yolo-v8-m", "yolo-v8-l", "yolo-v8-x",
+ # YOLO v11 variants
+ "yolo-v11-n", "yolo-v11-s", "yolo-v11-m", "yolo-v11-l", "yolo-v11-x"
+ ],
+ value="mediapipe-full",
+ label="Advanced Pose Models",
+ info="17+ model variants available"
+ )
+
+ gr.Markdown("**Analysis Options**")
+
+ with gr.Row():
+ enable_viz_enh = gr.Checkbox(value=True, label="Visualization")
+
+ with gr.Row():
+ include_keypoints_enh = gr.Checkbox(value=False, label="Raw Keypoints")
+
+ analyze_btn_enh = gr.Button("π Enhanced Analysis", variant="primary", size="lg")
+                    gr.HTML('')
+
+ with gr.Column(scale=2):
+ with gr.Tab("π Analysis"):
+ analysis_output_enh = gr.JSON(label="Enhanced Analysis Results")
+
+ with gr.Tab("π₯ Visualization"):
+ viz_output_enh = gr.Video(label="Annotated Video")
+
+ def process_enhanced_input(file_input, url_input, model, enable_viz, include_keypoints):
+ """Process either file upload or URL input."""
+ video_source = file_input if file_input else url_input
+ return process_video_enhanced(video_source, model, enable_viz, include_keypoints)
+
+ analyze_btn_enh.click(
+ fn=process_enhanced_input,
+ inputs=[video_input_enh, url_input_enh, model_select_enh, enable_viz_enh, include_keypoints_enh],
+ outputs=[analysis_output_enh, viz_output_enh],
+ api_name="analyze_enhanced"
+ )
+
+ # Tab 3: Agent API
+ with gr.Tab("π€ Agent API"):
+ gr.Markdown("""
+ ### AI Agent & Automation Features
+ Batch processing, filtering, and structured outputs designed for AI agents and automation.
+ """)
+
+ with gr.Tabs():
+ with gr.Tab("Single Analysis"):
+ with gr.Row():
+ with gr.Column():
+ video_input_agent = gr.Video(label="Upload Video", sources=["upload"])
+ model_select_agent = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Model"
+ )
+ output_format_agent = gr.Radio(
+ choices=["summary", "structured", "json"],
+ value="summary",
+ label="Output Format"
+ )
+ analyze_btn_agent = gr.Button("Analyze", variant="primary")
+
+ with gr.Column():
+ output_display_agent = gr.JSON(label="Agent Output")
+
+ analyze_btn_agent.click(
+ fn=process_video_for_agent,
+ inputs=[video_input_agent, model_select_agent, output_format_agent],
+ outputs=output_display_agent,
+ api_name="analyze_agent"
+ )
+
+ with gr.Tab("Batch Processing"):
+ with gr.Row():
+ with gr.Column():
+ batch_files = gr.File(
+ label="Upload Multiple Videos",
+ file_count="multiple",
+ file_types=["video"]
+ )
+ batch_model = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Model"
+ )
+ batch_btn = gr.Button("Process Batch", variant="primary")
+
+ with gr.Column():
+ batch_output = gr.JSON(label="Batch Results")
+
+ batch_btn.click(
+ fn=batch_process_videos,
+ inputs=[batch_files, batch_model],
+ outputs=batch_output,
+ api_name="batch_analyze"
+ )
+
+ with gr.Tab("Movement Filter"):
+ with gr.Row():
+ with gr.Column():
+ filter_files = gr.File(
+ label="Videos to Filter",
+ file_count="multiple",
+ file_types=["video"]
+ )
+
+ with gr.Group():
+ direction_filter = gr.Dropdown(
+ choices=["any", "up", "down", "left", "right", "stationary"],
+ value="any",
+ label="Direction Filter"
+ )
+ intensity_filter = gr.Dropdown(
+ choices=["any", "low", "medium", "high"],
+ value="any",
+ label="Intensity Filter"
+ )
+ fluidity_threshold = gr.Slider(0.0, 1.0, 0.0, label="Min Fluidity")
+ expansion_threshold = gr.Slider(0.0, 1.0, 0.0, label="Min Expansion")
+
+ filter_btn = gr.Button("Apply Filters", variant="primary")
+
+ with gr.Column():
+ filter_output = gr.JSON(label="Filtered Results")
+
+ filter_btn.click(
+ fn=filter_videos_by_movement,
+ inputs=[filter_files, direction_filter, intensity_filter,
+ fluidity_threshold, expansion_threshold],
+ outputs=filter_output,
+ api_name="filter_videos"
+ )
+
+ # Tab 4: Real-time WebRTC
+ with gr.Tab("πΉ Real-time Analysis"):
+ gr.Markdown("""
+ ### Live Camera Movement Analysis
+ Real-time pose detection and movement analysis from your webcam using WebRTC.
+            **Grant camera permission when prompted for the best experience.**
+ """)
+
+ # Official Gradio WebRTC approach (compatible with NumPy 1.x)
+ if HAS_WEBRTC:
+
+ # Get RTC configuration
+ rtc_config = get_rtc_configuration()
+
+ # Custom CSS following official guide
+ css_webrtc = """
+ .my-group {max-width: 480px !important; max-height: 480px !important;}
+ .my-column {display: flex !important; justify-content: center !important; align-items: center !important;}
+ """
+
+ with gr.Column(elem_classes=["my-column"]):
+ with gr.Group(elem_classes=["my-group"]):
+ # Official WebRTC Component
+ webrtc_stream = WebRTC(
+ label="π₯ Live Camera Stream",
+ rtc_configuration=rtc_config
+ )
+
+ webrtc_model = gr.Dropdown(
+ choices=["mediapipe-lite", "movenet-lightning", "yolo-v11-n"],
+ value="mediapipe-lite",
+ label="Pose Model",
+ info="Optimized for real-time processing"
+ )
+
+ confidence_slider = gr.Slider(
+ label="Detection Confidence",
+ minimum=0.0,
+ maximum=1.0,
+ step=0.05,
+ value=0.5,
+ info="Higher = fewer false positives"
+ )
+
+ # Official WebRTC streaming setup following Gradio guide
+ webrtc_stream.stream(
+ fn=webrtc_detection,
+ inputs=[webrtc_stream, webrtc_model, confidence_slider],
+ outputs=[webrtc_stream],
+ time_limit=10 # Following official guide: 10 seconds per user
+ )
+
+ # Info display
+                    gr.HTML("""
+                    <div class="feature-card">
+                        <h4>WebRTC Pose Analysis</h4>
+                        <p>Real-time movement analysis using your webcam</p>
+                        <h4>Privacy</h4>
+                        <p>Processing happens locally - no video data stored</p>
+                        <h4>Usage</h4>
+                        <ul>
+                            <li>Grant camera permission when prompted</li>
+                            <li>Move in front of camera to see pose detection</li>
+                            <li>Adjust confidence threshold as needed</li>
+                        </ul>
+                    </div>
+                    """)
+
+ else:
+ # Fallback if WebRTC component not available
+                gr.HTML("""
+                <div class="feature-card">
+                    <h4>WebRTC Component Required</h4>
+                    <p>To enable real-time camera analysis, install:</p>
+                    <pre><code>pip install gradio-webrtc twilio</code></pre>
+                    <p>In the meantime, use the Enhanced Analysis tab for video files.</p>
+                </div>
+                """)
+
+ # Tab 5: Model Comparison
+ with gr.Tab("βοΈ Model Comparison"):
+ gr.Markdown("""
+ ### Compare Pose Estimation Models
+ Analyze the same video with different models to compare accuracy and results.
+ """)
+
+ with gr.Column():
+ comparison_video = gr.Video(
+ label="Video for Comparison",
+ sources=["upload"]
+ )
+
+ with gr.Row():
+ model1_comp = gr.Dropdown(
+ choices=["mediapipe-full", "movenet-thunder", "yolo-v11-s"],
+ value="mediapipe-full",
+ label="Model 1"
+ )
+
+ model2_comp = gr.Dropdown(
+ choices=["mediapipe-full", "movenet-thunder", "yolo-v11-s"],
+ value="yolo-v11-s",
+ label="Model 2"
+ )
+
+ compare_btn = gr.Button("π Compare Models", variant="primary")
+
+ comparison_results = gr.DataFrame(
+ headers=["Metric", "Model 1", "Model 2", "Match"],
+ label="Comparison Results"
+ )
+
+ compare_btn.click(
+ fn=compare_models,
+ inputs=[comparison_video, model1_comp, model2_comp],
+ outputs=comparison_results,
+ api_name="compare_models"
+ )
+
+ # Tab 6: Documentation
+ with gr.Tab("π Documentation"):
+ gr.Markdown("""
+ # Complete Feature Documentation
+
+ ## π₯ Video Input Support
+ - **Local Files**: MP4, AVI, MOV, WebM formats
+ - **YouTube**: Automatic download from YouTube URLs
+ - **Vimeo**: Automatic download from Vimeo URLs
+ - **Direct URLs**: Any direct video file URL
+
+ ## π€ Pose Estimation Models
+
+ ### MediaPipe (Google) - 33 3D Landmarks
+ - **Lite**: Fastest CPU performance
+ - **Full**: Balanced accuracy/speed (recommended)
+ - **Heavy**: Highest accuracy
+
+ ### MoveNet (Google) - 17 COCO Keypoints
+ - **Lightning**: Mobile-optimized, very fast
+ - **Thunder**: Higher accuracy variant
+
+ ### YOLO (Ultralytics) - 17 COCO Keypoints
+ - **v8 variants**: n/s/m/l/x sizes (nano to extra-large)
+ - **v11 variants**: Latest with improved accuracy (n/s/m/l/x)
+ - **Multi-person**: Supports multiple people in frame
+
+ ## πΉ Real-time WebRTC
+
+ - **Live Camera**: Direct webcam access via WebRTC
+ - **Low Latency**: Sub-100ms processing
+ - **Adaptive Quality**: Automatic performance optimization
+ - **Live Overlay**: Real-time pose and metrics display
+
+ ## π€ Agent & MCP Integration
+
+ ### API Endpoints
+ - `/analyze_standard` - Basic LMA analysis
+ - `/analyze_enhanced` - Advanced analysis with all features
+ - `/analyze_agent` - Agent-optimized output
+ - `/batch_analyze` - Multiple video processing
+ - `/filter_videos` - Movement-based filtering
+ - `/compare_models` - Model comparison
+
+ ### MCP Server
+ ```bash
+ # Start MCP server for AI assistants
+ python -m backend.mcp_server
+ ```
+
+ ### Python API
+ ```python
+ from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+ # Initialize with all features
+ analyzer = LabanMovementAnalysis(
+ enable_webrtc=True
+ )
+
+ # Analyze YouTube video
+ result, viz = analyzer.process_video(
+ "https://youtube.com/watch?v=...",
+ model="yolo-v11-s"
+ )
+ ```
+
+ ## π Output Formats
+
+ ### Summary Format
+ Human-readable movement analysis summary.
+
+ ### Structured Format
+ ```json
+ {
+ "success": true,
+ "direction": "up",
+ "intensity": "medium",
+ "fluidity": 0.85,
+ "expansion": 0.72
+ }
+ ```
+
+ ### Full JSON Format
+ Complete frame-by-frame analysis with all metrics.
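+
+                    For orientation, these are the paths that the `compare_models` helper in this
+                    demo reads from the full JSON output, assuming `result` is the dictionary
+                    returned by `process_video` (a partial view, not the complete schema):
+                    ```python
+                    summary = result["movement_analysis"]["summary"]
+                    dominant_direction = summary["direction"]["dominant"]   # e.g. "up"
+                    dominant_intensity = summary["intensity"]["dominant"]   # e.g. "medium"
+                    frame_count = result["video_info"]["frame_count"]
+                    ```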
+
+ ## π― Applications
+
+ - **Sports**: Technique analysis and performance tracking
+ - **Dance**: Choreography analysis and movement quality
+ - **Healthcare**: Physical therapy and rehabilitation
+ - **Research**: Large-scale movement pattern studies
+ - **Entertainment**: Interactive applications and games
+ - **Education**: Movement teaching and body awareness
+
+ ## π Integration Examples
+
+ ### Gradio Client
+ ```python
+ from gradio_client import Client
+
+ client = Client("http://localhost:7860")
+ result = client.predict(
+ video="path/to/video.mp4",
+ model="mediapipe-full",
+ api_name="/analyze_enhanced"
+ )
+ ```
+
+ ### Batch Processing
+ ```python
+ results = client.predict(
+ files=["video1.mp4", "video2.mp4"],
+ model="yolo-v11-s",
+ api_name="/batch_analyze"
+ )
+ ```
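+
+                    ### Movement Filtering
+                    A minimal sketch of calling the `/filter_videos` endpoint; the keyword arguments
+                    mirror the `filter_videos_by_movement` signature above, and depending on your
+                    `gradio_client` version, file inputs may need to be wrapped with
+                    `gradio_client.handle_file`.
+                    ```python
+                    result = client.predict(
+                        files=["video1.mp4", "video2.mp4"],
+                        direction="up",        # or "any" to skip this filter
+                        intensity="high",      # or "any"
+                        min_fluidity=0.5,      # 0.0 disables the threshold
+                        min_expansion=0.0,
+                        api_name="/filter_videos"
+                    )
+                    ```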
+ """)
+ gr.HTML("""
+
+ """)
+
+        # Footer with proper attribution
+        gr.HTML("""
+        <div class="author-info">
+            <p>Laban Movement Analysis - Complete Suite | Heavy Beta Version</p>
+            <p>Created by Csaba Bolyós | Powered by MediaPipe, MoveNet &amp; YOLO</p>
+            <p>
+                <a href="https://github.com/bladeszasza">GitHub</a> •
+                <a href="https://huggingface.co/BladeSzaSza">Hugging Face</a> •
+                <a href="https://www.linkedin.com/in/csaba-bolyós-00a11767/">LinkedIn</a>
+            </p>
+        </div>
+        """)
+
+ return demo
+
+
+if __name__ == "__main__":
+ demo = create_unified_demo()
+ demo.launch(
+ server_name="0.0.0.0",
+ server_port=7860,
+ share=False,
+ show_error=True,
+ favicon_path=None
+ )
diff --git a/demo/css.css b/demo/css.css
new file mode 100644
index 0000000000000000000000000000000000000000..f7256be42f9884d89b499b0f5a6cfcbed3d54c80
--- /dev/null
+++ b/demo/css.css
@@ -0,0 +1,157 @@
+html {
+ font-family: Inter;
+ font-size: 16px;
+ font-weight: 400;
+ line-height: 1.5;
+ -webkit-text-size-adjust: 100%;
+ background: #fff;
+ color: #323232;
+ -webkit-font-smoothing: antialiased;
+ -moz-osx-font-smoothing: grayscale;
+ text-rendering: optimizeLegibility;
+}
+
+:root {
+ --space: 1;
+ --vspace: calc(var(--space) * 1rem);
+ --vspace-0: calc(3 * var(--space) * 1rem);
+ --vspace-1: calc(2 * var(--space) * 1rem);
+ --vspace-2: calc(1.5 * var(--space) * 1rem);
+ --vspace-3: calc(0.5 * var(--space) * 1rem);
+}
+
+.app {
+ max-width: 748px !important;
+}
+
+.prose p {
+ margin: var(--vspace) 0;
+  line-height: calc(var(--vspace) * 2);
+ font-size: 1rem;
+}
+
+code {
+ font-family: "Inconsolata", sans-serif;
+ font-size: 16px;
+}
+
+h1,
+h1 code {
+ font-weight: 400;
+ line-height: calc(2.5 / var(--space) * var(--vspace));
+}
+
+h1 code {
+ background: none;
+ border: none;
+ letter-spacing: 0.05em;
+ padding-bottom: 5px;
+ position: relative;
+ padding: 0;
+}
+
+h2 {
+ margin: var(--vspace-1) 0 var(--vspace-2) 0;
+ line-height: 1em;
+}
+
+h3,
+h3 code {
+ margin: var(--vspace-1) 0 var(--vspace-2) 0;
+ line-height: 1em;
+}
+
+h4,
+h5,
+h6 {
+ margin: var(--vspace-3) 0 var(--vspace-3) 0;
+ line-height: var(--vspace);
+}
+
+.bigtitle,
+h1,
+h1 code {
+ font-size: calc(8px * 4.5);
+ word-break: break-word;
+}
+
+.title,
+h2,
+h2 code {
+ font-size: calc(8px * 3.375);
+ font-weight: lighter;
+ word-break: break-word;
+ border: none;
+ background: none;
+}
+
+.subheading1,
+h3,
+h3 code {
+ font-size: calc(8px * 1.8);
+ font-weight: 600;
+ border: none;
+ background: none;
+ letter-spacing: 0.1em;
+ text-transform: uppercase;
+}
+
+h2 code {
+ padding: 0;
+ position: relative;
+ letter-spacing: 0.05em;
+}
+
+blockquote {
+ font-size: calc(8px * 1.1667);
+ font-style: italic;
+ line-height: calc(1.1667 * var(--vspace));
+ margin: var(--vspace-2) var(--vspace-2);
+}
+
+.subheading2,
+h4 {
+ font-size: calc(8px * 1.4292);
+ text-transform: uppercase;
+ font-weight: 600;
+}
+
+.subheading3,
+h5 {
+ font-size: calc(8px * 1.2917);
+ line-height: calc(1.2917 * var(--vspace));
+
+ font-weight: lighter;
+ text-transform: uppercase;
+ letter-spacing: 0.15em;
+}
+
+h6 {
+ font-size: calc(8px * 1.1667);
+ font-size: 1.1667em;
+ font-weight: normal;
+ font-style: italic;
+ font-family: "le-monde-livre-classic-byol", serif !important;
+ letter-spacing: 0px !important;
+}
+
+#start .md > *:first-child {
+ margin-top: 0;
+}
+
+h2 + h3 {
+ margin-top: 0;
+}
+
+.md hr {
+ border: none;
+ border-top: 1px solid var(--block-border-color);
+ margin: var(--vspace-2) 0 var(--vspace-2) 0;
+}
+.prose ul {
+ margin: var(--vspace-2) 0 var(--vspace-1) 0;
+}
+
+.gap {
+ gap: 0;
+}
diff --git a/demo/requirements.txt b/demo/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..4ad70151af0f351e697cd51c781401956a7897cf
--- /dev/null
+++ b/demo/requirements.txt
@@ -0,0 +1 @@
+gradio_labanmovementanalysis
\ No newline at end of file
diff --git a/demo/space.py b/demo/space.py
new file mode 100644
index 0000000000000000000000000000000000000000..c15fff668a38392e6261daf13ca2c1e87216e44e
--- /dev/null
+++ b/demo/space.py
@@ -0,0 +1,983 @@
+
+import gradio as gr
+from app import demo as app
+import os
+
+_docs = {'LabanMovementAnalysis': {'description': 'Gradio component for video-based pose analysis with Laban Movement Analysis metrics.', 'members': {'__init__': {'default_model': {'type': 'str', 'default': '"mediapipe"', 'description': 'Default pose estimation model ("mediapipe", "movenet", "yolo")'}, 'enable_visualization': {'type': 'bool', 'default': 'True', 'description': 'Whether to generate visualization video by default'}, 'include_keypoints': {'type': 'bool', 'default': 'False', 'description': 'Whether to include raw keypoints in JSON output'}, 'enable_webrtc': {'type': 'bool', 'default': 'False', 'description': 'Whether to enable WebRTC real-time analysis'}, 'label': {'type': 'typing.Optional[str][str, None]', 'default': 'None', 'description': 'Component label'}, 'every': {'type': 'typing.Optional[float][float, None]', 'default': 'None', 'description': None}, 'show_label': {'type': 'typing.Optional[bool][bool, None]', 'default': 'None', 'description': None}, 'container': {'type': 'bool', 'default': 'True', 'description': None}, 'scale': {'type': 'typing.Optional[int][int, None]', 'default': 'None', 'description': None}, 'min_width': {'type': 'int', 'default': '160', 'description': None}, 'interactive': {'type': 'typing.Optional[bool][bool, None]', 'default': 'None', 'description': None}, 'visible': {'type': 'bool', 'default': 'True', 'description': None}, 'elem_id': {'type': 'typing.Optional[str][str, None]', 'default': 'None', 'description': None}, 'elem_classes': {'type': 'typing.Optional[typing.List[str]][\n typing.List[str][str], None\n]', 'default': 'None', 'description': None}, 'render': {'type': 'bool', 'default': 'True', 'description': None}}, 'postprocess': {'value': {'type': 'typing.Any', 'description': 'Analysis results'}}, 'preprocess': {'return': {'type': 'typing.Dict[str, typing.Any][str, typing.Any]', 'description': 'Processed data for analysis'}, 'value': None}}, 'events': {}}, '__meta__': {'additional_interfaces': {}, 'user_fn_refs': {'LabanMovementAnalysis': []}}}
+
+abs_path = os.path.join(os.path.dirname(__file__), "css.css")
+
+with gr.Blocks(
+ css=abs_path,
+ theme=gr.themes.Default(
+ font_mono=[
+ gr.themes.GoogleFont("Inconsolata"),
+ "monospace",
+ ],
+ ),
+) as demo:
+ gr.Markdown(
+"""
+# `gradio_labanmovementanalysis`
+
+
+
+
+
+A Gradio 5 component for video movement analysis using Laban Movement Analysis (LMA) with MCP support for AI agents
+""", elem_classes=["md-custom"], header_links=True)
+ app.render()
+ gr.Markdown(
+"""
+## Installation
+
+```bash
+pip install gradio_labanmovementanalysis
+```
+
+## Usage
+
+```python
+\"\"\"
+Unified Laban Movement Analysis Demo
+Comprehensive interface combining all features:
+- Standard LMA analysis
+- Enhanced features (WebRTC, YouTube/Vimeo)
+- Agent API (batch processing, filtering)
+- Real-time analysis
+- Model comparison
+
+Created by: Csaba BolyΓ³s (BladeSzaSza)
+Contact: bladeszasza@gmail.com
+GitHub: https://github.com/bladeszasza
+LinkedIn: https://www.linkedin.com/in/csaba-bolyΓ³s-00a11767/
+Hugging Face: https://huggingface.co/BladeSzaSza
+
+Heavy Beta Version - Under Active Development
+\"\"\"
+
+import gradio as gr
+import sys
+from pathlib import Path
+from typing import Dict, Any, List, Tuple
+
+# Add parent directory to path
+sys.path.insert(0, str(Path(__file__).parent.parent / "backend"))
+
+from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+# Import agent API if available
+try:
+ from gradio_labanmovementanalysis.agent_api import (
+ LabanAgentAPI,
+ PoseModel,
+ MovementDirection,
+ MovementIntensity
+ )
+ HAS_AGENT_API = True
+except ImportError:
+ HAS_AGENT_API = False
+
+# Import WebRTC components if available
+try:
+ from gradio_webrtc import WebRTC
+ from gradio_labanmovementanalysis.webrtc_handler import (
+ webrtc_detection,
+ get_rtc_configuration
+ )
+ HAS_WEBRTC = True
+except ImportError as e:
+ print(f"WebRTC import failed: {e}")
+ HAS_WEBRTC = False
+
+# Initialize components
+try:
+ # Initialize with WebRTC support
+ analyzer = LabanMovementAnalysis(
+ enable_webrtc=True,
+ enable_visualization=True
+ )
+    print("Core features initialized successfully")
+except Exception as e:
+ print(f"Warning: Some features may not be available: {e}")
+ analyzer = LabanMovementAnalysis(enable_webrtc=False)
+
+# Initialize agent API if available
+agent_api = None
+if HAS_AGENT_API:
+ try:
+ agent_api = LabanAgentAPI()
+ except Exception as e:
+ print(f"Warning: Agent API not available: {e}")
+ agent_api = None
+
+
+def process_video_standard(video, model, enable_viz, include_keypoints):
+ \"\"\"Standard video processing function.\"\"\"
+ if video is None:
+ return None, None
+
+ try:
+ json_output, video_output = analyzer.process_video(
+ video,
+ model=model,
+ enable_visualization=enable_viz,
+ include_keypoints=include_keypoints
+ )
+ return json_output, video_output
+ except Exception as e:
+ return {"error": str(e)}, None
+
+
+def process_video_enhanced(video_input, model, enable_viz, include_keypoints):
+ \"\"\"Enhanced video processing with all new features.\"\"\"
+ if not video_input:
+ return {"error": "No video provided"}, None
+
+ try:
+ # Handle both file upload and URL input
+ video_path = video_input.name if hasattr(video_input, 'name') else video_input
+
+ json_result, viz_result = analyzer.process_video(
+ video_path,
+ model=model,
+ enable_visualization=enable_viz,
+ include_keypoints=include_keypoints
+ )
+ return json_result, viz_result
+ except Exception as e:
+ error_result = {"error": str(e)}
+ return error_result, None
+
+
+def process_video_for_agent(video, model, output_format="summary"):
+ \"\"\"Process video with agent-friendly output format.\"\"\"
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not video:
+ return {"error": "No video provided"}
+
+ try:
+ model_enum = PoseModel(model)
+ result = agent_api.analyze(video, model=model_enum, generate_visualization=False)
+
+ if output_format == "summary":
+ return {"summary": agent_api.get_movement_summary(result)}
+ elif output_format == "structured":
+ return {
+ "success": result.success,
+ "direction": result.dominant_direction.value,
+ "intensity": result.dominant_intensity.value,
+ "speed": result.dominant_speed,
+ "fluidity": result.fluidity_score,
+ "expansion": result.expansion_score,
+ "segments": len(result.movement_segments)
+ }
+ else: # json
+ return result.raw_data
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def batch_process_videos(files, model):
+ \"\"\"Process multiple videos in batch.\"\"\"
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not files:
+ return {"error": "No videos provided"}
+
+ try:
+ video_paths = [f.name for f in files]
+ results = agent_api.batch_analyze(video_paths, model=PoseModel(model), parallel=True)
+
+ output = {
+ "total_videos": len(results),
+ "successful": sum(1 for r in results if r.success),
+ "failed": sum(1 for r in results if not r.success),
+ "results": []
+ }
+
+ for result in results:
+ output["results"].append({
+ "video": Path(result.video_path).name,
+ "success": result.success,
+ "summary": agent_api.get_movement_summary(result) if result.success else result.error
+ })
+
+ return output
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def filter_videos_by_movement(files, direction, intensity, min_fluidity, min_expansion):
+ \"\"\"Filter videos based on movement characteristics.\"\"\"
+ if not HAS_AGENT_API or agent_api is None:
+ return {"error": "Agent API not available"}
+
+ if not files:
+ return {"error": "No videos provided"}
+
+ try:
+ video_paths = [f.name for f in files]
+
+ dir_filter = MovementDirection(direction) if direction != "any" else None
+ int_filter = MovementIntensity(intensity) if intensity != "any" else None
+
+ filtered = agent_api.filter_by_movement(
+ video_paths,
+ direction=dir_filter,
+ intensity=int_filter,
+ min_fluidity=min_fluidity if min_fluidity > 0 else None,
+ min_expansion=min_expansion if min_expansion > 0 else None
+ )
+
+ return {
+ "total_analyzed": len(video_paths),
+ "matching_videos": len(filtered),
+ "matches": [
+ {
+ "video": Path(r.video_path).name,
+ "direction": r.dominant_direction.value,
+ "intensity": r.dominant_intensity.value,
+ "fluidity": r.fluidity_score,
+ "expansion": r.expansion_score
+ }
+ for r in filtered
+ ]
+ }
+ except Exception as e:
+ return {"error": str(e)}
+
+
+def compare_models(video, model1, model2):
+ \"\"\"Compare two different pose models on the same video.\"\"\"
+ if not video:
+ return "No video provided"
+
+ try:
+ # Analyze with both models
+ result1, _ = analyzer.process_video(video, model=model1, enable_visualization=False)
+ result2, _ = analyzer.process_video(video, model=model2, enable_visualization=False)
+
+ # Extract key metrics for comparison
+ def extract_metrics(result):
+ summary = result.get("movement_analysis", {}).get("summary", {})
+ return {
+ "direction": summary.get("direction", {}).get("dominant", "unknown"),
+ "intensity": summary.get("intensity", {}).get("dominant", "unknown"),
+ "speed": summary.get("speed", {}).get("dominant", "unknown"),
+ "frame_count": result.get("video_info", {}).get("frame_count", 0)
+ }
+
+ metrics1 = extract_metrics(result1)
+ metrics2 = extract_metrics(result2)
+
+ # Create comparison table data
+ comparison_data = [
+ ["Direction", metrics1["direction"], metrics2["direction"],
+ "β" if metrics1["direction"] == metrics2["direction"] else "β"],
+ ["Intensity", metrics1["intensity"], metrics2["intensity"],
+ "β" if metrics1["intensity"] == metrics2["intensity"] else "β"],
+ ["Speed", metrics1["speed"], metrics2["speed"],
+ "β" if metrics1["speed"] == metrics2["speed"] else "β"],
+ ["Frames Processed", str(metrics1["frame_count"]), str(metrics2["frame_count"]),
+ "β" if metrics1["frame_count"] == metrics2["frame_count"] else "β"]
+ ]
+
+ return comparison_data
+
+ except Exception as e:
+ return [["Error", str(e), "", ""]]
+
+
+def start_webrtc_stream(model):
+ \"\"\"Start WebRTC real-time analysis.\"\"\"
+ try:
+ success = analyzer.start_webrtc_stream(model)
+ if success:
+ return "π’ Stream Active", {"status": "streaming", "model": model}
+ else:
+ return "π΄ Failed to start", {"status": "error"}
+ except Exception as e:
+ return f"π΄ Error: {str(e)}", {"status": "error"}
+
+
+def stop_webrtc_stream():
+ \"\"\"Stop WebRTC real-time analysis.\"\"\"
+ try:
+ success = analyzer.stop_webrtc_stream()
+ if success:
+ return "π‘ Stream Stopped", {"status": "stopped"}
+ else:
+ return "π΄ Failed to stop", {"status": "error"}
+ except Exception as e:
+ return f"π΄ Error: {str(e)}", {"status": "error"}
+
+
+def create_unified_demo():
+ \"\"\"Create the unified comprehensive demo.\"\"\"
+
+ with gr.Blocks(
+ title="Laban Movement Analysis - Complete Suite by Csaba BolyΓ³s",
+ theme=gr.themes.Soft(),
+ css=\"\"\"
+ .main-header {
+ background: linear-gradient(135deg, #40826D 0%, #2E5E4A 50%, #1B3A2F 100%);
+ color: white;
+ padding: 30px;
+ border-radius: 10px;
+ margin-bottom: 20px;
+ text-align: center;
+ }
+ .feature-card {
+ border: 1px solid #e1e5e9;
+ border-radius: 8px;
+ padding: 16px;
+ margin: 8px 0;
+ background: #f8f9fa;
+ }
+ .json-output {
+ max-height: 600px;
+ overflow-y: auto;
+ font-family: monospace;
+ font-size: 12px;
+ }
+ .author-info {
+ background: linear-gradient(135deg, #40826D 0%, #2E5E4A 100%);
+ color: white;
+ padding: 15px;
+ border-radius: 8px;
+ margin: 10px 0;
+ text-align: center;
+ }
+ \"\"\"
+ ) as demo:
+
+ # Main Header
+ gr.HTML(\"\"\"
+
+
π Laban Movement Analysis - Complete Suite
+
+ Professional movement analysis with pose estimation, AI action recognition,
+ real-time processing, and agent automation
+
+
+ Supports YouTube/Vimeo URLs β’ WebRTC Streaming β’ 20+ Pose Models β’ MCP Integration
+
+
+ Version 0.01-beta - Heavy Beta Under Active Development
+
+
+ \"\"\")
+
+ with gr.Tabs():
+ # Tab 1: Standard Analysis
+ with gr.Tab("π¬ Standard Analysis"):
+ gr.Markdown(\"\"\"
+ ### Classic Laban Movement Analysis
+ Upload a video file to analyze movement using traditional LMA metrics with pose estimation.
+ \"\"\")
+
+ with gr.Row():
+ with gr.Column(scale=1):
+ video_input_std = gr.Video(
+ label="Upload Video",
+ sources=["upload"],
+ format="mp4"
+ )
+
+ model_dropdown_std = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Pose Estimation Model"
+ )
+
+ with gr.Row():
+ enable_viz_std = gr.Checkbox(
+ value=True,
+ label="Generate Visualization"
+ )
+
+ include_keypoints_std = gr.Checkbox(
+ value=False,
+ label="Include Keypoints"
+ )
+
+ process_btn_std = gr.Button("Analyze Movement", variant="primary")
+
+ gr.Examples(
+ examples=[
+ ["examples/balette.mp4"],
+ ],
+ inputs=video_input_std,
+ label="Example Videos"
+ )
+
+ with gr.Column(scale=2):
+ with gr.Tab("Analysis Results"):
+ json_output_std = gr.JSON(
+ label="Movement Analysis (JSON)",
+ elem_classes=["json-output"]
+ )
+
+ with gr.Tab("Visualization"):
+ video_output_std = gr.Video(
+ label="Annotated Video",
+ format="mp4"
+ )
+
+ gr.Markdown(\"\"\"
+ **Visualization Guide:**
+ - 𦴠**Skeleton**: Pose keypoints and connections
+ - π **Trails**: Motion history (fading lines)
+ - β‘οΈ **Arrows**: Movement direction indicators
+ - π¨ **Colors**: Green (low) β Orange (medium) β Red (high) intensity
+ \"\"\")
+
+ process_btn_std.click(
+ fn=process_video_standard,
+ inputs=[video_input_std, model_dropdown_std, enable_viz_std, include_keypoints_std],
+ outputs=[json_output_std, video_output_std],
+ api_name="analyze_standard"
+ )
+
+ # Tab 2: Enhanced Analysis
+ with gr.Tab("π Enhanced Analysis"):
+ gr.Markdown(\"\"\"
+ ### Advanced Analysis with AI and URL Support
+ Analyze videos from URLs (YouTube/Vimeo), use advanced pose models, and get AI-powered insights.
+ \"\"\")
+
+ with gr.Row():
+ with gr.Column(scale=1):
+ gr.HTML('')
+ gr.Markdown("**Video Input**")
+
+ # Changed from textbox to file upload as requested
+ video_input_enh = gr.File(
+ label="Upload Video or Drop File",
+ file_types=["video"],
+ type="filepath"
+ )
+
+ # URL input option
+ url_input_enh = gr.Textbox(
+ label="Or Enter Video URL",
+ placeholder="YouTube URL, Vimeo URL, or direct video URL",
+ info="Leave file upload empty to use URL"
+ )
+
+ gr.Examples(
+ examples=[
+ ["examples/balette.mp4"],
+ ["https://www.youtube.com/shorts/RX9kH2l3L8U"],
+ ["https://vimeo.com/815392738"]
+ ],
+ inputs=url_input_enh,
+ label="Example URLs"
+ )
+
+ gr.Markdown("**Model Selection**")
+
+ model_select_enh = gr.Dropdown(
+ choices=[
+ # MediaPipe variants
+ "mediapipe-lite", "mediapipe-full", "mediapipe-heavy",
+ # MoveNet variants
+ "movenet-lightning", "movenet-thunder",
+ # YOLO variants (added X models)
+ "yolo-v8-n", "yolo-v8-s", "yolo-v8-m", "yolo-v8-l", "yolo-v8-x",
+ # YOLO v11 variants
+ "yolo-v11-n", "yolo-v11-s", "yolo-v11-m", "yolo-v11-l", "yolo-v11-x"
+ ],
+ value="mediapipe-full",
+ label="Advanced Pose Models",
+ info="17+ model variants available"
+ )
+
+ gr.Markdown("**Analysis Options**")
+
+ with gr.Row():
+ enable_viz_enh = gr.Checkbox(value=True, label="Visualization")
+
+ with gr.Row():
+ include_keypoints_enh = gr.Checkbox(value=False, label="Raw Keypoints")
+
+ analyze_btn_enh = gr.Button("π Enhanced Analysis", variant="primary", size="lg")
+                    gr.HTML('')
+
+ with gr.Column(scale=2):
+ with gr.Tab("π Analysis"):
+ analysis_output_enh = gr.JSON(label="Enhanced Analysis Results")
+
+ with gr.Tab("π₯ Visualization"):
+ viz_output_enh = gr.Video(label="Annotated Video")
+
+ def process_enhanced_input(file_input, url_input, model, enable_viz, include_keypoints):
+ \"\"\"Process either file upload or URL input.\"\"\"
+ video_source = file_input if file_input else url_input
+ return process_video_enhanced(video_source, model, enable_viz, include_keypoints)
+
+ analyze_btn_enh.click(
+ fn=process_enhanced_input,
+ inputs=[video_input_enh, url_input_enh, model_select_enh, enable_viz_enh, include_keypoints_enh],
+ outputs=[analysis_output_enh, viz_output_enh],
+ api_name="analyze_enhanced"
+ )
+
+ # Tab 3: Agent API
+ with gr.Tab("π€ Agent API"):
+ gr.Markdown(\"\"\"
+ ### AI Agent & Automation Features
+ Batch processing, filtering, and structured outputs designed for AI agents and automation.
+ \"\"\")
+
+ with gr.Tabs():
+ with gr.Tab("Single Analysis"):
+ with gr.Row():
+ with gr.Column():
+ video_input_agent = gr.Video(label="Upload Video", sources=["upload"])
+ model_select_agent = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Model"
+ )
+ output_format_agent = gr.Radio(
+ choices=["summary", "structured", "json"],
+ value="summary",
+ label="Output Format"
+ )
+ analyze_btn_agent = gr.Button("Analyze", variant="primary")
+
+ with gr.Column():
+ output_display_agent = gr.JSON(label="Agent Output")
+
+ analyze_btn_agent.click(
+ fn=process_video_for_agent,
+ inputs=[video_input_agent, model_select_agent, output_format_agent],
+ outputs=output_display_agent,
+ api_name="analyze_agent"
+ )
+
+ with gr.Tab("Batch Processing"):
+ with gr.Row():
+ with gr.Column():
+ batch_files = gr.File(
+ label="Upload Multiple Videos",
+ file_count="multiple",
+ file_types=["video"]
+ )
+ batch_model = gr.Dropdown(
+ choices=["mediapipe", "movenet", "yolo"],
+ value="mediapipe",
+ label="Model"
+ )
+ batch_btn = gr.Button("Process Batch", variant="primary")
+
+ with gr.Column():
+ batch_output = gr.JSON(label="Batch Results")
+
+ batch_btn.click(
+ fn=batch_process_videos,
+ inputs=[batch_files, batch_model],
+ outputs=batch_output,
+ api_name="batch_analyze"
+ )
+
+ with gr.Tab("Movement Filter"):
+ with gr.Row():
+ with gr.Column():
+ filter_files = gr.File(
+ label="Videos to Filter",
+ file_count="multiple",
+ file_types=["video"]
+ )
+
+ with gr.Group():
+ direction_filter = gr.Dropdown(
+ choices=["any", "up", "down", "left", "right", "stationary"],
+ value="any",
+ label="Direction Filter"
+ )
+ intensity_filter = gr.Dropdown(
+ choices=["any", "low", "medium", "high"],
+ value="any",
+ label="Intensity Filter"
+ )
+ fluidity_threshold = gr.Slider(0.0, 1.0, 0.0, label="Min Fluidity")
+ expansion_threshold = gr.Slider(0.0, 1.0, 0.0, label="Min Expansion")
+
+ filter_btn = gr.Button("Apply Filters", variant="primary")
+
+ with gr.Column():
+ filter_output = gr.JSON(label="Filtered Results")
+
+ filter_btn.click(
+ fn=filter_videos_by_movement,
+ inputs=[filter_files, direction_filter, intensity_filter,
+ fluidity_threshold, expansion_threshold],
+ outputs=filter_output,
+ api_name="filter_videos"
+ )
+
+ # Tab 4: Real-time WebRTC
+ with gr.Tab("πΉ Real-time Analysis"):
+ gr.Markdown(\"\"\"
+ ### Live Camera Movement Analysis
+ Real-time pose detection and movement analysis from your webcam using WebRTC.
+ **Grant camera permissions when prompted for best experience.**
+ \"\"\")
+
+ # Official Gradio WebRTC approach (compatible with NumPy 1.x)
+ if HAS_WEBRTC:
+
+ # Get RTC configuration
+ rtc_config = get_rtc_configuration()
+
+ # Custom CSS following official guide
+ css_webrtc = \"\"\"
+ .my-group {max-width: 480px !important; max-height: 480px !important;}
+ .my-column {display: flex !important; justify-content: center !important; align-items: center !important;}
+ \"\"\"
+
+ with gr.Column(elem_classes=["my-column"]):
+ with gr.Group(elem_classes=["my-group"]):
+ # Official WebRTC Component
+ webrtc_stream = WebRTC(
+ label="π₯ Live Camera Stream",
+ rtc_configuration=rtc_config
+ )
+
+ webrtc_model = gr.Dropdown(
+ choices=["mediapipe-lite", "movenet-lightning", "yolo-v11-n"],
+ value="mediapipe-lite",
+ label="Pose Model",
+ info="Optimized for real-time processing"
+ )
+
+ confidence_slider = gr.Slider(
+ label="Detection Confidence",
+ minimum=0.0,
+ maximum=1.0,
+ step=0.05,
+ value=0.5,
+ info="Higher = fewer false positives"
+ )
+
+ # Official WebRTC streaming setup following Gradio guide
+ webrtc_stream.stream(
+ fn=webrtc_detection,
+ inputs=[webrtc_stream, webrtc_model, confidence_slider],
+ outputs=[webrtc_stream],
+ time_limit=10 # Following official guide: 10 seconds per user
+ )
+
+ # Info display
+ gr.HTML(\"\"\"
+
+
πΉ WebRTC Pose Analysis
+
Real-time movement analysis using your webcam
+
+
π Privacy
+
Processing happens locally - no video data stored
+
+
π‘ Usage
+
+ - Grant camera permission when prompted
+ - Move in front of camera to see pose detection
+ - Adjust confidence threshold as needed
+
+
+ \"\"\")
+
+ else:
+ # Fallback if WebRTC component not available
+ gr.HTML(\"\"\"
+
+
π¦ WebRTC Component Required
+
To enable real-time camera analysis, install:
+
+ pip install gradio-webrtc twilio
+
+
Use Enhanced Analysis tab for video files meanwhile
+
+ \"\"\")
+
+ # Tab 5: Model Comparison
+ with gr.Tab("βοΈ Model Comparison"):
+ gr.Markdown(\"\"\"
+ ### Compare Pose Estimation Models
+ Analyze the same video with different models to compare accuracy and results.
+ \"\"\")
+
+ with gr.Column():
+ comparison_video = gr.Video(
+ label="Video for Comparison",
+ sources=["upload"]
+ )
+
+ with gr.Row():
+ model1_comp = gr.Dropdown(
+ choices=["mediapipe-full", "movenet-thunder", "yolo-v11-s"],
+ value="mediapipe-full",
+ label="Model 1"
+ )
+
+ model2_comp = gr.Dropdown(
+ choices=["mediapipe-full", "movenet-thunder", "yolo-v11-s"],
+ value="yolo-v11-s",
+ label="Model 2"
+ )
+
+ compare_btn = gr.Button("π Compare Models", variant="primary")
+
+ comparison_results = gr.DataFrame(
+ headers=["Metric", "Model 1", "Model 2", "Match"],
+ label="Comparison Results"
+ )
+
+ compare_btn.click(
+ fn=compare_models,
+ inputs=[comparison_video, model1_comp, model2_comp],
+ outputs=comparison_results,
+ api_name="compare_models"
+ )
+
+ # Tab 6: Documentation
+ with gr.Tab("π Documentation"):
+ gr.Markdown(\"\"\"
+ # Complete Feature Documentation
+
+ ## π₯ Video Input Support
+ - **Local Files**: MP4, AVI, MOV, WebM formats
+ - **YouTube**: Automatic download from YouTube URLs
+ - **Vimeo**: Automatic download from Vimeo URLs
+ - **Direct URLs**: Any direct video file URL
+
+ ## π€ Pose Estimation Models
+
+ ### MediaPipe (Google) - 33 3D Landmarks
+ - **Lite**: Fastest CPU performance
+ - **Full**: Balanced accuracy/speed (recommended)
+ - **Heavy**: Highest accuracy
+
+ ### MoveNet (Google) - 17 COCO Keypoints
+ - **Lightning**: Mobile-optimized, very fast
+ - **Thunder**: Higher accuracy variant
+
+ ### YOLO (Ultralytics) - 17 COCO Keypoints
+ - **v8 variants**: n/s/m/l/x sizes (nano to extra-large)
+ - **v11 variants**: Latest with improved accuracy (n/s/m/l/x)
+ - **Multi-person**: Supports multiple people in frame
+
+ ## πΉ Real-time WebRTC
+
+ - **Live Camera**: Direct webcam access via WebRTC
+ - **Low Latency**: Sub-100ms processing
+ - **Adaptive Quality**: Automatic performance optimization
+ - **Live Overlay**: Real-time pose and metrics display
+
+ ## π€ Agent & MCP Integration
+
+ ### API Endpoints
+ - `/analyze_standard` - Basic LMA analysis
+ - `/analyze_enhanced` - Advanced analysis with all features
+ - `/analyze_agent` - Agent-optimized output
+ - `/batch_analyze` - Multiple video processing
+ - `/filter_videos` - Movement-based filtering
+ - `/compare_models` - Model comparison
+
+ ### MCP Server
+ ```bash
+ # Start MCP server for AI assistants
+ python -m backend.mcp_server
+ ```
+
+ ### Python API
+ ```python
+ from gradio_labanmovementanalysis import LabanMovementAnalysis
+
+ # Initialize with all features
+ analyzer = LabanMovementAnalysis(
+ enable_webrtc=True
+ )
+
+ # Analyze YouTube video
+ result, viz = analyzer.process_video(
+ "https://youtube.com/watch?v=...",
+ model="yolo-v11-s"
+ )
+ ```
+
+ ## π Output Formats
+
+ ### Summary Format
+ Human-readable movement analysis summary.
+
+ ### Structured Format
+ ```json
+ {
+ "success": true,
+ "direction": "up",
+ "intensity": "medium",
+ "fluidity": 0.85,
+ "expansion": 0.72
+ }
+ ```
+
+ ### Full JSON Format
+ Complete frame-by-frame analysis with all metrics.
+
+ ## π― Applications
+
+ - **Sports**: Technique analysis and performance tracking
+ - **Dance**: Choreography analysis and movement quality
+ - **Healthcare**: Physical therapy and rehabilitation
+ - **Research**: Large-scale movement pattern studies
+ - **Entertainment**: Interactive applications and games
+ - **Education**: Movement teaching and body awareness
+
+ ## π Integration Examples
+
+ ### Gradio Client
+ ```python
+ from gradio_client import Client
+
+ client = Client("http://localhost:7860")
+ result = client.predict(
+ video="path/to/video.mp4",
+ model="mediapipe-full",
+ api_name="/analyze_enhanced"
+ )
+ ```
+
+ ### Batch Processing
+ ```python
+ results = client.predict(
+ files=["video1.mp4", "video2.mp4"],
+ model="yolo-v11-s",
+ api_name="/batch_analyze"
+ )
+ ```
+ \"\"\")
+ gr.HTML(\"\"\"
+
+ \"\"\")
+
+ # Footer with proper attribution
+ gr.HTML(\"\"\"
+
+
+ π Laban Movement Analysis - Complete Suite | Heavy Beta Version
+
+
+ Created by Csaba BolyΓ³s | Powered by MediaPipe, MoveNet & YOLO
+
+
+ GitHub β’
+ Hugging Face β’
+ LinkedIn
+
+
+ \"\"\")
+
+ return demo
+
+
+if __name__ == "__main__":
+ demo = create_unified_demo()
+ demo.launch(
+ server_name="0.0.0.0",
+ server_port=7860,
+ share=False,
+ show_error=True,
+ favicon_path=None
+ )
+
+```
+""", elem_classes=["md-custom"], header_links=True)
+
+
+ gr.Markdown("""
+## `LabanMovementAnalysis`
+
+### Initialization
+""", elem_classes=["md-custom"], header_links=True)
+
+ gr.ParamViewer(value=_docs["LabanMovementAnalysis"]["members"]["__init__"], linkify=[])
+
+
+
+
+ gr.Markdown("""
+
+### User function
+
+The impact on the user's predict function varies depending on whether the component is used as an input or output for an event (or both).
+
+- When used as an Input, the component only impacts the input signature of the user function.
+- When used as an output, the component only impacts the return signature of the user function.
+
+The code snippet below is accurate in cases where the component is used as both an input and an output.
+
+- **As input:** Is passed, processed data for analysis.
+- **As output:** Should return, analysis results.
+
+ ```python
+def predict(
+ value: typing.Dict[str, typing.Any][str, typing.Any]
+) -> typing.Any:
+ return value
+```
+""", elem_classes=["md-custom", "LabanMovementAnalysis-user-fn"], header_links=True)
+
+
+
+
+ demo.load(None, js=r"""function() {
+ const refs = {};
+ const user_fn_refs = {
+ LabanMovementAnalysis: [], };
+ requestAnimationFrame(() => {
+
+ Object.entries(user_fn_refs).forEach(([key, refs]) => {
+ if (refs.length > 0) {
+ const el = document.querySelector(`.${key}-user-fn`);
+ if (!el) return;
+ refs.forEach(ref => {
+ el.innerHTML = el.innerHTML.replace(
+ new RegExp("\\b"+ref+"\\b", "g"),
+ `${ref}`
+ );
+ })
+ }
+ })
+
+ Object.entries(refs).forEach(([key, refs]) => {
+ if (refs.length > 0) {
+ const el = document.querySelector(`.${key}`);
+ if (!el) return;
+ refs.forEach(ref => {
+ el.innerHTML = el.innerHTML.replace(
+ new RegExp("\\b"+ref+"\\b", "g"),
+ `${ref}`
+ );
+ })
+ }
+ })
+ })
+}
+
+""")
+
+demo.launch()
diff --git a/demo/test_component.py b/demo/test_component.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3b62cb02f48ad5704aa17d9ffb636efb9444ad0
--- /dev/null
+++ b/demo/test_component.py
@@ -0,0 +1,69 @@
+"""
+Test script to verify the Laban Movement Analysis component structure.
+"""
+
+import sys
+from pathlib import Path
+
+# Add parent directory to path
+sys.path.insert(0, str(Path(__file__).parent.parent / "backend"))
+
+# Test imports
+try:
+ from gradio_labanmovementanalysis import LabanMovementAnalysis
+ print("β LabanMovementAnalysis component imported successfully")
+
+ from gradio_labanmovementanalysis import video_utils
+ print("β video_utils module imported successfully")
+
+ from gradio_labanmovementanalysis import pose_estimation
+ print("β pose_estimation module imported successfully")
+
+ from gradio_labanmovementanalysis import notation_engine
+ print("β notation_engine module imported successfully")
+
+ from gradio_labanmovementanalysis import json_generator
+ print("β json_generator module imported successfully")
+
+ from gradio_labanmovementanalysis import visualizer
+ print("β visualizer module imported successfully")
+
+except ImportError as e:
+ print(f"β Import error: {e}")
+ sys.exit(1)
+
+# Test component instantiation
+try:
+ component = LabanMovementAnalysis()
+ print("\nβ Component instantiated successfully")
+
+ # Test component methods
+ example_payload = component.example_payload()
+ print(f"β Example payload: {example_payload}")
+
+ example_value = component.example_value()
+ print(f"β Example value keys: {list(example_value.keys())}")
+
+ api_info = component.api_info()
+ print(f"β API info type: {api_info['type']}")
+
+except Exception as e:
+ print(f"β Component error: {e}")
+ sys.exit(1)
+
+# Test data structures
+try:
+ from gradio_labanmovementanalysis.pose_estimation import Keypoint, PoseResult
+ kp = Keypoint(x=0.5, y=0.5, confidence=0.9, name="nose")
+ print(f"\nβ Keypoint created: {kp}")
+
+ from gradio_labanmovementanalysis.notation_engine import Direction, Speed, Intensity
+ print(f"β Direction values: {[d.value for d in Direction]}")
+ print(f"β Speed values: {[s.value for s in Speed]}")
+ print(f"β Intensity values: {[i.value for i in Intensity]}")
+
+except Exception as e:
+ print(f"β Data structure error: {e}")
+ sys.exit(1)
+
+print("\nAll tests passed! The component is properly structured.")
\ No newline at end of file
diff --git a/dist/gradio_labanmovementanalysis-0.0.1-py3-none-any.whl b/dist/gradio_labanmovementanalysis-0.0.1-py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..0aa80ddf607cbbeace198be2a054b6108406f480
Binary files /dev/null and b/dist/gradio_labanmovementanalysis-0.0.1-py3-none-any.whl differ
diff --git a/dist/gradio_labanmovementanalysis-0.0.1.tar.gz b/dist/gradio_labanmovementanalysis-0.0.1.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..e0742763c6820afa83c6fbaf8ab1582dd125b959
--- /dev/null
+++ b/dist/gradio_labanmovementanalysis-0.0.1.tar.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c357282c4575834cf159824141a52f9d6fc99113c48438c70b24b0bcb41add51
+size 84525422
diff --git a/dist/gradio_labanmovementanalysis-0.0.2-py3-none-any.whl b/dist/gradio_labanmovementanalysis-0.0.2-py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..be2394b6900cb589a0f01b4bceae3f36db867733
Binary files /dev/null and b/dist/gradio_labanmovementanalysis-0.0.2-py3-none-any.whl differ
diff --git a/dist/gradio_labanmovementanalysis-0.0.2.tar.gz b/dist/gradio_labanmovementanalysis-0.0.2.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..aedcdb4d32b656247993e7b3fc663a961663aa8a
--- /dev/null
+++ b/dist/gradio_labanmovementanalysis-0.0.2.tar.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e161cca0ea5443885301b3362b9b8a7553fa06622dcdef7a62a03a30d15271ef
+size 84525440
diff --git a/examples/agent_example.py b/examples/agent_example.py
new file mode 100644
index 0000000000000000000000000000000000000000..e45e6d6f8f34c530645396da0654d34f4815ab40
--- /dev/null
+++ b/examples/agent_example.py
@@ -0,0 +1,142 @@
+"""
+Example script demonstrating agent-friendly API usage
+for Laban Movement Analysis
+"""
+
+import sys
+from pathlib import Path
+
+# Add parent to path
+sys.path.insert(0, str(Path(__file__).parent.parent / "backend"))
+
+from gradio_labanmovementanalysis import (
+ LabanAgentAPI,
+ PoseModel,
+ MovementDirection,
+ MovementIntensity,
+ quick_analyze,
+ analyze_and_summarize
+)
+
+
+def main():
+ """Demonstrate various agent API features"""
+
+ print("π Laban Movement Analysis - Agent API Examples\n")
+
+ # Example video paths (replace with your own)
+ video_path = "examples/dance.mp4"
+
+ # 1. Quick analysis with summary
+ print("1. Quick Analysis with Summary")
+ print("-" * 40)
+ summary = analyze_and_summarize(video_path)
+ print(summary)
+ print()
+
+ # 2. Detailed analysis with structured output
+ print("2. Detailed Analysis")
+ print("-" * 40)
+ api = LabanAgentAPI()
+ result = api.analyze(video_path, generate_visualization=True)
+
+ if result.success:
+ print(f"β Analysis successful!")
+ print(f" Direction: {result.dominant_direction.value}")
+ print(f" Intensity: {result.dominant_intensity.value}")
+ print(f" Speed: {result.dominant_speed}")
+ print(f" Fluidity: {result.fluidity_score:.2f}")
+ print(f" Expansion: {result.expansion_score:.2f}")
+ print(f" Segments: {len(result.movement_segments)}")
+ if result.visualization_path:
+ print(f" Visualization saved to: {result.visualization_path}")
+ else:
+ print(f"β Analysis failed: {result.error}")
+ print()
+
+ # 3. Batch processing example
+ print("3. Batch Processing")
+ print("-" * 40)
+ video_paths = [
+ "examples/dance.mp4",
+ "examples/exercise.mp4",
+ "examples/walking.mp4"
+ ]
+
+ # Filter out non-existent files
+ existing_paths = [p for p in video_paths if Path(p).exists()]
+
+ if existing_paths:
+ results = api.batch_analyze(existing_paths, parallel=True)
+ for i, result in enumerate(results):
+ print(f"Video {i+1}: {Path(result.video_path).name}")
+ if result.success:
+ print(f" β {result.dominant_direction.value} movement, "
+ f"{result.dominant_intensity.value} intensity")
+ else:
+ print(f" β Failed: {result.error}")
+ else:
+ print(" No example videos found")
+ print()
+
+ # 4. Movement filtering example
+ print("4. Movement Filtering")
+ print("-" * 40)
+ if existing_paths and len(existing_paths) > 1:
+ # Find high-intensity movements
+ high_intensity = api.filter_by_movement(
+ existing_paths,
+ intensity=MovementIntensity.HIGH,
+ min_fluidity=0.5
+ )
+
+ print(f"Found {len(high_intensity)} high-intensity videos:")
+ for result in high_intensity:
+ print(f" - {Path(result.video_path).name}: "
+ f"fluidity={result.fluidity_score:.2f}")
+ print()
+
+ # 5. Video comparison example
+ print("5. Video Comparison")
+ print("-" * 40)
+ if len(existing_paths) >= 2:
+ comparison = api.compare_videos(existing_paths[0], existing_paths[1])
+ print(f"Comparing: {comparison['video1']} vs {comparison['video2']}")
+ print(f" Direction match: {comparison['metrics']['direction_match']}")
+ print(f" Intensity match: {comparison['metrics']['intensity_match']}")
+ print(f" Fluidity difference: {comparison['metrics']['fluidity_difference']:.2f}")
+ print()
+
+ # 6. Model comparison
+ print("6. Model Comparison")
+ print("-" * 40)
+ if existing_paths:
+ test_video = existing_paths[0]
+ models = [PoseModel.MEDIAPIPE, PoseModel.MOVENET]
+
+ for model in models:
+ result = api.analyze(test_video, model=model)
+ if result.success:
+ print(f"{model.value}: {result.dominant_direction.value} "
+ f"({result.dominant_intensity.value})")
+
+
+def async_example():
+ """Example of async usage"""
+ import asyncio
+
+ async def analyze_async():
+ api = LabanAgentAPI()
+ result = await api.analyze_async("examples/dance.mp4")
+ return api.get_movement_summary(result)
+
+ # Run async example
+ summary = asyncio.run(analyze_async())
+ print("Async Analysis:", summary)
+
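+def quick_analyze_example():
+    # Minimal sketch showing the quick_analyze convenience helper imported at
+    # the top of this script but not otherwise exercised here.
+    # Assumption: like analyze_and_summarize, it accepts a video path; its
+    # exact return type is not documented in this file, so the result is
+    # printed as-is rather than accessing specific fields.
+    result = quick_analyze("examples/dance.mp4")
+    print("Quick analysis result:", result)
+
+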
+
+if __name__ == "__main__":
+ main()
+
+ # Uncomment to run async example
+ # async_example()
\ No newline at end of file
diff --git a/frontend/Example.svelte b/frontend/Example.svelte
new file mode 100644
index 0000000000000000000000000000000000000000..6b6a4a08c938f1b38a8a090480c8e3566e8747af
--- /dev/null
+++ b/frontend/Example.svelte
@@ -0,0 +1,19 @@
+
+
+
+ {value}
+
+
+
diff --git a/frontend/Index.svelte b/frontend/Index.svelte
new file mode 100644
index 0000000000000000000000000000000000000000..129f056ae728a71f088c0746fa8328fd22444f55
--- /dev/null
+++ b/frontend/Index.svelte
@@ -0,0 +1,37 @@
+
+
+
+ {#if loading_status}
+ gradio.dispatch("clear_status", loading_status)}
+ />
+ {/if}
+
+
+
diff --git a/frontend/gradio.config.js b/frontend/gradio.config.js
new file mode 100644
index 0000000000000000000000000000000000000000..19f8f32584367502b94ff73d80c472108f7a896a
--- /dev/null
+++ b/frontend/gradio.config.js
@@ -0,0 +1,9 @@
+export default {
+ plugins: [],
+ svelte: {
+ preprocess: [],
+ },
+ build: {
+ target: "modules",
+ },
+};
\ No newline at end of file
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
new file mode 100644
index 0000000000000000000000000000000000000000..a55cb442e8fcaab4873b63330b76d17a952d5cd5
--- /dev/null
+++ b/frontend/package-lock.json
@@ -0,0 +1,6785 @@
+{
+ "name": "gradio_labanmovementanalysis",
+ "version": "0.4.22",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "gradio_labanmovementanalysis",
+ "version": "0.4.22",
+ "license": "ISC",
+ "dependencies": {
+ "@gradio/atoms": "0.16.1",
+ "@gradio/statustracker": "0.10.12",
+ "@gradio/utils": "0.10.2",
+ "@zerodevx/svelte-json-view": "^1.0.7"
+ },
+ "devDependencies": {
+ "@gradio/preview": "0.13.0"
+ },
+ "peerDependencies": {
+ "svelte": "^4.0.0"
+ }
+ },
+ "node_modules/@adobe/css-tools": {
+ "version": "4.3.3",
+ "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.3.3.tgz",
+ "integrity": "sha512-rE0Pygv0sEZ4vBWHlAgJLGDU7Pm8xoO6p3wsEceb7GYAjScrOHpEo8KK/eVkAcnSM+slAEtXjA2JpdjLp4fJQQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@ampproject/remapping": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
+ "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@jridgewell/gen-mapping": "^0.3.5",
+ "@jridgewell/trace-mapping": "^0.3.24"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@antfu/install-pkg": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz",
+ "integrity": "sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==",
+ "license": "MIT",
+ "dependencies": {
+ "package-manager-detector": "^1.3.0",
+ "tinyexec": "^1.0.1"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/antfu"
+ }
+ },
+ "node_modules/@antfu/utils": {
+ "version": "8.1.1",
+ "resolved": "https://registry.npmjs.org/@antfu/utils/-/utils-8.1.1.tgz",
+ "integrity": "sha512-Mex9nXf9vR6AhcXmMrlz/HVgYYZpVGJ6YlPgwl7UnaFpnshXs6EK/oa5Gpf3CzENMjkvEx2tQtntGnb7UtSTOQ==",
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/antfu"
+ }
+ },
+ "node_modules/@asamuzakjp/css-color": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-3.2.0.tgz",
+ "integrity": "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==",
+ "license": "MIT",
+ "dependencies": {
+ "@csstools/css-calc": "^2.1.3",
+ "@csstools/css-color-parser": "^3.0.9",
+ "@csstools/css-parser-algorithms": "^3.0.4",
+ "@csstools/css-tokenizer": "^3.0.3",
+ "lru-cache": "^10.4.3"
+ }
+ },
+ "node_modules/@babel/helper-string-parser": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
+ "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/helper-validator-identifier": {
+ "version": "7.27.1",
+ "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
+ "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@babel/parser": {
+ "version": "7.27.4",
+ "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.4.tgz",
+ "integrity": "sha512-BRmLHGwpUqLFR2jzx9orBuX/ABDkj2jLKOXrHDTN2aOKL+jFDDKaRNo9nyYsIl9h/UE/7lMKdDjKQQyxKKDZ7g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.27.3"
+ },
+ "bin": {
+ "parser": "bin/babel-parser.js"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@babel/types": {
+ "version": "7.27.3",
+ "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.3.tgz",
+ "integrity": "sha512-Y1GkI4ktrtvmawoSq+4FCVHNryea6uR+qUQy0AGxLSsjCX0nVmkYQMBLHDkXZuo5hGx7eYdnIaslsdBFm7zbUw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/helper-string-parser": "^7.27.1",
+ "@babel/helper-validator-identifier": "^7.27.1"
+ },
+ "engines": {
+ "node": ">=6.9.0"
+ }
+ },
+ "node_modules/@braintree/sanitize-url": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.1.tgz",
+ "integrity": "sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==",
+ "license": "MIT"
+ },
+ "node_modules/@chevrotain/cst-dts-gen": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz",
+ "integrity": "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@chevrotain/gast": "11.0.3",
+ "@chevrotain/types": "11.0.3",
+ "lodash-es": "4.17.21"
+ }
+ },
+ "node_modules/@chevrotain/gast": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz",
+ "integrity": "sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@chevrotain/types": "11.0.3",
+ "lodash-es": "4.17.21"
+ }
+ },
+ "node_modules/@chevrotain/regexp-to-ast": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz",
+ "integrity": "sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/@chevrotain/types": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz",
+ "integrity": "sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/@chevrotain/utils": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz",
+ "integrity": "sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==",
+ "license": "Apache-2.0"
+ },
+ "node_modules/@csstools/color-helpers": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.0.2.tgz",
+ "integrity": "sha512-JqWH1vsgdGcw2RR6VliXXdA0/59LttzlU8UlRT/iUUsEeWfYq8I+K0yhihEUTTHLRm1EXvpsCx3083EU15ecsA==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT-0",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@csstools/css-calc": {
+ "version": "2.1.4",
+ "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz",
+ "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
+ }
+ },
+ "node_modules/@csstools/css-color-parser": {
+ "version": "3.0.10",
+ "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.0.10.tgz",
+ "integrity": "sha512-TiJ5Ajr6WRd1r8HSiwJvZBiJOqtH86aHpUjq5aEKWHiII2Qfjqd/HCWKPOW8EP4vcspXbHnXrwIDlu5savQipg==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "@csstools/color-helpers": "^5.0.2",
+ "@csstools/css-calc": "^2.1.4"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@csstools/css-parser-algorithms": "^3.0.5",
+ "@csstools/css-tokenizer": "^3.0.4"
+ }
+ },
+ "node_modules/@csstools/css-parser-algorithms": {
+ "version": "3.0.5",
+ "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
+ "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "@csstools/css-tokenizer": "^3.0.4"
+ }
+ },
+ "node_modules/@csstools/css-tokenizer": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
+ "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/csstools"
+ },
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/csstools"
+ }
+ ],
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/aix-ppc64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz",
+ "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz",
+ "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==",
+ "cpu": [
+ "arm"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz",
+ "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==",
+ "cpu": [
+ "arm64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz",
+ "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz",
+ "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==",
+ "cpu": [
+ "arm64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz",
+ "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz",
+ "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==",
+ "cpu": [
+ "arm64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz",
+ "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz",
+ "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==",
+ "cpu": [
+ "arm"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz",
+ "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==",
+ "cpu": [
+ "arm64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz",
+ "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==",
+ "cpu": [
+ "ia32"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.14.54.tgz",
+ "integrity": "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz",
+ "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==",
+ "cpu": [
+ "mips64el"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz",
+ "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==",
+ "cpu": [
+ "ppc64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz",
+ "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==",
+ "cpu": [
+ "riscv64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz",
+ "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==",
+ "cpu": [
+ "s390x"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz",
+ "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz",
+ "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz",
+ "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz",
+ "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz",
+ "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==",
+ "cpu": [
+ "arm64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz",
+ "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==",
+ "cpu": [
+ "ia32"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz",
+ "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==",
+ "cpu": [
+ "x64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@formatjs/ecma402-abstract": {
+ "version": "1.11.4",
+ "resolved": "https://registry.npmjs.org/@formatjs/ecma402-abstract/-/ecma402-abstract-1.11.4.tgz",
+ "integrity": "sha512-EBikYFp2JCdIfGEb5G9dyCkTGDmC57KSHhRQOC3aYxoPWVZvfWCDjZwkGYHN7Lis/fmuWl906bnNTJifDQ3sXw==",
+ "license": "MIT",
+ "dependencies": {
+ "@formatjs/intl-localematcher": "0.2.25",
+ "tslib": "^2.1.0"
+ }
+ },
+ "node_modules/@formatjs/fast-memoize": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/@formatjs/fast-memoize/-/fast-memoize-1.2.1.tgz",
+ "integrity": "sha512-Rg0e76nomkz3vF9IPlKeV+Qynok0r7YZjL6syLz4/urSg0IbjPZCB/iYUMNsYA643gh4mgrX3T7KEIFIxJBQeg==",
+ "license": "MIT",
+ "dependencies": {
+ "tslib": "^2.1.0"
+ }
+ },
+ "node_modules/@formatjs/icu-messageformat-parser": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/@formatjs/icu-messageformat-parser/-/icu-messageformat-parser-2.1.0.tgz",
+ "integrity": "sha512-Qxv/lmCN6hKpBSss2uQ8IROVnta2r9jd3ymUEIjm2UyIkUCHVcbUVRGL/KS/wv7876edvsPe+hjHVJ4z8YuVaw==",
+ "license": "MIT",
+ "dependencies": {
+ "@formatjs/ecma402-abstract": "1.11.4",
+ "@formatjs/icu-skeleton-parser": "1.3.6",
+ "tslib": "^2.1.0"
+ }
+ },
+ "node_modules/@formatjs/icu-skeleton-parser": {
+ "version": "1.3.6",
+ "resolved": "https://registry.npmjs.org/@formatjs/icu-skeleton-parser/-/icu-skeleton-parser-1.3.6.tgz",
+ "integrity": "sha512-I96mOxvml/YLrwU2Txnd4klA7V8fRhb6JG/4hm3VMNmeJo1F03IpV2L3wWt7EweqNLES59SZ4d6hVOPCSf80Bg==",
+ "license": "MIT",
+ "dependencies": {
+ "@formatjs/ecma402-abstract": "1.11.4",
+ "tslib": "^2.1.0"
+ }
+ },
+ "node_modules/@formatjs/intl-localematcher": {
+ "version": "0.2.25",
+ "resolved": "https://registry.npmjs.org/@formatjs/intl-localematcher/-/intl-localematcher-0.2.25.tgz",
+ "integrity": "sha512-YmLcX70BxoSopLFdLr1Ds99NdlTI2oWoLbaUW2M406lxOIPzE1KQhRz2fPUkq34xVZQaihCoU29h0KK7An3bhA==",
+ "license": "MIT",
+ "dependencies": {
+ "tslib": "^2.1.0"
+ }
+ },
+ "node_modules/@gradio/atoms": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@gradio/atoms/-/atoms-0.16.1.tgz",
+ "integrity": "sha512-3T3055D6s1c7TphXtrUBUcgD4LkEF4F7EK0Zu/wx0ChzoANGO5GT2mmrY5lRyup2c8rYtmbyVxTfLQtBSjS+Fg==",
+ "license": "ISC",
+ "dependencies": {
+ "@gradio/icons": "^0.12.0",
+ "@gradio/markdown-code": "^0.4.3",
+ "@gradio/utils": "^0.10.2"
+ },
+ "peerDependencies": {
+ "svelte": "^4.0.0"
+ }
+ },
+ "node_modules/@gradio/icons": {
+ "version": "0.12.0",
+ "resolved": "https://registry.npmjs.org/@gradio/icons/-/icons-0.12.0.tgz",
+ "integrity": "sha512-QQuEcYpJwIBrwxmjjx13qL3abVR5Uma/wdbhOCUqX/eowGBCDo8TKn8mX3oRkBMoCSKAuEI4tClikPTAQg/ozg==",
+ "license": "ISC",
+ "peerDependencies": {
+ "svelte": "^4.0.0"
+ }
+ },
+ "node_modules/@gradio/markdown-code": {
+ "version": "0.4.3",
+ "resolved": "https://registry.npmjs.org/@gradio/markdown-code/-/markdown-code-0.4.3.tgz",
+ "integrity": "sha512-o/qDUUCWZNyl8nuMkgciCeBngyfdLXWOm2yJocpaTrMXGvJJxm80LkBfdHBDwvIcBaPc7zuPRAn7MIJXFQKbyQ==",
+ "license": "ISC",
+ "dependencies": {
+ "@gradio/sanitize": "^0.1.3",
+ "@types/dompurify": "^3.0.2",
+ "@types/katex": "^0.16.0",
+ "@types/prismjs": "1.26.4",
+ "github-slugger": "^2.0.0",
+ "isomorphic-dompurify": "^2.14.0",
+ "katex": "^0.16.7",
+ "marked": "^12.0.0",
+ "marked-gfm-heading-id": "^3.1.2",
+ "marked-highlight": "^2.0.1",
+ "mermaid": "^11.5.0",
+ "prismjs": "1.29.0"
+ },
+ "peerDependencies": {
+ "svelte": "^4.0.0"
+ }
+ },
+ "node_modules/@gradio/preview": {
+ "version": "0.13.0",
+ "resolved": "https://registry.npmjs.org/@gradio/preview/-/preview-0.13.0.tgz",
+ "integrity": "sha512-MuZw7cpBCrcSfbQ4iRAhA++6eUP7h3q5h7E03R43byT211Y7/7fQKZqN3fmZy0+VSzp1XSbdlW09/bXmARdR/Q==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "@originjs/vite-plugin-commonjs": "^1.0.3",
+ "@rollup/plugin-sucrase": "^5.0.1",
+ "@sveltejs/vite-plugin-svelte": "^3.1.0",
+ "@types/which": "^3.0.0",
+ "coffeescript": "^2.7.0",
+ "lightningcss": "^1.21.7",
+ "pug": "^3.0.2",
+ "sass": "^1.66.1",
+ "stylus": "^0.63.0",
+ "sucrase": "^3.34.0",
+ "sugarss": "^4.0.1",
+ "svelte-hmr": "^0.16.0",
+ "svelte-preprocess": "^6.0.3",
+ "typescript": "^5.0.0",
+ "vite": "^5.2.9",
+ "which": "4.0.0",
+ "yootils": "^0.3.1"
+ },
+ "optionalDependencies": {
+ "svelte": "^4.0.0"
+ }
+ },
+ "node_modules/@gradio/sanitize": {
+ "version": "0.1.3",
+ "resolved": "https://registry.npmjs.org/@gradio/sanitize/-/sanitize-0.1.3.tgz",
+ "integrity": "sha512-IQXf1/dqOaUSrJMO9hh9jH03lK+MA2UH977guQIvSXts7rXqorD+ChCVn7WUrAQ9Mf0DYPhRH9NMIp0s1xPZmg==",
+ "license": "ISC",
+ "dependencies": {
+ "amuchina": "^1.0.12",
+ "sanitize-html": "^2.13.0"
+ }
+ },
+ "node_modules/@gradio/statustracker": {
+ "version": "0.10.12",
+ "resolved": "https://registry.npmjs.org/@gradio/statustracker/-/statustracker-0.10.12.tgz",
+ "integrity": "sha512-sA+vWOGfB5Cy16UHJeviIHVVddwbzzTpd54VlHc/F5B+mt81GGP8F9leAz5yizQhiJQmOsvQhzbGASTyt2+G8w==",
+ "license": "ISC",
+ "dependencies": {
+ "@gradio/atoms": "^0.16.1",
+ "@gradio/icons": "^0.12.0",
+ "@gradio/utils": "^0.10.2",
+ "@types/dompurify": "^3.0.2",
+ "dompurify": "^3.0.3"
+ },
+ "peerDependencies": {
+ "svelte": "^4.0.0"
+ }
+ },
+ "node_modules/@gradio/theme": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/@gradio/theme/-/theme-0.4.0.tgz",
+ "integrity": "sha512-O/zkP7D/4U9+vFQN821YJvSemjWzi8b8ezkaJ5/ikMm2XySoAXEqafUHAZ8MEnGYXR/CLeDcoyYG1OrJxS0fnw==",
+ "license": "ISC",
+ "peerDependencies": {
+ "svelte": "^4.0.0"
+ }
+ },
+ "node_modules/@gradio/utils": {
+ "version": "0.10.2",
+ "resolved": "https://registry.npmjs.org/@gradio/utils/-/utils-0.10.2.tgz",
+ "integrity": "sha512-ldGDEqL9kVKPrfnFzfPriCqbtTOe1/IK4FHEhXCGOeqwegqnxjmummWPk633e2Yub2lT3fjEjuyDLJ7Y7vYy3w==",
+ "license": "ISC",
+ "dependencies": {
+ "@gradio/theme": "^0.4.0",
+ "svelte-i18n": "^3.6.0"
+ }
+ },
+ "node_modules/@iconify/types": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz",
+ "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==",
+ "license": "MIT"
+ },
+ "node_modules/@iconify/utils": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-2.3.0.tgz",
+ "integrity": "sha512-GmQ78prtwYW6EtzXRU1rY+KwOKfz32PD7iJh6Iyqw68GiKuoZ2A6pRtzWONz5VQJbp50mEjXh/7NkumtrAgRKA==",
+ "license": "MIT",
+ "dependencies": {
+ "@antfu/install-pkg": "^1.0.0",
+ "@antfu/utils": "^8.1.0",
+ "@iconify/types": "^2.0.0",
+ "debug": "^4.4.0",
+ "globals": "^15.14.0",
+ "kolorist": "^1.8.0",
+ "local-pkg": "^1.0.0",
+ "mlly": "^1.7.4"
+ }
+ },
+ "node_modules/@isaacs/cliui": {
+ "version": "8.0.2",
+ "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
+ "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "string-width": "^5.1.2",
+ "string-width-cjs": "npm:string-width@^4.2.0",
+ "strip-ansi": "^7.0.1",
+ "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
+ "wrap-ansi": "^8.1.0",
+ "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@jridgewell/gen-mapping": {
+ "version": "0.3.8",
+ "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz",
+ "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==",
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/set-array": "^1.2.1",
+ "@jridgewell/sourcemap-codec": "^1.4.10",
+ "@jridgewell/trace-mapping": "^0.3.24"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@jridgewell/resolve-uri": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
+ "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@jridgewell/set-array": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
+ "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
+ "node_modules/@jridgewell/sourcemap-codec": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
+ "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
+ "license": "MIT"
+ },
+ "node_modules/@jridgewell/trace-mapping": {
+ "version": "0.3.25",
+ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
+ "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/resolve-uri": "^3.1.0",
+ "@jridgewell/sourcemap-codec": "^1.4.14"
+ }
+ },
+ "node_modules/@mermaid-js/parser": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.4.0.tgz",
+ "integrity": "sha512-wla8XOWvQAwuqy+gxiZqY+c7FokraOTHRWMsbB4AgRx9Sy7zKslNyejy7E+a77qHfey5GXw/ik3IXv/NHMJgaA==",
+ "license": "MIT",
+ "dependencies": {
+ "langium": "3.3.1"
+ }
+ },
+ "node_modules/@originjs/vite-plugin-commonjs": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/@originjs/vite-plugin-commonjs/-/vite-plugin-commonjs-1.0.3.tgz",
+ "integrity": "sha512-KuEXeGPptM2lyxdIEJ4R11+5ztipHoE7hy8ClZt3PYaOVQ/pyngd2alaSrPnwyFeOW1UagRBaQ752aA1dTMdOQ==",
+ "dev": true,
+ "license": "MulanPSL2",
+ "dependencies": {
+ "esbuild": "^0.14.14"
+ }
+ },
+ "node_modules/@parcel/watcher": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.1.tgz",
+ "integrity": "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "detect-libc": "^1.0.3",
+ "is-glob": "^4.0.3",
+ "micromatch": "^4.0.5",
+ "node-addon-api": "^7.0.0"
+ },
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ },
+ "optionalDependencies": {
+ "@parcel/watcher-android-arm64": "2.5.1",
+ "@parcel/watcher-darwin-arm64": "2.5.1",
+ "@parcel/watcher-darwin-x64": "2.5.1",
+ "@parcel/watcher-freebsd-x64": "2.5.1",
+ "@parcel/watcher-linux-arm-glibc": "2.5.1",
+ "@parcel/watcher-linux-arm-musl": "2.5.1",
+ "@parcel/watcher-linux-arm64-glibc": "2.5.1",
+ "@parcel/watcher-linux-arm64-musl": "2.5.1",
+ "@parcel/watcher-linux-x64-glibc": "2.5.1",
+ "@parcel/watcher-linux-x64-musl": "2.5.1",
+ "@parcel/watcher-win32-arm64": "2.5.1",
+ "@parcel/watcher-win32-ia32": "2.5.1",
+ "@parcel/watcher-win32-x64": "2.5.1"
+ }
+ },
+ "node_modules/@parcel/watcher-android-arm64": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.1.tgz",
+ "integrity": "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-darwin-arm64": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz",
+ "integrity": "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-darwin-x64": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.1.tgz",
+ "integrity": "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-freebsd-x64": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.1.tgz",
+ "integrity": "sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-linux-arm-glibc": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.1.tgz",
+ "integrity": "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-linux-arm-musl": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.1.tgz",
+ "integrity": "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-linux-arm64-glibc": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.1.tgz",
+ "integrity": "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-linux-arm64-musl": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.1.tgz",
+ "integrity": "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-linux-x64-glibc": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.1.tgz",
+ "integrity": "sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-linux-x64-musl": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.1.tgz",
+ "integrity": "sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-win32-arm64": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.1.tgz",
+ "integrity": "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-win32-ia32": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.1.tgz",
+ "integrity": "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher-win32-x64": {
+ "version": "2.5.1",
+ "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.1.tgz",
+ "integrity": "sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 10.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/@parcel/watcher/node_modules/detect-libc": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
+ "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "optional": true,
+ "bin": {
+ "detect-libc": "bin/detect-libc.js"
+ },
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
+ "node_modules/@pkgjs/parseargs": {
+ "version": "0.11.0",
+ "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
+ "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "engines": {
+ "node": ">=14"
+ }
+ },
+ "node_modules/@rollup/plugin-sucrase": {
+ "version": "5.0.2",
+ "resolved": "https://registry.npmjs.org/@rollup/plugin-sucrase/-/plugin-sucrase-5.0.2.tgz",
+ "integrity": "sha512-4MhIVH9Dy2Hwose1/x5QMs0XF7yn9jDd/yozHqzdIrMWIolgFpGnrnVhQkqTaK1RALY/fpyrEKmwH/04vr1THA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@rollup/pluginutils": "^5.0.1",
+ "sucrase": "^3.27.0"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ },
+ "peerDependencies": {
+ "rollup": "^2.53.1||^3.0.0||^4.0.0"
+ },
+ "peerDependenciesMeta": {
+ "rollup": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@rollup/pluginutils": {
+ "version": "5.1.4",
+ "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.4.tgz",
+ "integrity": "sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "^1.0.0",
+ "estree-walker": "^2.0.2",
+ "picomatch": "^4.0.2"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ },
+ "peerDependencies": {
+ "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0"
+ },
+ "peerDependenciesMeta": {
+ "rollup": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@rollup/rollup-android-arm-eabi": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.41.1.tgz",
+ "integrity": "sha512-NELNvyEWZ6R9QMkiytB4/L4zSEaBC03KIXEghptLGLZWJ6VPrL63ooZQCOnlx36aQPGhzuOMwDerC1Eb2VmrLw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-android-arm64": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.41.1.tgz",
+ "integrity": "sha512-DXdQe1BJ6TK47ukAoZLehRHhfKnKg9BjnQYUu9gzhI8Mwa1d2fzxA1aw2JixHVl403bwp1+/o/NhhHtxWJBgEA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-arm64": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.41.1.tgz",
+ "integrity": "sha512-5afxvwszzdulsU2w8JKWwY8/sJOLPzf0e1bFuvcW5h9zsEg+RQAojdW0ux2zyYAz7R8HvvzKCjLNJhVq965U7w==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-x64": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.41.1.tgz",
+ "integrity": "sha512-egpJACny8QOdHNNMZKf8xY0Is6gIMz+tuqXlusxquWu3F833DcMwmGM7WlvCO9sB3OsPjdC4U0wHw5FabzCGZg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-arm64": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.41.1.tgz",
+ "integrity": "sha512-DBVMZH5vbjgRk3r0OzgjS38z+atlupJ7xfKIDJdZZL6sM6wjfDNo64aowcLPKIx7LMQi8vybB56uh1Ftck/Atg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-x64": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.41.1.tgz",
+ "integrity": "sha512-3FkydeohozEskBxNWEIbPfOE0aqQgB6ttTkJ159uWOFn42VLyfAiyD9UK5mhu+ItWzft60DycIN1Xdgiy8o/SA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.41.1.tgz",
+ "integrity": "sha512-wC53ZNDgt0pqx5xCAgNunkTzFE8GTgdZ9EwYGVcg+jEjJdZGtq9xPjDnFgfFozQI/Xm1mh+D9YlYtl+ueswNEg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-musleabihf": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.41.1.tgz",
+ "integrity": "sha512-jwKCca1gbZkZLhLRtsrka5N8sFAaxrGz/7wRJ8Wwvq3jug7toO21vWlViihG85ei7uJTpzbXZRcORotE+xyrLA==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-gnu": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.41.1.tgz",
+ "integrity": "sha512-g0UBcNknsmmNQ8V2d/zD2P7WWfJKU0F1nu0k5pW4rvdb+BIqMm8ToluW/eeRmxCared5dD76lS04uL4UaNgpNA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-musl": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.41.1.tgz",
+ "integrity": "sha512-XZpeGB5TKEZWzIrj7sXr+BEaSgo/ma/kCgrZgL0oo5qdB1JlTzIYQKel/RmhT6vMAvOdM2teYlAaOGJpJ9lahg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-loongarch64-gnu": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.41.1.tgz",
+ "integrity": "sha512-bkCfDJ4qzWfFRCNt5RVV4DOw6KEgFTUZi2r2RuYhGWC8WhCA8lCAJhDeAmrM/fdiAH54m0mA0Vk2FGRPyzI+tw==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-powerpc64le-gnu": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.41.1.tgz",
+ "integrity": "sha512-3mr3Xm+gvMX+/8EKogIZSIEF0WUu0HL9di+YWlJpO8CQBnoLAEL/roTCxuLncEdgcfJcvA4UMOf+2dnjl4Ut1A==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-gnu": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.41.1.tgz",
+ "integrity": "sha512-3rwCIh6MQ1LGrvKJitQjZFuQnT2wxfU+ivhNBzmxXTXPllewOF7JR1s2vMX/tWtUYFgphygxjqMl76q4aMotGw==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-musl": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.41.1.tgz",
+ "integrity": "sha512-LdIUOb3gvfmpkgFZuccNa2uYiqtgZAz3PTzjuM5bH3nvuy9ty6RGc/Q0+HDFrHrizJGVpjnTZ1yS5TNNjFlklw==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-s390x-gnu": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.41.1.tgz",
+ "integrity": "sha512-oIE6M8WC9ma6xYqjvPhzZYk6NbobIURvP/lEbh7FWplcMO6gn7MM2yHKA1eC/GvYwzNKK/1LYgqzdkZ8YFxR8g==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-gnu": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.41.1.tgz",
+ "integrity": "sha512-cWBOvayNvA+SyeQMp79BHPK8ws6sHSsYnK5zDcsC3Hsxr1dgTABKjMnMslPq1DvZIp6uO7kIWhiGwaTdR4Og9A==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-musl": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.41.1.tgz",
+ "integrity": "sha512-y5CbN44M+pUCdGDlZFzGGBSKCA4A/J2ZH4edTYSSxFg7ce1Xt3GtydbVKWLlzL+INfFIZAEg1ZV6hh9+QQf9YQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-arm64-msvc": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.41.1.tgz",
+ "integrity": "sha512-lZkCxIrjlJlMt1dLO/FbpZbzt6J/A8p4DnqzSa4PWqPEUUUnzXLeki/iyPLfV0BmHItlYgHUqJe+3KiyydmiNQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-ia32-msvc": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.41.1.tgz",
+ "integrity": "sha512-+psFT9+pIh2iuGsxFYYa/LhS5MFKmuivRsx9iPJWNSGbh2XVEjk90fmpUEjCnILPEPJnikAU6SFDiEUyOv90Pg==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-x64-msvc": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.41.1.tgz",
+ "integrity": "sha512-Wq2zpapRYLfi4aKxf2Xff0tN+7slj2d4R87WEzqw7ZLsVvO5zwYCIuEGSZYiK41+GlwUo1HiR+GdkLEJnCKTCw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@sveltejs/vite-plugin-svelte": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-3.1.2.tgz",
+ "integrity": "sha512-Txsm1tJvtiYeLUVRNqxZGKR/mI+CzuIQuc2gn+YCs9rMTowpNZ2Nqt53JdL8KF9bLhAf2ruR/dr9eZCwdTriRA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@sveltejs/vite-plugin-svelte-inspector": "^2.1.0",
+ "debug": "^4.3.4",
+ "deepmerge": "^4.3.1",
+ "kleur": "^4.1.5",
+ "magic-string": "^0.30.10",
+ "svelte-hmr": "^0.16.0",
+ "vitefu": "^0.2.5"
+ },
+ "engines": {
+ "node": "^18.0.0 || >=20"
+ },
+ "peerDependencies": {
+ "svelte": "^4.0.0 || ^5.0.0-next.0",
+ "vite": "^5.0.0"
+ }
+ },
+ "node_modules/@sveltejs/vite-plugin-svelte-inspector": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-2.1.0.tgz",
+ "integrity": "sha512-9QX28IymvBlSCqsCll5t0kQVxipsfhFFL+L2t3nTWfXnddYwxBuAEtTtlaVQpRz9c37BhJjltSeY4AJSC03SSg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "debug": "^4.3.4"
+ },
+ "engines": {
+ "node": "^18.0.0 || >=20"
+ },
+ "peerDependencies": {
+ "@sveltejs/vite-plugin-svelte": "^3.0.0",
+ "svelte": "^4.0.0 || ^5.0.0-next.0",
+ "vite": "^5.0.0"
+ }
+ },
+ "node_modules/@types/d3": {
+ "version": "7.4.3",
+ "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz",
+ "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-array": "*",
+ "@types/d3-axis": "*",
+ "@types/d3-brush": "*",
+ "@types/d3-chord": "*",
+ "@types/d3-color": "*",
+ "@types/d3-contour": "*",
+ "@types/d3-delaunay": "*",
+ "@types/d3-dispatch": "*",
+ "@types/d3-drag": "*",
+ "@types/d3-dsv": "*",
+ "@types/d3-ease": "*",
+ "@types/d3-fetch": "*",
+ "@types/d3-force": "*",
+ "@types/d3-format": "*",
+ "@types/d3-geo": "*",
+ "@types/d3-hierarchy": "*",
+ "@types/d3-interpolate": "*",
+ "@types/d3-path": "*",
+ "@types/d3-polygon": "*",
+ "@types/d3-quadtree": "*",
+ "@types/d3-random": "*",
+ "@types/d3-scale": "*",
+ "@types/d3-scale-chromatic": "*",
+ "@types/d3-selection": "*",
+ "@types/d3-shape": "*",
+ "@types/d3-time": "*",
+ "@types/d3-time-format": "*",
+ "@types/d3-timer": "*",
+ "@types/d3-transition": "*",
+ "@types/d3-zoom": "*"
+ }
+ },
+ "node_modules/@types/d3-array": {
+ "version": "3.2.1",
+ "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz",
+ "integrity": "sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-axis": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz",
+ "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/d3-brush": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz",
+ "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/d3-chord": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz",
+ "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-color": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz",
+ "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-contour": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz",
+ "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-array": "*",
+ "@types/geojson": "*"
+ }
+ },
+ "node_modules/@types/d3-delaunay": {
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz",
+ "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-dispatch": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.6.tgz",
+ "integrity": "sha512-4fvZhzMeeuBJYZXRXrRIQnvUYfyXwYmLsdiN7XXmVNQKKw1cM8a5WdID0g1hVFZDqT9ZqZEY5pD44p24VS7iZQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-drag": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz",
+ "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/d3-dsv": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz",
+ "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-ease": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz",
+ "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-fetch": {
+ "version": "3.0.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz",
+ "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-dsv": "*"
+ }
+ },
+ "node_modules/@types/d3-force": {
+ "version": "3.0.10",
+ "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz",
+ "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-format": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz",
+ "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-geo": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz",
+ "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/geojson": "*"
+ }
+ },
+ "node_modules/@types/d3-hierarchy": {
+ "version": "3.1.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz",
+ "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-interpolate": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz",
+ "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-color": "*"
+ }
+ },
+ "node_modules/@types/d3-path": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz",
+ "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-polygon": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz",
+ "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-quadtree": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz",
+ "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-random": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz",
+ "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-scale": {
+ "version": "4.0.9",
+ "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz",
+ "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-time": "*"
+ }
+ },
+ "node_modules/@types/d3-scale-chromatic": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz",
+ "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-selection": {
+ "version": "3.0.11",
+ "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz",
+ "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-shape": {
+ "version": "3.1.7",
+ "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.7.tgz",
+ "integrity": "sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-path": "*"
+ }
+ },
+ "node_modules/@types/d3-time": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz",
+ "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-time-format": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz",
+ "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-timer": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz",
+ "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==",
+ "license": "MIT"
+ },
+ "node_modules/@types/d3-transition": {
+ "version": "3.0.9",
+ "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz",
+ "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/d3-zoom": {
+ "version": "3.0.8",
+ "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz",
+ "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/d3-interpolate": "*",
+ "@types/d3-selection": "*"
+ }
+ },
+ "node_modules/@types/dompurify": {
+ "version": "3.0.5",
+ "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-3.0.5.tgz",
+ "integrity": "sha512-1Wg0g3BtQF7sSb27fJQAKck1HECM6zV1EB66j8JH9i3LCjYabJa0FSdiSgsD5K/RbrsR0SiraKacLB+T8ZVYAg==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/trusted-types": "*"
+ }
+ },
+ "node_modules/@types/estree": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz",
+ "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/geojson": {
+ "version": "7946.0.16",
+ "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz",
+ "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/katex": {
+ "version": "0.16.7",
+ "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.7.tgz",
+ "integrity": "sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==",
+ "license": "MIT"
+ },
+ "node_modules/@types/prismjs": {
+ "version": "1.26.4",
+ "resolved": "https://registry.npmjs.org/@types/prismjs/-/prismjs-1.26.4.tgz",
+ "integrity": "sha512-rlAnzkW2sZOjbqZ743IHUhFcvzaGbqijwOu8QZnZCjfQzBqFE3s4lOTJEsxikImav9uzz/42I+O7YUs1mWgMlg==",
+ "license": "MIT"
+ },
+ "node_modules/@types/trusted-types": {
+ "version": "2.0.7",
+ "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz",
+ "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==",
+ "license": "MIT"
+ },
+ "node_modules/@types/which": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@types/which/-/which-3.0.4.tgz",
+ "integrity": "sha512-liyfuo/106JdlgSchJzXEQCVArk0CvevqPote8F8HgWgJ3dRCcTHgJIsLDuee0kxk/mhbInzIZk3QWSZJ8R+2w==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@zerodevx/svelte-json-view": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/@zerodevx/svelte-json-view/-/svelte-json-view-1.0.11.tgz",
+ "integrity": "sha512-mIjj0H1al/P4FPlbeDoiey93lNEUqBEAe5LIdD5GttZfEYt3awexD2lHwKNfUeY4jHizOJkoWTPN/2iO0GBqpw==",
+ "license": "ISC",
+ "peerDependencies": {
+ "svelte": "^3.57.0 || ^4.0.0 || ^5.0.0"
+ }
+ },
+ "node_modules/acorn": {
+ "version": "8.14.1",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz",
+ "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==",
+ "license": "MIT",
+ "bin": {
+ "acorn": "bin/acorn"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/agent-base": {
+ "version": "7.1.3",
+ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
+ "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/amuchina": {
+ "version": "1.0.12",
+ "resolved": "https://registry.npmjs.org/amuchina/-/amuchina-1.0.12.tgz",
+ "integrity": "sha512-Itv2NEwpiV53+bkpviJIC12+8SOlCSLR1HgQCv6wD7ldNFNesm4JSk7XjvTFkeVfLYzqKEZcEBZO1X/V2MYg4A=="
+ },
+ "node_modules/ansi-regex": {
+ "version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
+ "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-regex?sponsor=1"
+ }
+ },
+ "node_modules/ansi-styles": {
+ "version": "6.2.1",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
+ "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/any-promise": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz",
+ "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/aria-query": {
+ "version": "5.3.2",
+ "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz",
+ "integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/asap": {
+ "version": "2.0.6",
+ "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz",
+ "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/assert-never": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/assert-never/-/assert-never-1.4.0.tgz",
+ "integrity": "sha512-5oJg84os6NMQNl27T9LnZkvvqzvAnHu03ShCnoj6bsJwS7L8AO4lf+C/XjK/nvzEqQB744moC6V128RucQd1jA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/axobject-query": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz",
+ "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/babel-walk": {
+ "version": "3.0.0-canary-5",
+ "resolved": "https://registry.npmjs.org/babel-walk/-/babel-walk-3.0.0-canary-5.tgz",
+ "integrity": "sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/types": "^7.9.6"
+ },
+ "engines": {
+ "node": ">= 10.0.0"
+ }
+ },
+ "node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/braces": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+ "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "fill-range": "^7.1.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/call-bind-apply-helpers": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
+ "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/call-bound": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
+ "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.2",
+ "get-intrinsic": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/character-parser": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz",
+ "integrity": "sha512-+UqJQjFEFaTAs3bNsF2j2kEN1baG/zghZbdqoYEDxGZtJo9LBzl1A+m0D4n3qKx8N2FNv8/Xp6yV9mQmBuptaw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "is-regex": "^1.0.3"
+ }
+ },
+ "node_modules/chevrotain": {
+ "version": "11.0.3",
+ "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz",
+ "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==",
+ "license": "Apache-2.0",
+ "dependencies": {
+ "@chevrotain/cst-dts-gen": "11.0.3",
+ "@chevrotain/gast": "11.0.3",
+ "@chevrotain/regexp-to-ast": "11.0.3",
+ "@chevrotain/types": "11.0.3",
+ "@chevrotain/utils": "11.0.3",
+ "lodash-es": "4.17.21"
+ }
+ },
+ "node_modules/chevrotain-allstar": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz",
+ "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==",
+ "license": "MIT",
+ "dependencies": {
+ "lodash-es": "^4.17.21"
+ },
+ "peerDependencies": {
+ "chevrotain": "^11.0.0"
+ }
+ },
+ "node_modules/chokidar": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz",
+ "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "readdirp": "^4.0.1"
+ },
+ "engines": {
+ "node": ">= 14.16.0"
+ },
+ "funding": {
+ "url": "https://paulmillr.com/funding/"
+ }
+ },
+ "node_modules/cli-color": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/cli-color/-/cli-color-2.0.4.tgz",
+ "integrity": "sha512-zlnpg0jNcibNrO7GG9IeHH7maWFeCz+Ja1wx/7tZNU5ASSSSZ+/qZciM0/LHCYxSdqv5h2sdbQ/PXYdOuetXvA==",
+ "license": "ISC",
+ "dependencies": {
+ "d": "^1.0.1",
+ "es5-ext": "^0.10.64",
+ "es6-iterator": "^2.0.3",
+ "memoizee": "^0.4.15",
+ "timers-ext": "^0.1.7"
+ },
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
+ "node_modules/code-red": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/code-red/-/code-red-1.0.4.tgz",
+ "integrity": "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==",
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/sourcemap-codec": "^1.4.15",
+ "@types/estree": "^1.0.1",
+ "acorn": "^8.10.0",
+ "estree-walker": "^3.0.3",
+ "periscopic": "^3.1.0"
+ }
+ },
+ "node_modules/code-red/node_modules/estree-walker": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz",
+ "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "^1.0.0"
+ }
+ },
+ "node_modules/coffeescript": {
+ "version": "2.7.0",
+ "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.7.0.tgz",
+ "integrity": "sha512-hzWp6TUE2d/jCcN67LrW1eh5b/rSDKQK6oD6VMLlggYVUUFexgTH9z3dNYihzX4RMhze5FTUsUmOXViJKFQR/A==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "cake": "bin/cake",
+ "coffee": "bin/coffee"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "color-name": "~1.1.4"
+ },
+ "engines": {
+ "node": ">=7.0.0"
+ }
+ },
+ "node_modules/color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/commander": {
+ "version": "8.3.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
+ "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 12"
+ }
+ },
+ "node_modules/concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/confbox": {
+ "version": "0.2.2",
+ "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz",
+ "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==",
+ "license": "MIT"
+ },
+ "node_modules/constantinople": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-4.0.1.tgz",
+ "integrity": "sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.6.0",
+ "@babel/types": "^7.6.1"
+ }
+ },
+ "node_modules/cose-base": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz",
+ "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==",
+ "license": "MIT",
+ "dependencies": {
+ "layout-base": "^1.0.0"
+ }
+ },
+ "node_modules/cross-spawn": {
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+ "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/cross-spawn/node_modules/isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/cross-spawn/node_modules/which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "isexe": "^2.0.0"
+ },
+ "bin": {
+ "node-which": "bin/node-which"
+ },
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/css-tree": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz",
+ "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==",
+ "license": "MIT",
+ "dependencies": {
+ "mdn-data": "2.0.30",
+ "source-map-js": "^1.0.1"
+ },
+ "engines": {
+ "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
+ }
+ },
+ "node_modules/cssstyle": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.3.1.tgz",
+ "integrity": "sha512-ZgW+Jgdd7i52AaLYCriF8Mxqft0gD/R9i9wi6RWBhs1pqdPEzPjym7rvRKi397WmQFf3SlyUsszhw+VVCbx79Q==",
+ "license": "MIT",
+ "dependencies": {
+ "@asamuzakjp/css-color": "^3.1.2",
+ "rrweb-cssom": "^0.8.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/cytoscape": {
+ "version": "3.32.0",
+ "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.32.0.tgz",
+ "integrity": "sha512-5JHBC9n75kz5851jeklCPmZWcg3hUe6sjqJvyk3+hVqFaKcHwHgxsjeN1yLmggoUc6STbtm9/NQyabQehfjvWQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
+ "node_modules/cytoscape-cose-bilkent": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz",
+ "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==",
+ "license": "MIT",
+ "dependencies": {
+ "cose-base": "^1.0.0"
+ },
+ "peerDependencies": {
+ "cytoscape": "^3.2.0"
+ }
+ },
+ "node_modules/cytoscape-fcose": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz",
+ "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==",
+ "license": "MIT",
+ "dependencies": {
+ "cose-base": "^2.2.0"
+ },
+ "peerDependencies": {
+ "cytoscape": "^3.2.0"
+ }
+ },
+ "node_modules/cytoscape-fcose/node_modules/cose-base": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz",
+ "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==",
+ "license": "MIT",
+ "dependencies": {
+ "layout-base": "^2.0.0"
+ }
+ },
+ "node_modules/cytoscape-fcose/node_modules/layout-base": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz",
+ "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==",
+ "license": "MIT"
+ },
+ "node_modules/d": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/d/-/d-1.0.2.tgz",
+ "integrity": "sha512-MOqHvMWF9/9MX6nza0KgvFH4HpMU0EF5uUDXqX/BtxtU8NfB0QzRtJ8Oe/6SuS4kbhyzVJwjd97EA4PKrzJ8bw==",
+ "license": "ISC",
+ "dependencies": {
+ "es5-ext": "^0.10.64",
+ "type": "^2.7.2"
+ },
+ "engines": {
+ "node": ">=0.12"
+ }
+ },
+ "node_modules/d3": {
+ "version": "7.9.0",
+ "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz",
+ "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "3",
+ "d3-axis": "3",
+ "d3-brush": "3",
+ "d3-chord": "3",
+ "d3-color": "3",
+ "d3-contour": "4",
+ "d3-delaunay": "6",
+ "d3-dispatch": "3",
+ "d3-drag": "3",
+ "d3-dsv": "3",
+ "d3-ease": "3",
+ "d3-fetch": "3",
+ "d3-force": "3",
+ "d3-format": "3",
+ "d3-geo": "3",
+ "d3-hierarchy": "3",
+ "d3-interpolate": "3",
+ "d3-path": "3",
+ "d3-polygon": "3",
+ "d3-quadtree": "3",
+ "d3-random": "3",
+ "d3-scale": "4",
+ "d3-scale-chromatic": "3",
+ "d3-selection": "3",
+ "d3-shape": "3",
+ "d3-time": "3",
+ "d3-time-format": "4",
+ "d3-timer": "3",
+ "d3-transition": "3",
+ "d3-zoom": "3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-array": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz",
+ "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==",
+ "license": "ISC",
+ "dependencies": {
+ "internmap": "1 - 2"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-axis": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz",
+ "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-brush": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz",
+ "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dispatch": "1 - 3",
+ "d3-drag": "2 - 3",
+ "d3-interpolate": "1 - 3",
+ "d3-selection": "3",
+ "d3-transition": "3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-chord": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz",
+ "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-path": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-color": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz",
+ "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-contour": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz",
+ "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "^3.2.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-delaunay": {
+ "version": "6.0.4",
+ "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz",
+ "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==",
+ "license": "ISC",
+ "dependencies": {
+ "delaunator": "5"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-dispatch": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz",
+ "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-drag": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz",
+ "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dispatch": "1 - 3",
+ "d3-selection": "3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-dsv": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz",
+ "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==",
+ "license": "ISC",
+ "dependencies": {
+ "commander": "7",
+ "iconv-lite": "0.6",
+ "rw": "1"
+ },
+ "bin": {
+ "csv2json": "bin/dsv2json.js",
+ "csv2tsv": "bin/dsv2dsv.js",
+ "dsv2dsv": "bin/dsv2dsv.js",
+ "dsv2json": "bin/dsv2json.js",
+ "json2csv": "bin/json2dsv.js",
+ "json2dsv": "bin/json2dsv.js",
+ "json2tsv": "bin/json2dsv.js",
+ "tsv2csv": "bin/dsv2dsv.js",
+ "tsv2json": "bin/dsv2json.js"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-dsv/node_modules/commander": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz",
+ "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 10"
+ }
+ },
+ "node_modules/d3-ease": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz",
+ "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==",
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-fetch": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz",
+ "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dsv": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-force": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz",
+ "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dispatch": "1 - 3",
+ "d3-quadtree": "1 - 3",
+ "d3-timer": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-format": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz",
+ "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-geo": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz",
+ "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "2.5.0 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-hierarchy": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz",
+ "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-interpolate": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz",
+ "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-color": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-path": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz",
+ "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-polygon": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz",
+ "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-quadtree": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz",
+ "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-random": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz",
+ "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-sankey": {
+ "version": "0.12.3",
+ "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz",
+ "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "d3-array": "1 - 2",
+ "d3-shape": "^1.2.0"
+ }
+ },
+ "node_modules/d3-sankey/node_modules/d3-array": {
+ "version": "2.12.1",
+ "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz",
+ "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "internmap": "^1.0.0"
+ }
+ },
+ "node_modules/d3-sankey/node_modules/d3-path": {
+ "version": "1.0.9",
+ "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz",
+ "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==",
+ "license": "BSD-3-Clause"
+ },
+ "node_modules/d3-sankey/node_modules/d3-shape": {
+ "version": "1.3.7",
+ "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz",
+ "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "d3-path": "1"
+ }
+ },
+ "node_modules/d3-sankey/node_modules/internmap": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz",
+ "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==",
+ "license": "ISC"
+ },
+ "node_modules/d3-scale": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz",
+ "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "2.10.0 - 3",
+ "d3-format": "1 - 3",
+ "d3-interpolate": "1.2.0 - 3",
+ "d3-time": "2.1.1 - 3",
+ "d3-time-format": "2 - 4"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-scale-chromatic": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz",
+ "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-color": "1 - 3",
+ "d3-interpolate": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-selection": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
+ "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-shape": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz",
+ "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-path": "^3.1.0"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-time": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz",
+ "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-array": "2 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-time-format": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz",
+ "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-time": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-timer": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz",
+ "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/d3-transition": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz",
+ "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-color": "1 - 3",
+ "d3-dispatch": "1 - 3",
+ "d3-ease": "1 - 3",
+ "d3-interpolate": "1 - 3",
+ "d3-timer": "1 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "peerDependencies": {
+ "d3-selection": "2 - 3"
+ }
+ },
+ "node_modules/d3-zoom": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz",
+ "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==",
+ "license": "ISC",
+ "dependencies": {
+ "d3-dispatch": "1 - 3",
+ "d3-drag": "2 - 3",
+ "d3-interpolate": "1 - 3",
+ "d3-selection": "2 - 3",
+ "d3-transition": "2 - 3"
+ },
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/dagre-d3-es": {
+ "version": "7.0.11",
+ "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.11.tgz",
+ "integrity": "sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw==",
+ "license": "MIT",
+ "dependencies": {
+ "d3": "^7.9.0",
+ "lodash-es": "^4.17.21"
+ }
+ },
+ "node_modules/data-urls": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz",
+ "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==",
+ "license": "MIT",
+ "dependencies": {
+ "whatwg-mimetype": "^4.0.0",
+ "whatwg-url": "^14.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/dayjs": {
+ "version": "1.11.13",
+ "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz",
+ "integrity": "sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==",
+ "license": "MIT"
+ },
+ "node_modules/debug": {
+ "version": "4.4.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
+ "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
+ "license": "MIT",
+ "dependencies": {
+ "ms": "^2.1.3"
+ },
+ "engines": {
+ "node": ">=6.0"
+ },
+ "peerDependenciesMeta": {
+ "supports-color": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/decimal.js": {
+ "version": "10.5.0",
+ "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.5.0.tgz",
+ "integrity": "sha512-8vDa8Qxvr/+d94hSh5P3IJwI5t8/c0KsMp+g8bNw9cY2icONa5aPfvKeieW1WlG0WQYwwhJ7mjui2xtiePQSXw==",
+ "license": "MIT"
+ },
+ "node_modules/deepmerge": {
+ "version": "4.3.1",
+ "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz",
+ "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/delaunator": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz",
+ "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==",
+ "license": "ISC",
+ "dependencies": {
+ "robust-predicates": "^3.0.2"
+ }
+ },
+ "node_modules/detect-libc": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz",
+ "integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/doctypes": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz",
+ "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/dom-serializer": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
+ "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
+ "license": "MIT",
+ "dependencies": {
+ "domelementtype": "^2.3.0",
+ "domhandler": "^5.0.2",
+ "entities": "^4.2.0"
+ },
+ "funding": {
+ "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1"
+ }
+ },
+ "node_modules/dom-serializer/node_modules/entities": {
+ "version": "4.5.0",
+ "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
+ "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
+ "license": "BSD-2-Clause",
+ "engines": {
+ "node": ">=0.12"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/entities?sponsor=1"
+ }
+ },
+ "node_modules/domelementtype": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
+ "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/fb55"
+ }
+ ],
+ "license": "BSD-2-Clause"
+ },
+ "node_modules/domhandler": {
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
+ "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
+ "license": "BSD-2-Clause",
+ "dependencies": {
+ "domelementtype": "^2.3.0"
+ },
+ "engines": {
+ "node": ">= 4"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/domhandler?sponsor=1"
+ }
+ },
+ "node_modules/dompurify": {
+ "version": "3.2.6",
+ "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.6.tgz",
+ "integrity": "sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==",
+ "license": "(MPL-2.0 OR Apache-2.0)",
+ "optionalDependencies": {
+ "@types/trusted-types": "^2.0.7"
+ }
+ },
+ "node_modules/domutils": {
+ "version": "3.2.2",
+ "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
+ "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
+ "license": "BSD-2-Clause",
+ "dependencies": {
+ "dom-serializer": "^2.0.0",
+ "domelementtype": "^2.3.0",
+ "domhandler": "^5.0.3"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/domutils?sponsor=1"
+ }
+ },
+ "node_modules/dunder-proto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
+ "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.1",
+ "es-errors": "^1.3.0",
+ "gopd": "^1.2.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/eastasianwidth": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
+ "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/emoji-regex": {
+ "version": "9.2.2",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
+ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/entities": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.0.tgz",
+ "integrity": "sha512-aKstq2TDOndCn4diEyp9Uq/Flu2i1GlLkc6XIDQSDMuaFE3OPW5OphLCyQ5SpSJZTb4reN+kTcYru5yIfXoRPw==",
+ "license": "BSD-2-Clause",
+ "engines": {
+ "node": ">=0.12"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/entities?sponsor=1"
+ }
+ },
+ "node_modules/es-define-property": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
+ "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-errors": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
+ "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-object-atoms": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
+ "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es5-ext": {
+ "version": "0.10.64",
+ "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz",
+ "integrity": "sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==",
+ "hasInstallScript": true,
+ "license": "ISC",
+ "dependencies": {
+ "es6-iterator": "^2.0.3",
+ "es6-symbol": "^3.1.3",
+ "esniff": "^2.0.1",
+ "next-tick": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
+ "node_modules/es6-iterator": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz",
+ "integrity": "sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==",
+ "license": "MIT",
+ "dependencies": {
+ "d": "1",
+ "es5-ext": "^0.10.35",
+ "es6-symbol": "^3.1.1"
+ }
+ },
+ "node_modules/es6-symbol": {
+ "version": "3.1.4",
+ "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.4.tgz",
+ "integrity": "sha512-U9bFFjX8tFiATgtkJ1zg25+KviIXpgRvRHS8sau3GfhVzThRQrOeksPeT0BWW2MNZs1OEWJ1DPXOQMn0KKRkvg==",
+ "license": "ISC",
+ "dependencies": {
+ "d": "^1.0.2",
+ "ext": "^1.7.0"
+ },
+ "engines": {
+ "node": ">=0.12"
+ }
+ },
+ "node_modules/es6-weak-map": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.3.tgz",
+ "integrity": "sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==",
+ "license": "ISC",
+ "dependencies": {
+ "d": "1",
+ "es5-ext": "^0.10.46",
+ "es6-iterator": "^2.0.3",
+ "es6-symbol": "^3.1.1"
+ }
+ },
+ "node_modules/esbuild": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.54.tgz",
+ "integrity": "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "optionalDependencies": {
+ "@esbuild/linux-loong64": "0.14.54",
+ "esbuild-android-64": "0.14.54",
+ "esbuild-android-arm64": "0.14.54",
+ "esbuild-darwin-64": "0.14.54",
+ "esbuild-darwin-arm64": "0.14.54",
+ "esbuild-freebsd-64": "0.14.54",
+ "esbuild-freebsd-arm64": "0.14.54",
+ "esbuild-linux-32": "0.14.54",
+ "esbuild-linux-64": "0.14.54",
+ "esbuild-linux-arm": "0.14.54",
+ "esbuild-linux-arm64": "0.14.54",
+ "esbuild-linux-mips64le": "0.14.54",
+ "esbuild-linux-ppc64le": "0.14.54",
+ "esbuild-linux-riscv64": "0.14.54",
+ "esbuild-linux-s390x": "0.14.54",
+ "esbuild-netbsd-64": "0.14.54",
+ "esbuild-openbsd-64": "0.14.54",
+ "esbuild-sunos-64": "0.14.54",
+ "esbuild-windows-32": "0.14.54",
+ "esbuild-windows-64": "0.14.54",
+ "esbuild-windows-arm64": "0.14.54"
+ }
+ },
+ "node_modules/esbuild-android-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.54.tgz",
+ "integrity": "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-android-arm64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.54.tgz",
+ "integrity": "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-darwin-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.54.tgz",
+ "integrity": "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-darwin-arm64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.54.tgz",
+ "integrity": "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-freebsd-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.54.tgz",
+ "integrity": "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-freebsd-arm64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.54.tgz",
+ "integrity": "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-32": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.54.tgz",
+ "integrity": "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.54.tgz",
+ "integrity": "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-arm": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.54.tgz",
+ "integrity": "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-arm64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.54.tgz",
+ "integrity": "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-mips64le": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.54.tgz",
+ "integrity": "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-ppc64le": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.54.tgz",
+ "integrity": "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-riscv64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.54.tgz",
+ "integrity": "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-linux-s390x": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.54.tgz",
+ "integrity": "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-netbsd-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.54.tgz",
+ "integrity": "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-openbsd-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.54.tgz",
+ "integrity": "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-sunos-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.54.tgz",
+ "integrity": "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-windows-32": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.54.tgz",
+ "integrity": "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-windows-64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.54.tgz",
+ "integrity": "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/esbuild-windows-arm64": {
+ "version": "0.14.54",
+ "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.54.tgz",
+ "integrity": "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/escape-string-regexp": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/esniff": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/esniff/-/esniff-2.0.1.tgz",
+ "integrity": "sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==",
+ "license": "ISC",
+ "dependencies": {
+ "d": "^1.0.1",
+ "es5-ext": "^0.10.62",
+ "event-emitter": "^0.3.5",
+ "type": "^2.7.2"
+ },
+ "engines": {
+ "node": ">=0.10"
+ }
+ },
+ "node_modules/estree-walker": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz",
+ "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==",
+ "license": "MIT"
+ },
+ "node_modules/event-emitter": {
+ "version": "0.3.5",
+ "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz",
+ "integrity": "sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==",
+ "license": "MIT",
+ "dependencies": {
+ "d": "1",
+ "es5-ext": "~0.10.14"
+ }
+ },
+ "node_modules/exsolve": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.5.tgz",
+ "integrity": "sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg==",
+ "license": "MIT"
+ },
+ "node_modules/ext": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/ext/-/ext-1.7.0.tgz",
+ "integrity": "sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==",
+ "license": "ISC",
+ "dependencies": {
+ "type": "^2.7.2"
+ }
+ },
+ "node_modules/fill-range": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+ "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "to-regex-range": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/foreground-child": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
+ "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "cross-spawn": "^7.0.6",
+ "signal-exit": "^4.0.1"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/fsevents": {
+ "version": "2.3.3",
+ "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
+ "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
+ }
+ },
+ "node_modules/function-bind": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
+ "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/get-intrinsic": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
+ "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.2",
+ "es-define-property": "^1.0.1",
+ "es-errors": "^1.3.0",
+ "es-object-atoms": "^1.1.1",
+ "function-bind": "^1.1.2",
+ "get-proto": "^1.0.1",
+ "gopd": "^1.2.0",
+ "has-symbols": "^1.1.0",
+ "hasown": "^2.0.2",
+ "math-intrinsics": "^1.1.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/get-proto": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
+ "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "dunder-proto": "^1.0.1",
+ "es-object-atoms": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/github-slugger": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz",
+ "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==",
+ "license": "ISC"
+ },
+ "node_modules/glob": {
+ "version": "7.2.3",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+ "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+ "deprecated": "Glob versions prior to v9 are no longer supported",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.1.1",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/globals": {
+ "version": "15.15.0",
+ "resolved": "https://registry.npmjs.org/globals/-/globals-15.15.0.tgz",
+ "integrity": "sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/globalyzer": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz",
+ "integrity": "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==",
+ "license": "MIT"
+ },
+ "node_modules/globrex": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz",
+ "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==",
+ "license": "MIT"
+ },
+ "node_modules/gopd": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
+ "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/hachure-fill": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz",
+ "integrity": "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==",
+ "license": "MIT"
+ },
+ "node_modules/has-symbols": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
+ "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-tostringtag": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
+ "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "has-symbols": "^1.0.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/hasown": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
+ "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "function-bind": "^1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/html-encoding-sniffer": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
+ "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==",
+ "license": "MIT",
+ "dependencies": {
+ "whatwg-encoding": "^3.1.1"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/htmlparser2": {
+ "version": "8.0.2",
+ "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
+ "integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==",
+ "funding": [
+ "https://github.com/fb55/htmlparser2?sponsor=1",
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/fb55"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "domelementtype": "^2.3.0",
+ "domhandler": "^5.0.3",
+ "domutils": "^3.0.1",
+ "entities": "^4.4.0"
+ }
+ },
+ "node_modules/htmlparser2/node_modules/entities": {
+ "version": "4.5.0",
+ "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
+ "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
+ "license": "BSD-2-Clause",
+ "engines": {
+ "node": ">=0.12"
+ },
+ "funding": {
+ "url": "https://github.com/fb55/entities?sponsor=1"
+ }
+ },
+ "node_modules/http-proxy-agent": {
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
+ "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
+ "license": "MIT",
+ "dependencies": {
+ "agent-base": "^7.1.0",
+ "debug": "^4.3.4"
+ },
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/https-proxy-agent": {
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
+ "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
+ "license": "MIT",
+ "dependencies": {
+ "agent-base": "^7.1.2",
+ "debug": "4"
+ },
+ "engines": {
+ "node": ">= 14"
+ }
+ },
+ "node_modules/iconv-lite": {
+ "version": "0.6.3",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
+ "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
+ "license": "MIT",
+ "dependencies": {
+ "safer-buffer": ">= 2.1.2 < 3.0.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/immutable": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/immutable/-/immutable-5.1.2.tgz",
+ "integrity": "sha512-qHKXW1q6liAk1Oys6umoaZbDRqjcjgSrbnrifHsfsttza7zcvRAsL7mMV6xWcyhwQy7Xj5v4hhbr6b+iDYwlmQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+ "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "node_modules/inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/internmap": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz",
+ "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/intl-messageformat": {
+ "version": "9.13.0",
+ "resolved": "https://registry.npmjs.org/intl-messageformat/-/intl-messageformat-9.13.0.tgz",
+ "integrity": "sha512-7sGC7QnSQGa5LZP7bXLDhVDtQOeKGeBFGHF2Y8LVBwYZoQZCgWeKoPGTa5GMG8g/TzDgeXuYJQis7Ggiw2xTOw==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "@formatjs/ecma402-abstract": "1.11.4",
+ "@formatjs/fast-memoize": "1.2.1",
+ "@formatjs/icu-messageformat-parser": "2.1.0",
+ "tslib": "^2.1.0"
+ }
+ },
+ "node_modules/is-core-module": {
+ "version": "2.16.1",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
+ "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "hasown": "^2.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/is-expression": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-4.0.0.tgz",
+ "integrity": "sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "acorn": "^7.1.1",
+ "object-assign": "^4.1.1"
+ }
+ },
+ "node_modules/is-expression/node_modules/acorn": {
+ "version": "7.4.1",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz",
+ "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "acorn": "bin/acorn"
+ },
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/is-extglob": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+ "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/is-glob": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "is-extglob": "^2.1.1"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-number": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+ "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "engines": {
+ "node": ">=0.12.0"
+ }
+ },
+ "node_modules/is-plain-object": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
+ "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/is-potential-custom-element-name": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
+ "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==",
+ "license": "MIT"
+ },
+ "node_modules/is-promise": {
+ "version": "2.2.2",
+ "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz",
+ "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==",
+ "license": "MIT"
+ },
+ "node_modules/is-reference": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz",
+ "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "^1.0.6"
+ }
+ },
+ "node_modules/is-regex": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
+ "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "call-bound": "^1.0.2",
+ "gopd": "^1.2.0",
+ "has-tostringtag": "^1.0.2",
+ "hasown": "^2.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/isexe": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
+ "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
+ "dev": true,
+ "license": "ISC",
+ "engines": {
+ "node": ">=16"
+ }
+ },
+ "node_modules/isomorphic-dompurify": {
+ "version": "2.25.0",
+ "resolved": "https://registry.npmjs.org/isomorphic-dompurify/-/isomorphic-dompurify-2.25.0.tgz",
+ "integrity": "sha512-bcpJzu9DOjN21qaCVpcoCwUX1ytpvA6EFqCK5RNtPg5+F0Jz9PX50jl6jbEicBNeO87eDDfC7XtPs4zjDClZJg==",
+ "license": "MIT",
+ "dependencies": {
+ "dompurify": "^3.2.6",
+ "jsdom": "^26.1.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/jackspeak": {
+ "version": "3.4.3",
+ "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
+ "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
+ "dev": true,
+ "license": "BlueOak-1.0.0",
+ "dependencies": {
+ "@isaacs/cliui": "^8.0.2"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ },
+ "optionalDependencies": {
+ "@pkgjs/parseargs": "^0.11.0"
+ }
+ },
+ "node_modules/js-stringify": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz",
+ "integrity": "sha512-rtS5ATOo2Q5k1G+DADISilDA6lv79zIiwFd6CcjuIxGKLFm5C+RLImRscVap9k55i+MOZwgliw+NejvkLuGD5g==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/jsdom": {
+ "version": "26.1.0",
+ "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-26.1.0.tgz",
+ "integrity": "sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg==",
+ "license": "MIT",
+ "dependencies": {
+ "cssstyle": "^4.2.1",
+ "data-urls": "^5.0.0",
+ "decimal.js": "^10.5.0",
+ "html-encoding-sniffer": "^4.0.0",
+ "http-proxy-agent": "^7.0.2",
+ "https-proxy-agent": "^7.0.6",
+ "is-potential-custom-element-name": "^1.0.1",
+ "nwsapi": "^2.2.16",
+ "parse5": "^7.2.1",
+ "rrweb-cssom": "^0.8.0",
+ "saxes": "^6.0.0",
+ "symbol-tree": "^3.2.4",
+ "tough-cookie": "^5.1.1",
+ "w3c-xmlserializer": "^5.0.0",
+ "webidl-conversions": "^7.0.0",
+ "whatwg-encoding": "^3.1.1",
+ "whatwg-mimetype": "^4.0.0",
+ "whatwg-url": "^14.1.1",
+ "ws": "^8.18.0",
+ "xml-name-validator": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "peerDependencies": {
+ "canvas": "^3.0.0"
+ },
+ "peerDependenciesMeta": {
+ "canvas": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/jstransformer": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz",
+ "integrity": "sha512-C9YK3Rf8q6VAPDCCU9fnqo3mAfOH6vUGnMcP4AQAYIEpWtfGLpwOTmZ+igtdK5y+VvI2n3CyYSzy4Qh34eq24A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "is-promise": "^2.0.0",
+ "promise": "^7.0.1"
+ }
+ },
+ "node_modules/katex": {
+ "version": "0.16.22",
+ "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.22.tgz",
+ "integrity": "sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg==",
+ "funding": [
+ "https://opencollective.com/katex",
+ "https://github.com/sponsors/katex"
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "commander": "^8.3.0"
+ },
+ "bin": {
+ "katex": "cli.js"
+ }
+ },
+ "node_modules/khroma": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz",
+ "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw=="
+ },
+ "node_modules/kleur": {
+ "version": "4.1.5",
+ "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz",
+ "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/kolorist": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/kolorist/-/kolorist-1.8.0.tgz",
+ "integrity": "sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==",
+ "license": "MIT"
+ },
+ "node_modules/langium": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/langium/-/langium-3.3.1.tgz",
+ "integrity": "sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==",
+ "license": "MIT",
+ "dependencies": {
+ "chevrotain": "~11.0.3",
+ "chevrotain-allstar": "~0.3.0",
+ "vscode-languageserver": "~9.0.1",
+ "vscode-languageserver-textdocument": "~1.0.11",
+ "vscode-uri": "~3.0.8"
+ },
+ "engines": {
+ "node": ">=16.0.0"
+ }
+ },
+ "node_modules/layout-base": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz",
+ "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==",
+ "license": "MIT"
+ },
+ "node_modules/lightningcss": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz",
+ "integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==",
+ "dev": true,
+ "license": "MPL-2.0",
+ "dependencies": {
+ "detect-libc": "^2.0.3"
+ },
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ },
+ "optionalDependencies": {
+ "lightningcss-darwin-arm64": "1.30.1",
+ "lightningcss-darwin-x64": "1.30.1",
+ "lightningcss-freebsd-x64": "1.30.1",
+ "lightningcss-linux-arm-gnueabihf": "1.30.1",
+ "lightningcss-linux-arm64-gnu": "1.30.1",
+ "lightningcss-linux-arm64-musl": "1.30.1",
+ "lightningcss-linux-x64-gnu": "1.30.1",
+ "lightningcss-linux-x64-musl": "1.30.1",
+ "lightningcss-win32-arm64-msvc": "1.30.1",
+ "lightningcss-win32-x64-msvc": "1.30.1"
+ }
+ },
+ "node_modules/lightningcss-darwin-arm64": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz",
+ "integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-darwin-x64": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.1.tgz",
+ "integrity": "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-freebsd-x64": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.1.tgz",
+ "integrity": "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-linux-arm-gnueabihf": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.1.tgz",
+ "integrity": "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-linux-arm64-gnu": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.1.tgz",
+ "integrity": "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-linux-arm64-musl": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.1.tgz",
+ "integrity": "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-linux-x64-gnu": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.1.tgz",
+ "integrity": "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-linux-x64-musl": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.1.tgz",
+ "integrity": "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-win32-arm64-msvc": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.1.tgz",
+ "integrity": "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lightningcss-win32-x64-msvc": {
+ "version": "1.30.1",
+ "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.1.tgz",
+ "integrity": "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MPL-2.0",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">= 12.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/parcel"
+ }
+ },
+ "node_modules/lines-and-columns": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
+ "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/local-pkg": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-1.1.1.tgz",
+ "integrity": "sha512-WunYko2W1NcdfAFpuLUoucsgULmgDBRkdxHxWQ7mK0cQqwPiy8E1enjuRBrhLtZkB5iScJ1XIPdhVEFK8aOLSg==",
+ "license": "MIT",
+ "dependencies": {
+ "mlly": "^1.7.4",
+ "pkg-types": "^2.0.1",
+ "quansync": "^0.2.8"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/antfu"
+ }
+ },
+ "node_modules/locate-character": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz",
+ "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==",
+ "license": "MIT"
+ },
+ "node_modules/lodash-es": {
+ "version": "4.17.21",
+ "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz",
+ "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==",
+ "license": "MIT"
+ },
+ "node_modules/lru-cache": {
+ "version": "10.4.3",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
+ "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
+ "license": "ISC"
+ },
+ "node_modules/lru-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/lru-queue/-/lru-queue-0.1.0.tgz",
+ "integrity": "sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==",
+ "license": "MIT",
+ "dependencies": {
+ "es5-ext": "~0.10.2"
+ }
+ },
+ "node_modules/magic-string": {
+ "version": "0.30.17",
+ "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz",
+ "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==",
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/sourcemap-codec": "^1.5.0"
+ }
+ },
+ "node_modules/marked": {
+ "version": "12.0.2",
+ "resolved": "https://registry.npmjs.org/marked/-/marked-12.0.2.tgz",
+ "integrity": "sha512-qXUm7e/YKFoqFPYPa3Ukg9xlI5cyAtGmyEIzMfW//m6kXwCy2Ps9DYf5ioijFKQ8qyuscrHoY04iJGctu2Kg0Q==",
+ "license": "MIT",
+ "bin": {
+ "marked": "bin/marked.js"
+ },
+ "engines": {
+ "node": ">= 18"
+ }
+ },
+ "node_modules/marked-gfm-heading-id": {
+ "version": "3.2.0",
+ "resolved": "https://registry.npmjs.org/marked-gfm-heading-id/-/marked-gfm-heading-id-3.2.0.tgz",
+ "integrity": "sha512-Xfxpr5lXLDLY10XqzSCA9l2dDaiabQUgtYM9hw8yunyVsB/xYBRpiic6BOiY/EAJw1ik1eWr1ET1HKOAPZBhXg==",
+ "license": "MIT",
+ "dependencies": {
+ "github-slugger": "^2.0.0"
+ },
+ "peerDependencies": {
+ "marked": ">=4 <13"
+ }
+ },
+ "node_modules/marked-highlight": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/marked-highlight/-/marked-highlight-2.2.1.tgz",
+ "integrity": "sha512-SiCIeEiQbs9TxGwle9/OwbOejHCZsohQRaNTY2u8euEXYt2rYUFoiImUirThU3Gd/o6Q1gHGtH9qloHlbJpNIA==",
+ "license": "MIT",
+ "peerDependencies": {
+ "marked": ">=4 <16"
+ }
+ },
+ "node_modules/math-intrinsics": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
+ "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/mdn-data": {
+ "version": "2.0.30",
+ "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz",
+ "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==",
+ "license": "CC0-1.0"
+ },
+ "node_modules/memoizee": {
+ "version": "0.4.17",
+ "resolved": "https://registry.npmjs.org/memoizee/-/memoizee-0.4.17.tgz",
+ "integrity": "sha512-DGqD7Hjpi/1or4F/aYAspXKNm5Yili0QDAFAY4QYvpqpgiY6+1jOfqpmByzjxbWd/T9mChbCArXAbDAsTm5oXA==",
+ "license": "ISC",
+ "dependencies": {
+ "d": "^1.0.2",
+ "es5-ext": "^0.10.64",
+ "es6-weak-map": "^2.0.3",
+ "event-emitter": "^0.3.5",
+ "is-promise": "^2.2.2",
+ "lru-queue": "^0.1.0",
+ "next-tick": "^1.1.0",
+ "timers-ext": "^0.1.7"
+ },
+ "engines": {
+ "node": ">=0.12"
+ }
+ },
+ "node_modules/mermaid": {
+ "version": "11.6.0",
+ "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.6.0.tgz",
+ "integrity": "sha512-PE8hGUy1LDlWIHWBP05SFdqUHGmRcCcK4IzpOKPE35eOw+G9zZgcnMpyunJVUEOgb//KBORPjysKndw8bFLuRg==",
+ "license": "MIT",
+ "dependencies": {
+ "@braintree/sanitize-url": "^7.0.4",
+ "@iconify/utils": "^2.1.33",
+ "@mermaid-js/parser": "^0.4.0",
+ "@types/d3": "^7.4.3",
+ "cytoscape": "^3.29.3",
+ "cytoscape-cose-bilkent": "^4.1.0",
+ "cytoscape-fcose": "^2.2.0",
+ "d3": "^7.9.0",
+ "d3-sankey": "^0.12.3",
+ "dagre-d3-es": "7.0.11",
+ "dayjs": "^1.11.13",
+ "dompurify": "^3.2.4",
+ "katex": "^0.16.9",
+ "khroma": "^2.1.0",
+ "lodash-es": "^4.17.21",
+ "marked": "^15.0.7",
+ "roughjs": "^4.6.6",
+ "stylis": "^4.3.6",
+ "ts-dedent": "^2.2.0",
+ "uuid": "^11.1.0"
+ }
+ },
+ "node_modules/mermaid/node_modules/marked": {
+ "version": "15.0.12",
+ "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.12.tgz",
+ "integrity": "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==",
+ "license": "MIT",
+ "bin": {
+ "marked": "bin/marked.js"
+ },
+ "engines": {
+ "node": ">= 18"
+ }
+ },
+ "node_modules/micromatch": {
+ "version": "4.0.8",
+ "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
+ "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "braces": "^3.0.3",
+ "picomatch": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=8.6"
+ }
+ },
+ "node_modules/micromatch/node_modules/picomatch": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+ "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "engines": {
+ "node": ">=8.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/minimatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+ "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
+ "node_modules/minipass": {
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
+ "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
+ "dev": true,
+ "license": "ISC",
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ }
+ },
+ "node_modules/mlly": {
+ "version": "1.7.4",
+ "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.4.tgz",
+ "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==",
+ "license": "MIT",
+ "dependencies": {
+ "acorn": "^8.14.0",
+ "pathe": "^2.0.1",
+ "pkg-types": "^1.3.0",
+ "ufo": "^1.5.4"
+ }
+ },
+ "node_modules/mlly/node_modules/confbox": {
+ "version": "0.1.8",
+ "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz",
+ "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==",
+ "license": "MIT"
+ },
+ "node_modules/mlly/node_modules/pkg-types": {
+ "version": "1.3.1",
+ "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz",
+ "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==",
+ "license": "MIT",
+ "dependencies": {
+ "confbox": "^0.1.8",
+ "mlly": "^1.7.4",
+ "pathe": "^2.0.1"
+ }
+ },
+ "node_modules/mri": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
+ "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "license": "MIT"
+ },
+ "node_modules/mz": {
+ "version": "2.7.0",
+ "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz",
+ "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "any-promise": "^1.0.0",
+ "object-assign": "^4.0.1",
+ "thenify-all": "^1.0.0"
+ }
+ },
+ "node_modules/nanoid": {
+ "version": "3.3.11",
+ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
+ "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
+ "funding": [
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "bin": {
+ "nanoid": "bin/nanoid.cjs"
+ },
+ "engines": {
+ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
+ }
+ },
+ "node_modules/next-tick": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz",
+ "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==",
+ "license": "ISC"
+ },
+ "node_modules/node-addon-api": {
+ "version": "7.1.1",
+ "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz",
+ "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true
+ },
+ "node_modules/nwsapi": {
+ "version": "2.2.20",
+ "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.20.tgz",
+ "integrity": "sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA==",
+ "license": "MIT"
+ },
+ "node_modules/object-assign": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
+ "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "wrappy": "1"
+ }
+ },
+ "node_modules/package-json-from-dist": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
+ "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
+ "dev": true,
+ "license": "BlueOak-1.0.0"
+ },
+ "node_modules/package-manager-detector": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.3.0.tgz",
+ "integrity": "sha512-ZsEbbZORsyHuO00lY1kV3/t72yp6Ysay6Pd17ZAlNGuGwmWDLCJxFpRs0IzfXfj1o4icJOkUEioexFHzyPurSQ==",
+ "license": "MIT"
+ },
+ "node_modules/parse-srcset": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/parse-srcset/-/parse-srcset-1.0.2.tgz",
+ "integrity": "sha512-/2qh0lav6CmI15FzA3i/2Bzk2zCgQhGMkvhOhKNcBVQ1ldgpbfiNTVslmooUmWJcADi1f1kIeynbDRVzNlfR6Q==",
+ "license": "MIT"
+ },
+ "node_modules/parse5": {
+ "version": "7.3.0",
+ "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
+ "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
+ "license": "MIT",
+ "dependencies": {
+ "entities": "^6.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/inikulin/parse5?sponsor=1"
+ }
+ },
+ "node_modules/path-data-parser": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz",
+ "integrity": "sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==",
+ "license": "MIT"
+ },
+ "node_modules/path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/path-parse": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/path-scurry": {
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
+ "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
+ "dev": true,
+ "license": "BlueOak-1.0.0",
+ "dependencies": {
+ "lru-cache": "^10.2.0",
+ "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.18"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/pathe": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
+ "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
+ "license": "MIT"
+ },
+ "node_modules/periscopic": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz",
+ "integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "^1.0.0",
+ "estree-walker": "^3.0.0",
+ "is-reference": "^3.0.0"
+ }
+ },
+ "node_modules/periscopic/node_modules/estree-walker": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz",
+ "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "^1.0.0"
+ }
+ },
+ "node_modules/picocolors": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
+ "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
+ "license": "ISC"
+ },
+ "node_modules/picomatch": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
+ "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/pirates": {
+ "version": "4.0.7",
+ "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz",
+ "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/pkg-types": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.1.0.tgz",
+ "integrity": "sha512-wmJwA+8ihJixSoHKxZJRBQG1oY8Yr9pGLzRmSsNms0iNWyHHAlZCa7mmKiFR10YPZuz/2k169JiS/inOjBCZ2A==",
+ "license": "MIT",
+ "dependencies": {
+ "confbox": "^0.2.1",
+ "exsolve": "^1.0.1",
+ "pathe": "^2.0.3"
+ }
+ },
+ "node_modules/points-on-curve": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz",
+ "integrity": "sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==",
+ "license": "MIT"
+ },
+ "node_modules/points-on-path": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/points-on-path/-/points-on-path-0.2.1.tgz",
+ "integrity": "sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==",
+ "license": "MIT",
+ "dependencies": {
+ "path-data-parser": "0.1.0",
+ "points-on-curve": "0.2.0"
+ }
+ },
+ "node_modules/postcss": {
+ "version": "8.5.4",
+ "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.4.tgz",
+ "integrity": "sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w==",
+ "funding": [
+ {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ {
+ "type": "tidelift",
+ "url": "https://tidelift.com/funding/github/npm/postcss"
+ },
+ {
+ "type": "github",
+ "url": "https://github.com/sponsors/ai"
+ }
+ ],
+ "license": "MIT",
+ "dependencies": {
+ "nanoid": "^3.3.11",
+ "picocolors": "^1.1.1",
+ "source-map-js": "^1.2.1"
+ },
+ "engines": {
+ "node": "^10 || ^12 || >=14"
+ }
+ },
+ "node_modules/prismjs": {
+ "version": "1.29.0",
+ "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz",
+ "integrity": "sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/promise": {
+ "version": "7.3.1",
+ "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz",
+ "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "asap": "~2.0.3"
+ }
+ },
+ "node_modules/pug": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/pug/-/pug-3.0.3.tgz",
+ "integrity": "sha512-uBi6kmc9f3SZ3PXxqcHiUZLmIXgfgWooKWXcwSGwQd2Zi5Rb0bT14+8CJjJgI8AB+nndLaNgHGrcc6bPIB665g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "pug-code-gen": "^3.0.3",
+ "pug-filters": "^4.0.0",
+ "pug-lexer": "^5.0.1",
+ "pug-linker": "^4.0.0",
+ "pug-load": "^3.0.0",
+ "pug-parser": "^6.0.0",
+ "pug-runtime": "^3.0.1",
+ "pug-strip-comments": "^2.0.0"
+ }
+ },
+ "node_modules/pug-attrs": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-3.0.0.tgz",
+ "integrity": "sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "constantinople": "^4.0.1",
+ "js-stringify": "^1.0.2",
+ "pug-runtime": "^3.0.0"
+ }
+ },
+ "node_modules/pug-code-gen": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-3.0.3.tgz",
+ "integrity": "sha512-cYQg0JW0w32Ux+XTeZnBEeuWrAY7/HNE6TWnhiHGnnRYlCgyAUPoyh9KzCMa9WhcJlJ1AtQqpEYHc+vbCzA+Aw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "constantinople": "^4.0.1",
+ "doctypes": "^1.1.0",
+ "js-stringify": "^1.0.2",
+ "pug-attrs": "^3.0.0",
+ "pug-error": "^2.1.0",
+ "pug-runtime": "^3.0.1",
+ "void-elements": "^3.1.0",
+ "with": "^7.0.0"
+ }
+ },
+ "node_modules/pug-error": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-2.1.0.tgz",
+ "integrity": "sha512-lv7sU9e5Jk8IeUheHata6/UThZ7RK2jnaaNztxfPYUY+VxZyk/ePVaNZ/vwmH8WqGvDz3LrNYt/+gA55NDg6Pg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/pug-filters": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-4.0.0.tgz",
+ "integrity": "sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "constantinople": "^4.0.1",
+ "jstransformer": "1.0.0",
+ "pug-error": "^2.0.0",
+ "pug-walk": "^2.0.0",
+ "resolve": "^1.15.1"
+ }
+ },
+ "node_modules/pug-lexer": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-5.0.1.tgz",
+ "integrity": "sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "character-parser": "^2.2.0",
+ "is-expression": "^4.0.0",
+ "pug-error": "^2.0.0"
+ }
+ },
+ "node_modules/pug-linker": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-4.0.0.tgz",
+ "integrity": "sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "pug-error": "^2.0.0",
+ "pug-walk": "^2.0.0"
+ }
+ },
+ "node_modules/pug-load": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-3.0.0.tgz",
+ "integrity": "sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "object-assign": "^4.1.1",
+ "pug-walk": "^2.0.0"
+ }
+ },
+ "node_modules/pug-parser": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-6.0.0.tgz",
+ "integrity": "sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "pug-error": "^2.0.0",
+ "token-stream": "1.0.0"
+ }
+ },
+ "node_modules/pug-runtime": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-3.0.1.tgz",
+ "integrity": "sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/pug-strip-comments": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz",
+ "integrity": "sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "pug-error": "^2.0.0"
+ }
+ },
+ "node_modules/pug-walk": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-2.0.0.tgz",
+ "integrity": "sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/punycode": {
+ "version": "2.3.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
+ "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/quansync": {
+ "version": "0.2.10",
+ "resolved": "https://registry.npmjs.org/quansync/-/quansync-0.2.10.tgz",
+ "integrity": "sha512-t41VRkMYbkHyCYmOvx/6URnN80H7k4X0lLdBMGsz+maAwrJQYB1djpV6vHrQIBE0WBSGqhtEHrK9U3DWWH8v7A==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/antfu"
+ },
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/sxzz"
+ }
+ ],
+ "license": "MIT"
+ },
+ "node_modules/readdirp": {
+ "version": "4.1.2",
+ "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz",
+ "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 14.18.0"
+ },
+ "funding": {
+ "type": "individual",
+ "url": "https://paulmillr.com/funding/"
+ }
+ },
+ "node_modules/resolve": {
+ "version": "1.22.10",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
+ "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "is-core-module": "^2.16.0",
+ "path-parse": "^1.0.7",
+ "supports-preserve-symlinks-flag": "^1.0.0"
+ },
+ "bin": {
+ "resolve": "bin/resolve"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/robust-predicates": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz",
+ "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==",
+ "license": "Unlicense"
+ },
+ "node_modules/rollup": {
+ "version": "4.41.1",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.41.1.tgz",
+ "integrity": "sha512-cPmwD3FnFv8rKMBc1MxWCwVQFxwf1JEmSX3iQXrRVVG15zerAIXRjMFVWnd5Q5QvgKF7Aj+5ykXFhUl+QGnyOw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "1.0.7"
+ },
+ "bin": {
+ "rollup": "dist/bin/rollup"
+ },
+ "engines": {
+ "node": ">=18.0.0",
+ "npm": ">=8.0.0"
+ },
+ "optionalDependencies": {
+ "@rollup/rollup-android-arm-eabi": "4.41.1",
+ "@rollup/rollup-android-arm64": "4.41.1",
+ "@rollup/rollup-darwin-arm64": "4.41.1",
+ "@rollup/rollup-darwin-x64": "4.41.1",
+ "@rollup/rollup-freebsd-arm64": "4.41.1",
+ "@rollup/rollup-freebsd-x64": "4.41.1",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.41.1",
+ "@rollup/rollup-linux-arm-musleabihf": "4.41.1",
+ "@rollup/rollup-linux-arm64-gnu": "4.41.1",
+ "@rollup/rollup-linux-arm64-musl": "4.41.1",
+ "@rollup/rollup-linux-loongarch64-gnu": "4.41.1",
+ "@rollup/rollup-linux-powerpc64le-gnu": "4.41.1",
+ "@rollup/rollup-linux-riscv64-gnu": "4.41.1",
+ "@rollup/rollup-linux-riscv64-musl": "4.41.1",
+ "@rollup/rollup-linux-s390x-gnu": "4.41.1",
+ "@rollup/rollup-linux-x64-gnu": "4.41.1",
+ "@rollup/rollup-linux-x64-musl": "4.41.1",
+ "@rollup/rollup-win32-arm64-msvc": "4.41.1",
+ "@rollup/rollup-win32-ia32-msvc": "4.41.1",
+ "@rollup/rollup-win32-x64-msvc": "4.41.1",
+ "fsevents": "~2.3.2"
+ }
+ },
+ "node_modules/roughjs": {
+ "version": "4.6.6",
+ "resolved": "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz",
+ "integrity": "sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==",
+ "license": "MIT",
+ "dependencies": {
+ "hachure-fill": "^0.5.2",
+ "path-data-parser": "^0.1.0",
+ "points-on-curve": "^0.2.0",
+ "points-on-path": "^0.2.1"
+ }
+ },
+ "node_modules/rrweb-cssom": {
+ "version": "0.8.0",
+ "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
+ "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==",
+ "license": "MIT"
+ },
+ "node_modules/rw": {
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz",
+ "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==",
+ "license": "BSD-3-Clause"
+ },
+ "node_modules/sade": {
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz",
+ "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==",
+ "license": "MIT",
+ "dependencies": {
+ "mri": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/safer-buffer": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
+ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
+ "license": "MIT"
+ },
+ "node_modules/sanitize-html": {
+ "version": "2.17.0",
+ "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.17.0.tgz",
+ "integrity": "sha512-dLAADUSS8rBwhaevT12yCezvioCA+bmUTPH/u57xKPT8d++voeYE6HeluA/bPbQ15TwDBG2ii+QZIEmYx8VdxA==",
+ "license": "MIT",
+ "dependencies": {
+ "deepmerge": "^4.2.2",
+ "escape-string-regexp": "^4.0.0",
+ "htmlparser2": "^8.0.0",
+ "is-plain-object": "^5.0.0",
+ "parse-srcset": "^1.0.2",
+ "postcss": "^8.3.11"
+ }
+ },
+ "node_modules/sass": {
+ "version": "1.89.1",
+ "resolved": "https://registry.npmjs.org/sass/-/sass-1.89.1.tgz",
+ "integrity": "sha512-eMLLkl+qz7tx/0cJ9wI+w09GQ2zodTkcE/aVfywwdlRcI3EO19xGnbmJwg/JMIm+5MxVJ6outddLZ4Von4E++Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "chokidar": "^4.0.0",
+ "immutable": "^5.0.2",
+ "source-map-js": ">=0.6.2 <2.0.0"
+ },
+ "bin": {
+ "sass": "sass.js"
+ },
+ "engines": {
+ "node": ">=14.0.0"
+ },
+ "optionalDependencies": {
+ "@parcel/watcher": "^2.4.1"
+ }
+ },
+ "node_modules/sax": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz",
+ "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/saxes": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz",
+ "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==",
+ "license": "ISC",
+ "dependencies": {
+ "xmlchars": "^2.2.0"
+ },
+ "engines": {
+ "node": ">=v12.22.7"
+ }
+ },
+ "node_modules/shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "shebang-regex": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/signal-exit": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
+ "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
+ "dev": true,
+ "license": "ISC",
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/source-map": {
+ "version": "0.7.4",
+ "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz",
+ "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==",
+ "dev": true,
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">= 8"
+ }
+ },
+ "node_modules/source-map-js": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
+ "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
+ "license": "BSD-3-Clause",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/string-width": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
+ "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "eastasianwidth": "^0.2.0",
+ "emoji-regex": "^9.2.2",
+ "strip-ansi": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "node_modules/string-width-cjs": {
+ "name": "string-width",
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/string-width-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/string-width-cjs/node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/string-width-cjs/node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi": {
+ "version": "7.1.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
+ "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/strip-ansi?sponsor=1"
+ }
+ },
+ "node_modules/strip-ansi-cjs": {
+ "name": "strip-ansi",
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/strip-ansi-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/stylis": {
+ "version": "4.3.6",
+ "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz",
+ "integrity": "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==",
+ "license": "MIT"
+ },
+ "node_modules/stylus": {
+ "version": "0.63.0",
+ "resolved": "https://registry.npmjs.org/stylus/-/stylus-0.63.0.tgz",
+ "integrity": "sha512-OMlgrTCPzE/ibtRMoeLVhOY0RcNuNWh0rhAVqeKnk/QwcuUKQbnqhZ1kg2vzD8VU/6h3FoPTq4RJPHgLBvX6Bw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@adobe/css-tools": "~4.3.3",
+ "debug": "^4.3.2",
+ "glob": "^7.1.6",
+ "sax": "~1.3.0",
+ "source-map": "^0.7.3"
+ },
+ "bin": {
+ "stylus": "bin/stylus"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "funding": {
+ "url": "https://opencollective.com/stylus"
+ }
+ },
+ "node_modules/sucrase": {
+ "version": "3.35.0",
+ "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz",
+ "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/gen-mapping": "^0.3.2",
+ "commander": "^4.0.0",
+ "glob": "^10.3.10",
+ "lines-and-columns": "^1.1.6",
+ "mz": "^2.7.0",
+ "pirates": "^4.0.1",
+ "ts-interface-checker": "^0.1.9"
+ },
+ "bin": {
+ "sucrase": "bin/sucrase",
+ "sucrase-node": "bin/sucrase-node"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ }
+ },
+ "node_modules/sucrase/node_modules/brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0"
+ }
+ },
+ "node_modules/sucrase/node_modules/commander": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz",
+ "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/sucrase/node_modules/glob": {
+ "version": "10.4.5",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
+ "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "foreground-child": "^3.1.0",
+ "jackspeak": "^3.1.2",
+ "minimatch": "^9.0.4",
+ "minipass": "^7.1.2",
+ "package-json-from-dist": "^1.0.0",
+ "path-scurry": "^1.11.1"
+ },
+ "bin": {
+ "glob": "dist/esm/bin.mjs"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/sucrase/node_modules/minimatch": {
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+ "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=16 || 14 >=14.17"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
+ }
+ },
+ "node_modules/sugarss": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/sugarss/-/sugarss-4.0.1.tgz",
+ "integrity": "sha512-WCjS5NfuVJjkQzK10s8WOBY+hhDxxNt/N6ZaGwxFZ+wN3/lKKFSaaKUNecULcTTvE4urLcKaZFQD8vO0mOZujw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/postcss/"
+ },
+ "peerDependencies": {
+ "postcss": "^8.3.3"
+ }
+ },
+ "node_modules/supports-preserve-symlinks-flag": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
+ "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/svelte": {
+ "version": "4.2.20",
+ "resolved": "https://registry.npmjs.org/svelte/-/svelte-4.2.20.tgz",
+ "integrity": "sha512-eeEgGc2DtiUil5ANdtd8vPwt9AgaMdnuUFnPft9F5oMvU/FHu5IHFic+p1dR/UOB7XU2mX2yHW+NcTch4DCh5Q==",
+ "license": "MIT",
+ "dependencies": {
+ "@ampproject/remapping": "^2.2.1",
+ "@jridgewell/sourcemap-codec": "^1.4.15",
+ "@jridgewell/trace-mapping": "^0.3.18",
+ "@types/estree": "^1.0.1",
+ "acorn": "^8.9.0",
+ "aria-query": "^5.3.0",
+ "axobject-query": "^4.0.0",
+ "code-red": "^1.0.3",
+ "css-tree": "^2.3.1",
+ "estree-walker": "^3.0.3",
+ "is-reference": "^3.0.1",
+ "locate-character": "^3.0.0",
+ "magic-string": "^0.30.4",
+ "periscopic": "^3.1.0"
+ },
+ "engines": {
+ "node": ">=16"
+ }
+ },
+ "node_modules/svelte-hmr": {
+ "version": "0.16.0",
+ "resolved": "https://registry.npmjs.org/svelte-hmr/-/svelte-hmr-0.16.0.tgz",
+ "integrity": "sha512-Gyc7cOS3VJzLlfj7wKS0ZnzDVdv3Pn2IuVeJPk9m2skfhcu5bq3wtIZyQGggr7/Iim5rH5cncyQft/kRLupcnA==",
+ "dev": true,
+ "license": "ISC",
+ "engines": {
+ "node": "^12.20 || ^14.13.1 || >= 16"
+ },
+ "peerDependencies": {
+ "svelte": "^3.19.0 || ^4.0.0"
+ }
+ },
+ "node_modules/svelte-i18n": {
+ "version": "3.7.4",
+ "resolved": "https://registry.npmjs.org/svelte-i18n/-/svelte-i18n-3.7.4.tgz",
+ "integrity": "sha512-yGRCNo+eBT4cPuU7IVsYTYjxB7I2V8qgUZPlHnNctJj5IgbJgV78flsRzpjZ/8iUYZrS49oCt7uxlU3AZv/N5Q==",
+ "license": "MIT",
+ "dependencies": {
+ "cli-color": "^2.0.3",
+ "deepmerge": "^4.2.2",
+ "esbuild": "^0.19.2",
+ "estree-walker": "^2",
+ "intl-messageformat": "^9.13.0",
+ "sade": "^1.8.1",
+ "tiny-glob": "^0.2.9"
+ },
+ "bin": {
+ "svelte-i18n": "dist/cli.js"
+ },
+ "engines": {
+ "node": ">= 16"
+ },
+ "peerDependencies": {
+ "svelte": "^3 || ^4"
+ }
+ },
+ "node_modules/svelte-i18n/node_modules/@esbuild/linux-loong64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz",
+ "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==",
+ "cpu": [
+ "loong64"
+ ],
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/svelte-i18n/node_modules/esbuild": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz",
+ "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==",
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.19.12",
+ "@esbuild/android-arm": "0.19.12",
+ "@esbuild/android-arm64": "0.19.12",
+ "@esbuild/android-x64": "0.19.12",
+ "@esbuild/darwin-arm64": "0.19.12",
+ "@esbuild/darwin-x64": "0.19.12",
+ "@esbuild/freebsd-arm64": "0.19.12",
+ "@esbuild/freebsd-x64": "0.19.12",
+ "@esbuild/linux-arm": "0.19.12",
+ "@esbuild/linux-arm64": "0.19.12",
+ "@esbuild/linux-ia32": "0.19.12",
+ "@esbuild/linux-loong64": "0.19.12",
+ "@esbuild/linux-mips64el": "0.19.12",
+ "@esbuild/linux-ppc64": "0.19.12",
+ "@esbuild/linux-riscv64": "0.19.12",
+ "@esbuild/linux-s390x": "0.19.12",
+ "@esbuild/linux-x64": "0.19.12",
+ "@esbuild/netbsd-x64": "0.19.12",
+ "@esbuild/openbsd-x64": "0.19.12",
+ "@esbuild/sunos-x64": "0.19.12",
+ "@esbuild/win32-arm64": "0.19.12",
+ "@esbuild/win32-ia32": "0.19.12",
+ "@esbuild/win32-x64": "0.19.12"
+ }
+ },
+ "node_modules/svelte-preprocess": {
+ "version": "6.0.3",
+ "resolved": "https://registry.npmjs.org/svelte-preprocess/-/svelte-preprocess-6.0.3.tgz",
+ "integrity": "sha512-PLG2k05qHdhmRG7zR/dyo5qKvakhm8IJ+hD2eFRQmMLHp7X3eJnjeupUtvuRpbNiF31RjVw45W+abDwHEmP5OA==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 18.0.0"
+ },
+ "peerDependencies": {
+ "@babel/core": "^7.10.2",
+ "coffeescript": "^2.5.1",
+ "less": "^3.11.3 || ^4.0.0",
+ "postcss": "^7 || ^8",
+ "postcss-load-config": ">=3",
+ "pug": "^3.0.0",
+ "sass": "^1.26.8",
+ "stylus": ">=0.55",
+ "sugarss": "^2.0.0 || ^3.0.0 || ^4.0.0",
+ "svelte": "^4.0.0 || ^5.0.0-next.100 || ^5.0.0",
+ "typescript": "^5.0.0"
+ },
+ "peerDependenciesMeta": {
+ "@babel/core": {
+ "optional": true
+ },
+ "coffeescript": {
+ "optional": true
+ },
+ "less": {
+ "optional": true
+ },
+ "postcss": {
+ "optional": true
+ },
+ "postcss-load-config": {
+ "optional": true
+ },
+ "pug": {
+ "optional": true
+ },
+ "sass": {
+ "optional": true
+ },
+ "stylus": {
+ "optional": true
+ },
+ "sugarss": {
+ "optional": true
+ },
+ "typescript": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/svelte/node_modules/estree-walker": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz",
+ "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "^1.0.0"
+ }
+ },
+ "node_modules/symbol-tree": {
+ "version": "3.2.4",
+ "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
+ "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==",
+ "license": "MIT"
+ },
+ "node_modules/thenify": {
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz",
+ "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "any-promise": "^1.0.0"
+ }
+ },
+ "node_modules/thenify-all": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz",
+ "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "thenify": ">= 3.1.0 < 4"
+ },
+ "engines": {
+ "node": ">=0.8"
+ }
+ },
+ "node_modules/timers-ext": {
+ "version": "0.1.8",
+ "resolved": "https://registry.npmjs.org/timers-ext/-/timers-ext-0.1.8.tgz",
+ "integrity": "sha512-wFH7+SEAcKfJpfLPkrgMPvvwnEtj8W4IurvEyrKsDleXnKLCDw71w8jltvfLa8Rm4qQxxT4jmDBYbJG/z7qoww==",
+ "license": "ISC",
+ "dependencies": {
+ "es5-ext": "^0.10.64",
+ "next-tick": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=0.12"
+ }
+ },
+ "node_modules/tiny-glob": {
+ "version": "0.2.9",
+ "resolved": "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.9.tgz",
+ "integrity": "sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg==",
+ "license": "MIT",
+ "dependencies": {
+ "globalyzer": "0.1.0",
+ "globrex": "^0.1.2"
+ }
+ },
+ "node_modules/tinyexec": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz",
+ "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==",
+ "license": "MIT"
+ },
+ "node_modules/tldts": {
+ "version": "6.1.86",
+ "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.86.tgz",
+ "integrity": "sha512-WMi/OQ2axVTf/ykqCQgXiIct+mSQDFdH2fkwhPwgEwvJ1kSzZRiinb0zF2Xb8u4+OqPChmyI6MEu4EezNJz+FQ==",
+ "license": "MIT",
+ "dependencies": {
+ "tldts-core": "^6.1.86"
+ },
+ "bin": {
+ "tldts": "bin/cli.js"
+ }
+ },
+ "node_modules/tldts-core": {
+ "version": "6.1.86",
+ "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-6.1.86.tgz",
+ "integrity": "sha512-Je6p7pkk+KMzMv2XXKmAE3McmolOQFdxkKw0R8EYNr7sELW46JqnNeTX8ybPiQgvg1ymCoF8LXs5fzFaZvJPTA==",
+ "license": "MIT"
+ },
+ "node_modules/to-regex-range": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+ "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "dependencies": {
+ "is-number": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=8.0"
+ }
+ },
+ "node_modules/token-stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz",
+ "integrity": "sha512-VSsyNPPW74RpHwR8Fc21uubwHY7wMDeJLys2IX5zJNih+OnAnaifKHo+1LHT7DAdloQ7apeaaWg8l7qnf/TnEg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/tough-cookie": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-5.1.2.tgz",
+ "integrity": "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "tldts": "^6.1.32"
+ },
+ "engines": {
+ "node": ">=16"
+ }
+ },
+ "node_modules/tr46": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
+ "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
+ "license": "MIT",
+ "dependencies": {
+ "punycode": "^2.3.1"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/ts-dedent": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz",
+ "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=6.10"
+ }
+ },
+ "node_modules/ts-interface-checker": {
+ "version": "0.1.13",
+ "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz",
+ "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==",
+ "dev": true,
+ "license": "Apache-2.0"
+ },
+ "node_modules/tslib": {
+ "version": "2.8.1",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
+ "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
+ "license": "0BSD"
+ },
+ "node_modules/type": {
+ "version": "2.7.3",
+ "resolved": "https://registry.npmjs.org/type/-/type-2.7.3.tgz",
+ "integrity": "sha512-8j+1QmAbPvLZow5Qpi6NCaN8FB60p/6x8/vfNqOk/hC+HuvFZhL4+WfekuhQLiqFZXOgQdrs3B+XxEmCc6b3FQ==",
+ "license": "ISC"
+ },
+ "node_modules/typescript": {
+ "version": "5.8.3",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz",
+ "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=14.17"
+ }
+ },
+ "node_modules/ufo": {
+ "version": "1.6.1",
+ "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz",
+ "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==",
+ "license": "MIT"
+ },
+ "node_modules/uuid": {
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz",
+ "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==",
+ "funding": [
+ "https://github.com/sponsors/broofa",
+ "https://github.com/sponsors/ctavan"
+ ],
+ "license": "MIT",
+ "bin": {
+ "uuid": "dist/esm/bin/uuid"
+ }
+ },
+ "node_modules/vite": {
+ "version": "5.4.19",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.19.tgz",
+ "integrity": "sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "esbuild": "^0.21.3",
+ "postcss": "^8.4.43",
+ "rollup": "^4.20.0"
+ },
+ "bin": {
+ "vite": "bin/vite.js"
+ },
+ "engines": {
+ "node": "^18.0.0 || >=20.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/vitejs/vite?sponsor=1"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.3"
+ },
+ "peerDependencies": {
+ "@types/node": "^18.0.0 || >=20.0.0",
+ "less": "*",
+ "lightningcss": "^1.21.0",
+ "sass": "*",
+ "sass-embedded": "*",
+ "stylus": "*",
+ "sugarss": "*",
+ "terser": "^5.4.0"
+ },
+ "peerDependenciesMeta": {
+ "@types/node": {
+ "optional": true
+ },
+ "less": {
+ "optional": true
+ },
+ "lightningcss": {
+ "optional": true
+ },
+ "sass": {
+ "optional": true
+ },
+ "sass-embedded": {
+ "optional": true
+ },
+ "stylus": {
+ "optional": true
+ },
+ "sugarss": {
+ "optional": true
+ },
+ "terser": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/aix-ppc64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
+ "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/android-arm": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
+ "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/android-arm64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
+ "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/android-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
+ "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/darwin-arm64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
+ "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/darwin-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
+ "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
+ "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/freebsd-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
+ "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-arm": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
+ "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-arm64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
+ "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-ia32": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
+ "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-loong64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
+ "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-mips64el": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
+ "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-ppc64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
+ "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-riscv64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
+ "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-s390x": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
+ "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/linux-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
+ "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/netbsd-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
+ "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/openbsd-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
+ "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/sunos-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
+ "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/win32-arm64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
+ "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/win32-ia32": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
+ "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/@esbuild/win32-x64": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
+ "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/vite/node_modules/esbuild": {
+ "version": "0.21.5",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
+ "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.21.5",
+ "@esbuild/android-arm": "0.21.5",
+ "@esbuild/android-arm64": "0.21.5",
+ "@esbuild/android-x64": "0.21.5",
+ "@esbuild/darwin-arm64": "0.21.5",
+ "@esbuild/darwin-x64": "0.21.5",
+ "@esbuild/freebsd-arm64": "0.21.5",
+ "@esbuild/freebsd-x64": "0.21.5",
+ "@esbuild/linux-arm": "0.21.5",
+ "@esbuild/linux-arm64": "0.21.5",
+ "@esbuild/linux-ia32": "0.21.5",
+ "@esbuild/linux-loong64": "0.21.5",
+ "@esbuild/linux-mips64el": "0.21.5",
+ "@esbuild/linux-ppc64": "0.21.5",
+ "@esbuild/linux-riscv64": "0.21.5",
+ "@esbuild/linux-s390x": "0.21.5",
+ "@esbuild/linux-x64": "0.21.5",
+ "@esbuild/netbsd-x64": "0.21.5",
+ "@esbuild/openbsd-x64": "0.21.5",
+ "@esbuild/sunos-x64": "0.21.5",
+ "@esbuild/win32-arm64": "0.21.5",
+ "@esbuild/win32-ia32": "0.21.5",
+ "@esbuild/win32-x64": "0.21.5"
+ }
+ },
+ "node_modules/vitefu": {
+ "version": "0.2.5",
+ "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.5.tgz",
+ "integrity": "sha512-SgHtMLoqaeeGnd2evZ849ZbACbnwQCIwRH57t18FxcXoZop0uQu0uzlIhJBlF/eWVzuce0sHeqPcDo+evVcg8Q==",
+ "dev": true,
+ "license": "MIT",
+ "peerDependencies": {
+ "vite": "^3.0.0 || ^4.0.0 || ^5.0.0"
+ },
+ "peerDependenciesMeta": {
+ "vite": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/void-elements": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz",
+ "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/vscode-jsonrpc": {
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz",
+ "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
+ "node_modules/vscode-languageserver": {
+ "version": "9.0.1",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz",
+ "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==",
+ "license": "MIT",
+ "dependencies": {
+ "vscode-languageserver-protocol": "3.17.5"
+ },
+ "bin": {
+ "installServerIntoExtension": "bin/installServerIntoExtension"
+ }
+ },
+ "node_modules/vscode-languageserver-protocol": {
+ "version": "3.17.5",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz",
+ "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==",
+ "license": "MIT",
+ "dependencies": {
+ "vscode-jsonrpc": "8.2.0",
+ "vscode-languageserver-types": "3.17.5"
+ }
+ },
+ "node_modules/vscode-languageserver-textdocument": {
+ "version": "1.0.12",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz",
+ "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==",
+ "license": "MIT"
+ },
+ "node_modules/vscode-languageserver-types": {
+ "version": "3.17.5",
+ "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz",
+ "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==",
+ "license": "MIT"
+ },
+ "node_modules/vscode-uri": {
+ "version": "3.0.8",
+ "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz",
+ "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==",
+ "license": "MIT"
+ },
+ "node_modules/w3c-xmlserializer": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz",
+ "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==",
+ "license": "MIT",
+ "dependencies": {
+ "xml-name-validator": "^5.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/webidl-conversions": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
+ "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
+ "license": "BSD-2-Clause",
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/whatwg-encoding": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",
+ "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
+ "license": "MIT",
+ "dependencies": {
+ "iconv-lite": "0.6.3"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/whatwg-mimetype": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
+ "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/whatwg-url": {
+ "version": "14.2.0",
+ "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
+ "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
+ "license": "MIT",
+ "dependencies": {
+ "tr46": "^5.1.0",
+ "webidl-conversions": "^7.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/which": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
+ "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "isexe": "^3.1.1"
+ },
+ "bin": {
+ "node-which": "bin/which.js"
+ },
+ "engines": {
+ "node": "^16.13.0 || >=18.0.0"
+ }
+ },
+ "node_modules/with": {
+ "version": "7.0.2",
+ "resolved": "https://registry.npmjs.org/with/-/with-7.0.2.tgz",
+ "integrity": "sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@babel/parser": "^7.9.6",
+ "@babel/types": "^7.9.6",
+ "assert-never": "^1.2.1",
+ "babel-walk": "3.0.0-canary-5"
+ },
+ "engines": {
+ "node": ">= 10.0.0"
+ }
+ },
+ "node_modules/wrap-ansi": {
+ "version": "8.1.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
+ "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^6.1.0",
+ "string-width": "^5.0.1",
+ "strip-ansi": "^7.0.1"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs": {
+ "name": "wrap-ansi",
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "engines": {
+ "node": ">=10"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "color-convert": "^2.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "ansi-regex": "^5.0.1"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
+ "node_modules/wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
+ "dev": true,
+ "license": "ISC"
+ },
+ "node_modules/ws": {
+ "version": "8.18.2",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.2.tgz",
+ "integrity": "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=10.0.0"
+ },
+ "peerDependencies": {
+ "bufferutil": "^4.0.1",
+ "utf-8-validate": ">=5.0.2"
+ },
+ "peerDependenciesMeta": {
+ "bufferutil": {
+ "optional": true
+ },
+ "utf-8-validate": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/xml-name-validator": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz",
+ "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/xmlchars": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz",
+ "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
+ "license": "MIT"
+ },
+ "node_modules/yootils": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/yootils/-/yootils-0.3.1.tgz",
+ "integrity": "sha512-A7AMeJfGefk317I/3tBoUYRcDcNavKEkpiPN/nQsBz/viI2GvT7BtrqdPD6rGqBFN8Ax7v4obf+Cl32JF9DDVw==",
+ "dev": true,
+ "license": "MIT"
+ }
+ }
+}
diff --git a/frontend/package.json b/frontend/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..61148c530e2ed3843a0891e2fae8b4e507e39d7a
--- /dev/null
+++ b/frontend/package.json
@@ -0,0 +1,40 @@
+{
+ "name": "gradio_labanmovementanalysis",
+ "version": "0.4.22",
+ "description": "Gradio UI packages",
+ "type": "module",
+ "author": "",
+ "license": "ISC",
+ "private": false,
+ "main_changeset": true,
+ "exports": {
+ ".": {
+ "gradio": "./Index.svelte",
+ "svelte": "./dist/Index.svelte",
+ "types": "./dist/Index.svelte.d.ts"
+ },
+ "./example": {
+ "gradio": "./Example.svelte",
+ "svelte": "./dist/Example.svelte",
+ "types": "./dist/Example.svelte.d.ts"
+ },
+ "./package.json": "./package.json"
+ },
+ "dependencies": {
+ "@gradio/atoms": "0.16.1",
+ "@gradio/statustracker": "0.10.12",
+ "@gradio/utils": "0.10.2",
+ "@zerodevx/svelte-json-view": "^1.0.7"
+ },
+ "devDependencies": {
+ "@gradio/preview": "0.13.0"
+ },
+ "peerDependencies": {
+ "svelte": "^4.0.0"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/gradio-app/gradio.git",
+ "directory": "js/fallback"
+ }
+}
\ No newline at end of file
diff --git a/mcp.json b/mcp.json
new file mode 100644
index 0000000000000000000000000000000000000000..2edcb24bf6f23fa4ba01f77af4bd4c6ed37b6e47
--- /dev/null
+++ b/mcp.json
@@ -0,0 +1,57 @@
+{
+ "mcpServers": {
+ "laban-movement-analysis": {
+ "command": "python",
+ "args": ["-m", "backend.mcp_server"],
+ "env": {
+ "PYTHONPATH": "."
+ },
+ "schema": {
+ "name": "Laban Movement Analysis",
+ "description": "Analyze human movement in videos using pose estimation and Laban Movement Analysis metrics",
+ "version": "1.0.0",
+ "tools": [
+ {
+ "name": "analyze_video",
+ "description": "Analyze movement in a video file",
+ "parameters": {
+ "video_path": "string",
+ "model": "string (optional)",
+ "enable_visualization": "boolean (optional)",
+ "include_keypoints": "boolean (optional)"
+ }
+ },
+ {
+ "name": "get_analysis_summary",
+ "description": "Get human-readable summary of analysis",
+ "parameters": {
+ "analysis_id": "string"
+ }
+ },
+ {
+ "name": "list_available_models",
+ "description": "List available pose estimation models",
+ "parameters": {}
+ },
+ {
+ "name": "batch_analyze",
+ "description": "Analyze multiple videos in batch",
+ "parameters": {
+ "video_paths": "array of strings",
+ "model": "string (optional)",
+ "parallel": "boolean (optional)"
+ }
+ },
+ {
+ "name": "compare_movements",
+ "description": "Compare movement patterns between videos",
+ "parameters": {
+ "analysis_id1": "string",
+ "analysis_id2": "string"
+ }
+ }
+ ]
+ }
+ }
+ }
+}
\ No newline at end of file
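The `mcp.json` manifest above is plain JSON, so the declared tools can be inspected without starting the server. A minimal sketch, standard library only, assuming `mcp.json` is read from the repository root:

```python
import json

# Read the MCP manifest shown above (path assumed relative to the repo root).
with open("mcp.json", encoding="utf-8") as fh:
    manifest = json.load(fh)

schema = manifest["mcpServers"]["laban-movement-analysis"]["schema"]
print(f"{schema['name']} v{schema['version']}")
for tool in schema["tools"]:
    params = ", ".join(tool["parameters"]) or "none"
    print(f"- {tool['name']}: {tool['description']} (parameters: {params})")
```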
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..7bb35488ee6e7a368235bc16a972ec87c030094f
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,62 @@
+[build-system]
+requires = [
+ "hatchling",
+ "hatch-requirements-txt",
+ "hatch-fancy-pypi-readme>=22.5.0",
+]
+build-backend = "hatchling.build"
+
+[project]
+name = "gradio_labanmovementanalysis"
+version = "0.0.2"
+description = "A Gradio 5 component for video movement analysis using Laban Movement Analysis (LMA) with MCP support for AI agents"
+readme = "README.md"
+license = "apache-2.0"
+requires-python = ">=3.10"
+authors = [{ name = "Csaba Bolyós", email = "bladeszasza@gmail.com" }]
+keywords = ["gradio-custom-component", "gradio-5", "laban-movement-analysis", "LMA", "pose-estimation", "movement-analysis", "mcp", "ai-agents", "webrtc"]
+# Core dependencies
+dependencies = [
+ "gradio>=5.0.0",
+ "opencv-python>=4.8.0",
+ "numpy>=1.24.0",
+ "mediapipe>=0.10.21",
+ "tensorflow>=2.13.0",
+ "ultralytics>=8.0.0",
+ "torch>=2.0.0",
+ "torchvision>=0.15.0"
+]
+classifiers = [
+ 'Development Status :: 4 - Beta',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3 :: Only',
+ 'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: 3.11',
+ 'Programming Language :: Python :: 3.12',
+ 'Topic :: Scientific/Engineering',
+ 'Topic :: Scientific/Engineering :: Artificial Intelligence',
+ 'Topic :: Scientific/Engineering :: Visualization',
+]
+
+# The repository and space URLs are optional, but recommended.
+# Adding a repository URL will create a badge in the auto-generated README that links to the repository.
+# Adding a space URL will create a badge in the auto-generated README that links to the space.
+# This will make it easy for people to find your deployed demo or source code when they
+# encounter your project in the wild.
+
+# [project.urls]
+# repository = "your github repository"
+# space = "your space url"
+
+[project.optional-dependencies]
+dev = ["build", "twine"]
+mcp = ["mcp>=1.0.0", "aiofiles>=23.0.0", "httpx>=0.24.0"]
+agent = ["gradio-client>=1.0.0"]
+all = ["gradio_labanmovementanalysis[mcp,agent]"]
+
+[tool.hatch.build]
+artifacts = ["/backend/gradio_labanmovementanalysis/templates", "*.pyi"]
+
+[tool.hatch.build.targets.wheel]
+packages = ["/backend/gradio_labanmovementanalysis"]
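Since the MCP and agent features ship as optional extras (`mcp` and `agent` above), a quick preflight check can confirm they are importable before launching `backend.mcp_server`. A hedged sketch using only the standard library; the module names below are assumptions derived from the package names in the extras:

```python
from importlib.util import find_spec

# Optional extras from pyproject.toml mapped to the modules they are expected
# to provide (module names are assumptions based on the package names).
EXTRAS = {
    "mcp": ["mcp", "aiofiles", "httpx"],
    "agent": ["gradio_client"],
}

for extra, modules in EXTRAS.items():
    missing = [m for m in modules if find_spec(m) is None]
    status = "ok" if not missing else "missing: " + ", ".join(missing)
    print(f"[{extra}] {status}")
```

Installing everything at once would look like `pip install "gradio_labanmovementanalysis[all]"`, per the `all` extra defined above.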
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..9a0820fe6bbb871869d85078f75b0344c894472d
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,49 @@
+# Laban Movement Analysis - Complete Suite
+# Created by: Csaba Bolyós (BladeSzaSza)
+# Heavy Beta Version
+
+# Core Gradio and UI
+gradio>=5.0.0
+gradio-webrtc>=0.0.31
+
+# Computer Vision and Pose Estimation
+opencv-python>=4.8.0
+mediapipe>=0.10.21
+ultralytics>=8.0.0
+
+# Scientific Computing
+numpy>=1.21.0,<2.0.0
+scipy>=1.7.0
+pandas>=1.3.0
+
+# Image and Video Processing
+Pillow>=8.3.0
+imageio>=2.19.0
+imageio-ffmpeg>=0.4.7
+moviepy>=1.0.3
+
+# Machine Learning
+torch>=2.0.0
+torchvision>=0.15.0
+tensorflow>=2.10.0
+
+# WebRTC and Streaming
+twilio>=8.2.0
+aiortc>=1.4.0
+av>=10.0.0
+
+# Utilities
+requests>=2.28.0
+yt-dlp>=2023.1.6
+tqdm>=4.64.0
+matplotlib>=3.5.0
+seaborn>=0.11.0
+
+# Development and Deployment
+python-multipart>=0.0.5
+uvicorn>=0.18.0
+fastapi>=0.95.0
+
+# Optional WebRTC dependencies
+aiohttp>=3.8.0
+websockets>=10.0
\ No newline at end of file
diff --git a/run_mcp_server.bat b/run_mcp_server.bat
new file mode 100644
index 0000000000000000000000000000000000000000..ac47b331660559d51a30fd296c75b453b6807ec1
--- /dev/null
+++ b/run_mcp_server.bat
@@ -0,0 +1,28 @@
+@echo off
+REM Run script for Laban Movement Analysis MCP Server (Windows)
+
+echo Starting Laban Movement Analysis MCP Server...
+echo.
+
+REM Check if virtual environment exists
+if exist "venv\Scripts\activate.bat" (
+ echo Activating virtual environment...
+ call venv\Scripts\activate.bat
+) else if exist ".venv\Scripts\activate.bat" (
+ echo Activating virtual environment...
+ call .venv\Scripts\activate.bat
+)
+
+REM Install dependencies if needed
+echo Checking dependencies...
+pip install -q -r backend\requirements-mcp.txt
+
+REM Set Python path
+set PYTHONPATH=%PYTHONPATH%;%cd%
+
+REM Run MCP server
+echo.
+echo Starting MCP server...
+echo Use Ctrl+C to stop the server
+echo.
+python -m backend.mcp_server
\ No newline at end of file
diff --git a/run_mcp_server.sh b/run_mcp_server.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3f5d0c8154f845dc5f4a47f8f2c929ba12c38743
--- /dev/null
+++ b/run_mcp_server.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# Run script for Laban Movement Analysis MCP Server
+
+echo "Starting Laban Movement Analysis MCP Server..."
+echo ""
+
+# Check if virtual environment exists
+if [ -d "venv" ]; then
+ echo "Activating virtual environment..."
+ source venv/bin/activate
+elif [ -d ".venv" ]; then
+ echo "Activating virtual environment..."
+ source .venv/bin/activate
+fi
+
+# Install dependencies if needed
+echo "Checking dependencies..."
+pip install -q -r backend/requirements-mcp.txt
+
+# Set Python path
+export PYTHONPATH="${PYTHONPATH}:$(pwd)"
+
+# Run MCP server
+echo ""
+echo "Starting MCP server..."
+echo "Use Ctrl+C to stop the server"
+echo ""
+python -m backend.mcp_server
\ No newline at end of file
diff --git a/version.py b/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..b717d21697216bf31aeb830c79d0004d2f5b5321
--- /dev/null
+++ b/version.py
@@ -0,0 +1,54 @@
+"""
+Laban Movement Analysis - Complete Suite
+Version Information
+
+Created by: Csaba Bolyós (BladeSzaSza)
+Contact: bladeszasza@gmail.com
+GitHub: https://github.com/bladeszasza
+LinkedIn: https://www.linkedin.com/in/csaba-bolyós-00a11767/
+Hugging Face: https://huggingface.co/BladeSzaSza
+"""
+
+__version__ = "0.01-beta"
+__author__ = "Csaba Bolyós (BladeSzaSza)"
+__email__ = "bladeszasza@gmail.com"
+__description__ = "Professional movement analysis with pose estimation, AI action recognition, real-time processing, and agent automation"
+__url__ = "https://huggingface.co/spaces/BladeSzaSza/laban-movement-analysis"
+
+# Release Information
+RELEASE_NOTES = """
+Laban Movement Analysis - Complete Suite v0.01-beta
+
+✨ INITIAL BETA RELEASE ✨
+
+Core Features:
+- 17+ Pose Estimation Models (MediaPipe, MoveNet, YOLO v8/v11 with x variants)
+- YouTube & Vimeo URL Support
+- Real-time WebRTC Camera Analysis
+- Agent API with MCP Integration
+- Batch Processing & Movement Filtering
+- Professional VIRIDIAN UI Theme
+
+Technical Stack:
+- Gradio 5.0+ Frontend
+- OpenCV + MediaPipe + Ultralytics YOLO
+- WebRTC Streaming Technology
+- FastAPI Backend Integration
+
+⚠️ Beta Status:
+This is a heavy beta release. Features are actively being developed and refined.
+Report issues at: https://github.com/bladeszasza/labanmovementanalysis
+
+Created with ❤️ by Csaba Bolyós
+"""
+
+def get_version_info():
+ """Get detailed version information."""
+ return {
+ "version": __version__,
+ "author": __author__,
+ "email": __email__,
+ "description": __description__,
+ "url": __url__,
+ "release_notes": RELEASE_NOTES
+ }
\ No newline at end of file
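A minimal usage sketch for the helper above, assuming it is run from the repository root so that `version.py` is importable:

```python
# Print the package metadata exposed by version.py (run from the repo root).
from version import __version__, get_version_info

info = get_version_info()
print(f"Laban Movement Analysis {__version__} by {info['author']}")
print(info["description"])
```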