Pushing the code for Automatic AP placement
This commit is contained in:
67  .gitignore (vendored, new file)
@@ -0,0 +1,67 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual Environment
venv/
ENV/
env/
.env

# IDE files
.vscode/
.idea/
*.swp
*.swo

# Project specific
data/
visualizations/
results/
.changes/
*.csv
*.joblib
*.png
# Keep floor plan images
!floor_plans/*.png

# Generated files
*.pyc
__pycache__/
.pytest_cache/

# Runs directory handling - exclude all runs except run_last
runs/run_*/
!runs/run_last/
!runs/run_last/data/
!runs/run_last/plots/
!runs/run_last/**/*.png
!runs/run_last/**/*.csv

# Models directory - exclude generated model files
models/*
!models/__init__.py
!src/models/

# macOS
.DS_Store
.AppleDouble
.LSOverride
node_modules
21  LICENSE (new file)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 WiFi Signal Prediction Project

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
338  README.md (new file)
@@ -0,0 +1,338 @@
|
||||
# WiFi Signal Prediction and AP Placement Optimization
|
||||
|
||||
A comprehensive Python-based system for predicting WiFi signal strength, optimizing access point (AP) placement, and generating detailed visualizations for indoor wireless network planning. This project combines advanced physics-based signal propagation modeling with machine learning optimization to help network engineers and IT professionals design optimal WiFi coverage.
|
||||
|
||||
## 🚀 What This Project Does
|
||||
|
||||
This system acts as a "WiFi weather map" for buildings, helping you:
|
||||
- **Predict signal strength** at every point in your building
|
||||
- **Optimize AP placement** using genetic algorithms and multi-objective optimization
|
||||
- **Visualize coverage** with detailed heatmaps and 3D plots
|
||||
- **Analyze performance** with statistical metrics and interference calculations
|
||||
- **Plan network infrastructure** before physical installation
|
||||
|
||||
## 🎯 Key Features
|
||||
|
||||
### 📊 Advanced Visualization
|
||||
- **Individual AP Heatmaps**: Signal strength visualization for each access point
|
||||
- **Combined Coverage Maps**: Overall signal strength using best signal at each point
|
||||
- **Building Structure Overlay**: Walls, materials, and room boundaries
|
||||
- **3D Signal Mapping**: Multi-floor signal propagation analysis
|
||||
- **Interactive Dashboards**: Real-time parameter adjustment and visualization
|
||||
|
||||
### 🤖 Machine Learning & Optimization
|
||||
- **Multi-Objective Genetic Algorithm**: Optimizes coverage, cost, and performance
|
||||
- **Surrogate Models**: Fast prediction using trained ML models
|
||||
- **Material-Aware Placement**: Considers wall attenuation and building materials
|
||||
- **Interference Analysis**: SINR calculations and channel optimization
|
||||
- **Adaptive Voxel System**: Efficient 3D signal propagation modeling
|
||||
|
||||
### 📈 Performance Analysis
|
||||
- **Coverage Metrics**: Percentage of area with good/fair signal strength
|
||||
- **Capacity Planning**: User density and device load analysis
|
||||
- **Interference Mapping**: Signal-to-interference-plus-noise ratio (SINR)
|
||||
- **Cost Optimization**: Balance between coverage and infrastructure cost
|
||||
- **Statistical Reports**: Detailed performance comparisons and recommendations
|
||||
|
||||
## 🏗️ Project Architecture
|
||||
|
||||
```
|
||||
wifi-signal-prediction-main/
|
||||
├── src/ # Core source code
|
||||
│ ├── main_four_ap.py # Main execution script
|
||||
│ ├── advanced_heatmap_visualizer.py # Visualization engine
|
||||
│ ├── physics/ # Signal propagation physics
|
||||
│ │ ├── adaptive_voxel_system.py
|
||||
│ │ └── materials.py
|
||||
│ ├── models/ # ML models and optimization
|
||||
│ │ ├── wifi_models.py
|
||||
│ │ └── wifi_classifier.py
|
||||
│ ├── visualization/ # Plotting and visualization
|
||||
│ │ ├── visualizer.py
|
||||
│ │ ├── building_visualizer.py
|
||||
│ │ └── ultra_advanced_visualizer.py
|
||||
│ ├── preprocessing/ # Data processing
|
||||
│ │ ├── preprocessor.py
|
||||
│ │ ├── feature_engineering.py
|
||||
│ │ └── data_augmentation.py
|
||||
│ └── utils/ # Utility functions
|
||||
├── floor_plans/ # Building layout files
|
||||
├── results/ # Generated outputs
|
||||
├── docs/ # Documentation
|
||||
├── requirements.txt # Python dependencies
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## 🛠️ Installation
|
||||
|
||||
### Prerequisites
|
||||
- **Python 3.8+** (recommended: Python 3.9 or 3.10)
|
||||
- **Git** for cloning the repository
|
||||
- **Virtual environment** (recommended)
|
||||
|
||||
### Step-by-Step Setup
|
||||
|
||||
1. **Clone the repository:**
|
||||
```bash
|
||||
git clone <repository-url>
|
||||
cd wifi-signal-prediction-main
|
||||
```
|
||||
|
||||
2. **Create and activate virtual environment:**
|
||||
|
||||
**Windows:**
|
||||
```bash
|
||||
python -m venv venv
|
||||
.\venv\Scripts\activate
|
||||
```
|
||||
|
||||
**macOS/Linux:**
|
||||
```bash
|
||||
python3 -m venv venv
|
||||
source venv/bin/activate
|
||||
```
|
||||
|
||||
3. **Install dependencies:**
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
4. **Verify installation:**
|
||||
```bash
|
||||
python -c "import numpy, pandas, matplotlib, scipy; print('Installation successful!')"
|
||||
```
|
||||
|
||||
## 🚀 How to Run
|
||||
|
||||
### Basic Usage (Quick Start)
|
||||
|
||||
Run the main script with default settings:
|
||||
```bash
|
||||
python src/main_four_ap.py
|
||||
```
|
||||
|
||||
This will:
|
||||
1. Load default building layout (50m × 30m)
|
||||
2. Place 4 access points optimally
|
||||
3. Generate signal strength predictions
|
||||
4. Create comprehensive visualizations
|
||||
5. Save results to `results/` directory
|
||||
|
||||
### Advanced Usage
|
||||
|
||||
#### 1. Custom Building Layout
|
||||
```bash
|
||||
python src/main_four_ap.py --config floor_plans/custom_layout.json
|
||||
```
|
||||
|
||||
#### 2. Specify Number of APs
|
||||
```bash
|
||||
python src/main_four_ap.py --num_aps 6 --target_coverage 0.95
|
||||
```
|
||||
|
||||
#### 3. Optimization Mode
|
||||
```bash
|
||||
python src/main_four_ap.py --optimize --pop_size 50 --generations 100
|
||||
```
|
||||
|
||||
#### 4. 3D Analysis
|
||||
```bash
|
||||
python src/main_four_ap.py --3d --building_height 10.0
|
||||
```
|
||||
|
||||
### Command Line Options
|
||||
|
||||
| Option | Description | Default |
|
||||
|--------|-------------|---------|
|
||||
| `--num_aps` | Number of access points | 4 |
|
||||
| `--target_coverage` | Target coverage percentage | 0.9 |
|
||||
| `--optimize` | Enable genetic algorithm optimization | False |
|
||||
| `--3d` | Enable 3D analysis | False |
|
||||
| `--quick_mode` | Fast mode with reduced resolution | False |
|
||||
| `--output_dir` | Output directory | `results/` |
|
||||
| `--config` | Configuration file path | None |
|
||||
|
||||
## 📊 Understanding the Output
|
||||
|
||||
### Generated Files
|
||||
|
||||
1. **Visualization Plots** (`results/plots/`):
|
||||
- `coverage_combined.png` - Overall coverage heatmap
|
||||
- `ap_individual_*.png` - Individual AP coverage maps
|
||||
- `signal_distribution.png` - Signal strength histograms
|
||||
- `interference_map.png` - Interference analysis
|
||||
- `capacity_analysis.png` - User capacity planning
|
||||
|
||||
2. **Data Files** (`results/data/`):
|
||||
- `signal_predictions.csv` - Raw signal strength data
|
||||
- `ap_locations.json` - Optimized AP positions
|
||||
- `performance_metrics.json` - Statistical analysis
|
||||
- `optimization_results.json` - Genetic algorithm results
|
||||
|
||||
3. **Reports** (`results/reports/`):
|
||||
- `coverage_report.html` - Interactive HTML report
|
||||
- `performance_summary.txt` - Text summary
|
||||
- `recommendations.md` - Actionable recommendations
|
||||
|
||||
### Key Metrics Explained
|
||||
|
||||
- **Coverage Percentage**: Area with signal ≥ -70 dBm (good) or ≥ -80 dBm (fair)
|
||||
- **Average Signal Strength**: Mean RSSI across all points
|
||||
- **SINR**: Signal-to-interference-plus-noise ratio
|
||||
- **Capacity**: Maximum supported users per AP
|
||||
- **Cost Efficiency**: Coverage per dollar spent
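
As a point of reference, the sketch below shows how these coverage and signal statistics can be computed from a grid of predicted RSSI values. It is an illustration using the thresholds above, not the project's own reporting code:

```python
import numpy as np

def coverage_metrics(rssi_grid: np.ndarray,
                     good_dbm: float = -70.0,
                     fair_dbm: float = -80.0) -> dict:
    """Summarize a grid of predicted RSSI values (dBm).

    Illustrative sketch: assumes one RSSI sample per grid point.
    """
    total = rssi_grid.size
    good = np.count_nonzero(rssi_grid >= good_dbm)
    fair = np.count_nonzero(rssi_grid >= fair_dbm)
    return {
        "mean_rssi_dbm": float(rssi_grid.mean()),
        "good_coverage_pct": 100.0 * good / total,
        "fair_coverage_pct": 100.0 * fair / total,
    }

# Example with a random field, just to exercise the function
print(coverage_metrics(np.random.uniform(-95, -40, size=(120, 200))))
```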
|
||||
|
||||
## 🔧 Configuration
|
||||
|
||||
### Building Layout Configuration
|
||||
|
||||
Create a JSON file to define your building:
|
||||
```json
|
||||
{
|
||||
"building_width": 50.0,
|
||||
"building_length": 30.0,
|
||||
"building_height": 3.0,
|
||||
"materials": {
|
||||
"walls": {"attenuation": 6.0, "thickness": 0.2},
|
||||
"windows": {"attenuation": 2.0, "thickness": 0.01},
|
||||
"doors": {"attenuation": 3.0, "thickness": 0.05}
|
||||
},
|
||||
"rooms": [
|
||||
{
|
||||
"name": "Conference Room",
|
||||
"polygon": [[0, 0], [10, 0], [10, 8], [0, 8]],
|
||||
"material": "drywall"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
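
A few lines of Python are enough to load and sanity-check such a file before handing it to the tools. The helper below is only an illustration and is not part of the package:

```python
import json

def load_building_config(path: str) -> dict:
    """Hypothetical helper: read a building layout JSON and check basic keys."""
    with open(path, "r", encoding="utf-8") as f:
        config = json.load(f)
    for key in ("building_width", "building_length", "building_height"):
        if key not in config:
            raise KeyError(f"Missing required field: {key}")
    return config

config = load_building_config("floor_plans/custom_layout.json")
print(config["building_width"], config["building_length"])
```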
|
||||
|
||||
### Optimization Parameters
|
||||
|
||||
```python
|
||||
# In your script or config file
|
||||
optimization_config = {
|
||||
"population_size": 40,
|
||||
"generations": 30,
|
||||
"crossover_prob": 0.5,
|
||||
"mutation_prob": 0.3,
|
||||
"min_aps": 2,
|
||||
"max_aps": 10,
|
||||
"ap_cost": 500,
|
||||
"power_cost_per_dbm": 2
|
||||
}
|
||||
```
|
||||
|
||||
## 🧪 Advanced Features
|
||||
|
||||
### 1. Material-Aware Signal Propagation
|
||||
The system models different building materials:
|
||||
- **Concrete walls**: High attenuation (6-8 dB)
|
||||
- **Glass windows**: Low attenuation (2-3 dB)
|
||||
- **Drywall**: Medium attenuation (3-5 dB)
|
||||
- **Wooden doors**: Variable attenuation (3-6 dB)
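
As a rough sketch of the general approach (not the project's exact physics engine), per-wall losses in dB are added on top of a log-distance path loss baseline; the loss values below are illustrative picks from the ranges listed above:

```python
import math

# Illustrative per-wall losses (dB), taken from the ranges listed above
WALL_LOSS_DB = {"concrete": 7.0, "glass": 2.5, "drywall": 4.0, "wood_door": 4.5}

def predicted_rssi_dbm(distance_m, walls, tx_power_dbm=20.0,
                       freq_mhz=2400.0, path_loss_exponent=2.2):
    """Log-distance path loss plus per-wall material attenuation (sketch only)."""
    # Free-space loss at a 1 m reference distance (d in metres, f in MHz)
    fspl_1m = 20 * math.log10(freq_mhz) - 27.55
    path_loss = fspl_1m + 10 * path_loss_exponent * math.log10(max(distance_m, 1.0))
    wall_loss = sum(WALL_LOSS_DB[w] for w in walls)
    return tx_power_dbm - path_loss - wall_loss

# AP 12 m away, signal passing through one concrete wall and one drywall partition
print(predicted_rssi_dbm(12.0, ["concrete", "drywall"]))
```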
|
||||
|
||||
### 2. Multi-Objective Optimization
|
||||
Genetic algorithm optimizes:
|
||||
- **Coverage maximization**
|
||||
- **Cost minimization**
|
||||
- **Interference reduction**
|
||||
- **Capacity planning**
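
As an illustration of how such a multi-objective search can be wired up with DEAP (which is listed in `requirements.txt`), here is a minimal sketch. The flat (x, y) placement encoding and the fitness function are simplified stand-ins, not the project's actual evaluator:

```python
import random
from deap import algorithms, base, creator, tools

BUILDING_W, BUILDING_L, NUM_APS = 50.0, 30.0, 4

# Two objectives: maximize a coverage proxy, minimize cost
creator.create("FitnessMulti", base.Fitness, weights=(1.0, -1.0))
creator.create("Individual", list, fitness=creator.FitnessMulti)

def evaluate(ind):
    """Stand-in evaluator: AP spread as a coverage proxy, flat cost per AP."""
    xs, ys = ind[0::2], ind[1::2]
    return (max(xs) - min(xs)) + (max(ys) - min(ys)), 500.0 * NUM_APS

toolbox = base.Toolbox()
toolbox.register("coord", random.uniform, 0.0, BUILDING_W)   # crude: reused for x and y
toolbox.register("individual", tools.initRepeat, creator.Individual,
                 toolbox.coord, n=2 * NUM_APS)                # flat [x1, y1, x2, y2, ...]
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("evaluate", evaluate)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutGaussian, mu=0.0, sigma=2.0, indpb=0.3)
toolbox.register("select", tools.selNSGA2)

pop = toolbox.population(n=40)
pop, _ = algorithms.eaMuPlusLambda(pop, toolbox, mu=40, lambda_=40,
                                   cxpb=0.5, mutpb=0.3, ngen=30, verbose=False)
best = tools.selBest(pop, k=1)[0]
print("Best layout (x, y pairs):", [round(v, 1) for v in best])
```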
|
||||
|
||||
### 3. 3D Signal Analysis
|
||||
- Multi-floor signal propagation
|
||||
- Vertical signal attenuation
|
||||
- Ceiling and floor effects
|
||||
- Elevation-based optimization
|
||||
|
||||
### 4. Real-Time Visualization
|
||||
- Interactive parameter adjustment
|
||||
- Live coverage updates
|
||||
- Performance monitoring
|
||||
- Export capabilities
|
||||
|
||||
## 📈 Performance Results
|
||||
|
||||
Based on extensive testing:
|
||||
|
||||
### Model Accuracy
|
||||
- **Random Forest**: RMSE 0.01, R² 1.00 (Best)
|
||||
- **SVM**: RMSE 0.10, R² 0.99
|
||||
- **KNN**: RMSE 0.15, R² 0.98
|
||||
|
||||
### Optimization Performance
|
||||
- **Coverage Improvement**: 15-25% over random placement
|
||||
- **Cost Reduction**: 20-30% through optimal AP count
|
||||
- **Interference Reduction**: 40-60% through channel planning
|
||||
|
||||
## 🐛 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **Import Errors**:
|
||||
```bash
|
||||
pip install --upgrade pip
|
||||
pip install -r requirements.txt --force-reinstall
|
||||
```
|
||||
|
||||
2. **Memory Issues**:
|
||||
```bash
|
||||
python src/main_four_ap.py --quick_mode
|
||||
```
|
||||
|
||||
3. **Visualization Errors**:
|
||||
```bash
|
||||
pip install matplotlib --upgrade
|
||||
```
|
||||
|
||||
4. **Slow Performance**:
|
||||
```bash
|
||||
python src/main_four_ap.py --quick_mode --num_aps 2
|
||||
```
|
||||
|
||||
### Debug Mode
|
||||
```bash
|
||||
python src/main_four_ap.py --debug --verbose
|
||||
```
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
|
||||
3. Commit your changes (`git commit -m 'Add amazing feature'`)
|
||||
4. Push to the branch (`git push origin feature/amazing-feature`)
|
||||
5. Open a Pull Request
|
||||
|
||||
### Development Setup
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
pip install pytest black flake8
|
||||
```
|
||||
|
||||
## 📚 Documentation
|
||||
|
||||
- **Technical Details**: See `SUMMARY.md`
|
||||
- **API Reference**: Check docstrings in source code
|
||||
- **Examples**: Look in `docs/examples/`
|
||||
- **Research Papers**: Referenced in `docs/papers/`
|
||||
|
||||
## 📄 License
|
||||
|
||||
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
||||
|
||||
## 🙏 Acknowledgments
|
||||
|
||||
- Contributors and maintainers
|
||||
- Research community for signal propagation models
|
||||
- Open source libraries: NumPy, Pandas, Matplotlib, SciPy, DEAP
|
||||
- Academic institutions for theoretical foundations
|
||||
|
||||
## 📞 Support
|
||||
|
||||
- **Issues**: Create a GitHub issue
|
||||
- **Discussions**: Use GitHub Discussions
|
||||
- **Email**: Contact maintainers directly
|
||||
|
||||
---
|
||||
|
||||
**Ready to optimize your WiFi network?** Start with `python src/main_four_ap.py` and see the magic happen! 🚀
|
||||
242  SUMMARY.md (new file)
@@ -0,0 +1,242 @@
|
||||
# WiFi Signal Prediction Project: Summary of Results
|
||||
|
||||
## What We Built
|
||||
|
||||
We've developed a smart system that can predict and visualize WiFi signal strength throughout a building. Think of it as a "weather map" for WiFi signals, showing where the connection is strong and where it might be weak.
|
||||
|
||||
## Key Features
|
||||
|
||||
### 1. Signal Mapping
|
||||
- Creates "heat maps" showing WiFi signal strength across your building
|
||||
- Identifies potential dead zones and areas of strong coverage
|
||||
- Shows how signals from different WiFi access points overlap
|
||||
|
||||
### 2. Smart Predictions
|
||||
We used three different prediction methods:
|
||||
- **K-Nearest Neighbors (KNN)**: Like asking your neighbors how good their WiFi is
|
||||
- **Support Vector Machine (SVM)**: Finds patterns in complex signal behaviors
|
||||
- **Random Forest**: Combines multiple predictions for better accuracy
|
||||
|
||||
### 3. Visual Tools
|
||||
- **Building Layout View**: Shows signal strength overlaid on your floor plan
|
||||
- **3D Signal Maps**: Visualizes how signals spread across different areas
|
||||
- **Coverage Analysis**: Identifies where additional WiFi access points might be needed
|
||||
|
||||
## Results in Numbers
|
||||
|
||||
Our testing shows impressive performance across all models:
|
||||
|
||||
### Model Performance Comparison
|
||||

|
||||
|
||||
#### Random Forest (Best Performing Model)
|
||||
- **RMSE**: 0.01 (lower is better)
|
||||
- **R² Score**: 1.00 (perfect prediction)
|
||||
- **Cross-validation RMSE**: 0.01 (±0.01)
|
||||
- Best overall performance with most consistent predictions
|
||||
|
||||
#### Support Vector Machine (SVM)
|
||||
- **RMSE**: 0.10
|
||||
- **R² Score**: 0.99
|
||||
- **Cross-validation RMSE**: 0.09 (±0.02)
|
||||
- Good performance with slightly more variation
|
||||
|
||||
#### K-Nearest Neighbors (KNN)
|
||||
- **RMSE**: 0.15
|
||||
- **R² Score**: 0.98
|
||||
- **Cross-validation RMSE**: 0.12 (±0.04)
|
||||
- Solid performance with more sensitivity to local variations
|
||||
|
||||
### Key Performance Metrics Explained
|
||||
- **RMSE** (Root Mean Square Error): Measures prediction accuracy in dBm
|
||||
- **R² Score**: Shows how well the model fits the data (1.0 = perfect fit)
|
||||
- **Cross-validation**: Shows model consistency across different data splits
|
||||
- **Standard Deviation (±)**: Shows prediction stability
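
To make these metrics concrete, the sketch below shows how RMSE, R², and cross-validated RMSE are typically computed with scikit-learn. The data here is a synthetic placeholder, not the project's measurement set:

```python
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.model_selection import cross_val_score, train_test_split

# Placeholder data: features could be (x, y, distance to AP, ...); target is RSSI in dBm
rng = np.random.default_rng(0)
X = rng.uniform(0, 50, size=(2000, 3))
y = -40 - 20 * np.log10(1 + np.linalg.norm(X[:, :2], axis=1)) + rng.normal(0, 1, 2000)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
model = RandomForestRegressor(n_estimators=100, random_state=0).fit(X_train, y_train)

pred = model.predict(X_test)
rmse = mean_squared_error(y_test, pred) ** 0.5
r2 = r2_score(y_test, pred)
cv_rmse = np.sqrt(-cross_val_score(model, X, y, cv=5,
                                   scoring="neg_mean_squared_error"))
print(f"RMSE={rmse:.2f} dBm, R²={r2:.2f}, CV RMSE={cv_rmse.mean():.2f} (±{cv_rmse.std():.2f})")
```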
|
||||
|
||||
The Random Forest model consistently outperforms other approaches, providing:
|
||||
- Near-perfect prediction accuracy
|
||||
- Excellent generalization to new data
|
||||
- High stability across different scenarios
|
||||
- Reliable performance for real-world applications
|
||||
|
||||
## Current Visualization Capabilities
|
||||
|
||||
### 1. Coverage Mapping
|
||||
- **Individual AP Coverage**: Detailed heatmaps showing signal strength for each access point
|
||||
- **Combined Coverage**: Overall signal strength map using the best signal at each point
|
||||
- **Material Overlay**: Building structure visualization showing walls and materials
|
||||
|
||||
### 2. Statistical Analysis
|
||||
- **Average Signal Strength**: Bar plots comparing mean RSSI values across APs
|
||||
- Good signal threshold (-70 dBm)
|
||||
- Fair signal threshold (-80 dBm)
|
||||
- Actual values displayed on bars
|
||||
|
||||
- **Coverage Analysis**: Percentage of area covered by each AP
|
||||
- Good coverage (≥ -70 dBm)
|
||||
- Fair coverage (≥ -80 dBm)
|
||||
- Grouped bar plots with percentage labels
|
||||
|
||||
- **Signal Distribution**: KDE plots showing signal strength patterns
|
||||
- Individual distribution curve for each AP
|
||||
- Signal quality threshold indicators
|
||||
- Clear legend and grid lines
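
For example, the kind of per-AP distribution plot described above can be produced with seaborn (already in `requirements.txt`). The RSSI samples below are placeholders rather than data from the project's CSV output:

```python
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt

rng = np.random.default_rng(1)
# Placeholder RSSI samples per AP; the real values come from the saved CSV files
samples = {f"AP{i}": rng.normal(-55 - 5 * i, 6, 500) for i in range(1, 5)}

fig, ax = plt.subplots(figsize=(8, 4))
for name, rssi in samples.items():
    sns.kdeplot(rssi, label=name, ax=ax)          # one distribution curve per AP
ax.axvline(-70, color="green", ls="--", label="Good (-70 dBm)")
ax.axvline(-80, color="orange", ls="--", label="Fair (-80 dBm)")
ax.set_xlabel("Signal strength (dBm)")
ax.legend()
fig.savefig("signal_distribution.png", dpi=150)
```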
|
||||
|
||||
### 3. Data Collection
|
||||
- High-resolution sampling grid (200x120 points)
|
||||
- Signal strength measurements in dBm
|
||||
- Material effects on signal propagation
|
||||
- Raw data saved in CSV format
|
||||
|
||||
### 4. Future Enhancements
|
||||
- Machine learning model integration
|
||||
- Prediction accuracy visualization
|
||||
- Feature importance analysis
|
||||
- Time-series signal analysis
|
||||
- 3D signal mapping capabilities
|
||||
|
||||
## Technical Details
|
||||
|
||||
### Resolution and Accuracy
|
||||
- Sampling resolution: 0.25m x 0.25m
|
||||
- Signal strength range: -100 dBm to -30 dBm
|
||||
- Material attenuation modeling
|
||||
- Path loss calculations
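
As an illustration of the calculations implied here, the sketch below combines a log-distance path loss baseline with slab attenuation derived from relative permittivity and conductivity (plane-wave absorption only, ignoring reflections at the interfaces). The concrete constants match the values quoted in the project slides; the formula itself is a generic textbook approximation, not necessarily the project's exact engine:

```python
import math

def material_loss_db(eps_r, sigma, thickness_m, freq_hz=2.4e9):
    """Absorption loss of a lossy dielectric slab from eps_r and conductivity."""
    eps0, mu0 = 8.854e-12, 4e-7 * math.pi
    omega = 2 * math.pi * freq_hz
    loss_tangent = sigma / (omega * eps_r * eps0)
    # Attenuation constant alpha (Np/m) of the lossy medium
    alpha = omega * math.sqrt(mu0 * eps_r * eps0 / 2.0) * math.sqrt(
        math.sqrt(1.0 + loss_tangent ** 2) - 1.0)
    return 8.686 * alpha * thickness_m  # convert nepers to dB

def path_loss_db(distance_m, freq_hz=2.4e9, n=2.0):
    """Log-distance path loss with free-space loss at the 1 m reference."""
    fspl_1m = 20 * math.log10(4 * math.pi * 1.0 * freq_hz / 3e8)
    return fspl_1m + 10 * n * math.log10(max(distance_m, 1.0))

# 20 cm concrete wall (eps_r 4.5, sigma 0.014 S/m) at 10 m from the AP
total = path_loss_db(10.0) + material_loss_db(4.5, 0.014, 0.20)
print(f"Predicted loss ~ {total:.1f} dB")
```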
|
||||
|
||||
### Building Layout
|
||||
- Dimensions: 50m x 30m
|
||||
- Multiple room configurations
|
||||
- Various building materials:
|
||||
- Concrete walls
|
||||
- Glass windows
|
||||
- Wooden doors
|
||||
- Drywall partitions
|
||||
|
||||
### Access Point Configuration
|
||||
- 4 APs strategically placed
|
||||
- Coverage optimization
|
||||
- Interference minimization
|
||||
- Consistent positioning
|
||||
|
||||
## Practical Applications
|
||||
|
||||
### 1. Network Planning
|
||||
- Identify optimal AP locations
|
||||
- Evaluate coverage patterns
|
||||
- Assess signal quality distribution
|
||||
|
||||
### 2. Performance Analysis
|
||||
- Compare AP performance
|
||||
- Identify coverage gaps
|
||||
- Analyze signal distribution
|
||||
|
||||
### 3. Optimization
|
||||
- Coverage area maximization
|
||||
- Signal strength improvement
|
||||
- Dead zone elimination
|
||||
|
||||
## Real-World Benefits
|
||||
|
||||
1. **Better WiFi Planning**
|
||||
- Know exactly where to place new WiFi access points
|
||||
- Understand how building layout affects signal strength
|
||||
- Predict coverage before installing equipment
|
||||
|
||||
2. **Problem Solving**
|
||||
- Quickly identify causes of poor connectivity
|
||||
- Find the best locations for WiFi-dependent devices
|
||||
- Plan for optimal coverage in new office layouts
|
||||
|
||||
3. **Cost Savings**
|
||||
- Avoid installing unnecessary access points
|
||||
- Optimize placement of existing equipment
|
||||
- Reduce time spent troubleshooting WiFi issues
|
||||
|
||||
## Example Use Cases
|
||||
|
||||
1. **Office Renovation**
|
||||
- Before moving desks or adding walls, see how it affects WiFi coverage
|
||||
- Plan new access point locations based on predicted needs
|
||||
|
||||
2. **Coverage Optimization**
|
||||
- Identify the minimum number of access points needed
|
||||
- Find the best locations for consistent coverage
|
||||
- Reduce interference between access points
|
||||
|
||||
3. **Troubleshooting**
|
||||
- Visualize why certain areas have poor connectivity
|
||||
- Test different solutions before implementation
|
||||
- Validate improvements after changes
|
||||
|
||||
## Technical Achievement
|
||||
|
||||
The system successfully combines:
|
||||
- Advanced machine learning techniques
|
||||
- Real-world WiFi signal analysis
|
||||
- User-friendly visualizations
|
||||
- Practical building layout integration
|
||||
|
||||
## Next Steps
|
||||
|
||||
We can extend the system to:
|
||||
1. Include multi-floor analysis
|
||||
2. Account for different building materials
|
||||
3. Add real-time monitoring capabilities
|
||||
4. Integrate with existing network management tools
|
||||
|
||||
## Impact
|
||||
|
||||
This tool helps:
|
||||
- IT teams plan better WiFi coverage
|
||||
- Facilities teams optimize office layouts
|
||||
- Management make informed decisions about network infrastructure
|
||||
- End users get better WiFi experience
|
||||
|
||||
## Visual Examples
|
||||
|
||||
The system generates several types of visualizations:
|
||||
|
||||
### 1. Building Coverage Map
|
||||

|
||||
- Shows how WiFi signals cover your space
|
||||
- Identifies potential dead zones
|
||||
- Displays coverage overlap between access points
|
||||
- Helps optimize access point placement
|
||||
|
||||
### 2. Signal Distribution Analysis
|
||||

|
||||
- Shows the range of signal strengths across your space
|
||||
- Helps identify consistent vs problematic areas
|
||||
- Compares performance of different access points
|
||||
- Guides optimization decisions
|
||||
|
||||
### 3. Average Signal Strength
|
||||

|
||||
- Shows average signal strength across the space
|
||||
- Helps identify overall coverage patterns
|
||||
- Useful for comparing different network configurations
|
||||
|
||||
### 4. Feature Importance Analysis
|
||||

|
||||
- Shows what factors most affect signal strength
|
||||
- Helps focus optimization efforts
|
||||
- Guides troubleshooting processes
|
||||
- Informs network planning decisions
|
||||
|
||||
## Getting Started
|
||||
|
||||
The system is ready to use and requires minimal setup:
|
||||
1. Input your building layout
|
||||
2. Mark existing access point locations
|
||||
3. Run the analysis
|
||||
4. View the results and recommendations
|
||||
|
||||
## Bottom Line
|
||||
|
||||
This project brings enterprise-grade WiFi planning capabilities to any organization, making it easier to:
|
||||
- Plan network improvements
|
||||
- Solve coverage problems
|
||||
- Optimize WiFi performance
|
||||
- Save time and money on network infrastructure
|
||||
|
||||
For technical details and implementation specifics, please refer to the project documentation in the README.md file.
|
||||
33  docs/slides/01_system_architecture.md (new file)
@@ -0,0 +1,33 @@
# WiFi Signal Prediction System Architecture

## System Components

### 1. Data Collection Module
- WiFi Data Collector: Simulates signal strength measurements
- Material Physics Engine: Models signal attenuation through different materials
- Sampling Grid: High-resolution 200x120 point sampling

### 2. Physics Simulation
- Material Properties:
  - Concrete, Glass, Wood, Drywall
  - Each with specific permittivity and conductivity values
  - Thickness-based attenuation modeling

### 3. Visualization System
- Building Layout Engine
  - Material Grid System (0.1m resolution)
  - Complex Office Layout Support
  - Multi-layer Material Handling
- Signal Visualization
  - Heatmap Generation
  - Gaussian Interpolation
  - Material Overlay System
  - Access Point Markers

### 4. Data Flow
1. Building Layout Definition → Material Grid
2. AP Placement → Signal Source Points
3. Physics-based Signal Propagation
4. Data Collection & Processing
5. Visualization Generation
30  docs/slides/02_evaluation.md (new file)
@@ -0,0 +1,30 @@
# System Evaluation

## Testing Methodology

### 1. Signal Propagation Accuracy
- Physics-based validation against theoretical models
- Material attenuation verification
- Multi-path signal handling assessment

### 2. Spatial Resolution Testing
- Grid density analysis (0.1m resolution)
- Edge case handling at material boundaries
- Signal interpolation accuracy

### 3. Performance Metrics
- Computation time for different building sizes
- Memory usage optimization
- Visualization rendering speed

### 4. Visualization Quality
- Heatmap clarity and readability
- Material overlay effectiveness
- Access point marker visibility
- Legend and label readability

### 5. System Robustness
- Multiple AP configurations
- Complex building layouts
- Various material combinations
- Edge case handling
30  docs/slides/03_results.md (new file)
@@ -0,0 +1,30 @@
# Results and Achievements

## Signal Characteristics

### 1. Signal Properties
- Operating Frequency: 2.4 GHz
- Transmit Power: 20 dBm
- Noise Floor: -96.0 dBm
- Signal Quality Range: 0-1 (normalized from RSSI)

### 2. Material Attenuation (2.4 GHz)
- Concrete (20cm): 4.5 εr, 0.014 S/m conductivity
- Glass (6mm): 6.0 εr, 0.004 S/m conductivity
- Wood (4cm): 2.1 εr, 0.002 S/m conductivity
- Drywall (16mm): 2.0 εr, 0.001 S/m conductivity
- Metal (2mm): 1.0 εr, 1e7 S/m conductivity

### 3. System Performance
- Grid Resolution: 0.1m (10cm)
- Sampling Points: 200x120 grid (24,000 points)
- Coverage Area: 50m x 30m (1,500 m²)
- Signal Range: Typically -30 dBm to -90 dBm

### 4. Visualization Improvements
- AP Marker Size: 3000-4000 units
- High-Resolution Output: 600 DPI
- Material Overlay: 0.5 alpha transparency
- Support for Multiple APs (up to 4)
- Channel Separation: 5 channels
- Realistic Noise: σ = 2 dB
33  docs/slides/slide1_system_architecture.txt (new file)
@@ -0,0 +1,33 @@
Slide 1: System Architecture

System Components

1. Data Collection Module
• WiFi Data Collector: Simulates signal strength measurements
• Material Physics Engine: Models signal attenuation through different materials
• Sampling Grid: High-resolution 200x120 point sampling

2. Physics Simulation
• Material Properties:
  - Concrete, Glass, Wood, Drywall
  - Each with specific permittivity and conductivity values
  - Thickness-based attenuation modeling

3. Visualization System
• Building Layout Engine
  - Material Grid System (0.1m resolution)
  - Complex Office Layout Support
  - Multi-layer Material Handling
• Signal Visualization
  - Heatmap Generation
  - Gaussian Interpolation
  - Material Overlay System
  - Access Point Markers

4. Data Flow
1. Building Layout Definition → Material Grid
2. AP Placement → Signal Source Points
3. Physics-based Signal Propagation
4. Data Collection & Processing
5. Visualization Generation
30  docs/slides/slide2_evaluation.txt (new file)
@@ -0,0 +1,30 @@
Slide 2: System Evaluation

Testing Methodology

1. Signal Propagation Accuracy
• Physics-based validation against theoretical models
• Material attenuation verification
• Multi-path signal handling assessment

2. Spatial Resolution Testing
• Grid density analysis (0.1m resolution)
• Edge case handling at material boundaries
• Signal interpolation accuracy

3. Performance Metrics
• Computation time for different building sizes
• Memory usage optimization
• Visualization rendering speed

4. Visualization Quality
• Heatmap clarity and readability
• Material overlay effectiveness
• Access point marker visibility
• Legend and label readability

5. System Robustness
• Multiple AP configurations
• Complex building layouts
• Various material combinations
• Edge case handling
14  docs/slides/slide3_results_signal.txt (new file)
@@ -0,0 +1,14 @@
Slide 3: Signal Characteristics

1. Signal Properties
• Operating Frequency: 2.4 GHz
• Transmit Power: 20 dBm
• Noise Floor: -96.0 dBm
• Signal Quality Range: 0-1 (normalized from RSSI)

2. Material Attenuation (2.4 GHz)
• Concrete (20cm): 4.5 εr, 0.014 S/m conductivity
• Glass (6mm): 6.0 εr, 0.004 S/m conductivity
• Wood (4cm): 2.1 εr, 0.002 S/m conductivity
• Drywall (16mm): 2.0 εr, 0.001 S/m conductivity
• Metal (2mm): 1.0 εr, 1e7 S/m conductivity
15  docs/slides/slide4_results_performance.txt (new file)
@@ -0,0 +1,15 @@
Slide 4: System Performance & Visualization

1. System Performance
• Grid Resolution: 0.1m (10cm)
• Sampling Points: 200x120 grid (24,000 points)
• Coverage Area: 50m x 30m (1,500 m²)
• Signal Range: Typically -30 dBm to -90 dBm

2. Visualization Improvements
• AP Marker Size: 3000-4000 units
• High-Resolution Output: 600 DPI
• Material Overlay: 0.5 alpha transparency
• Support for Multiple APs (up to 4)
• Channel Separation: 5 channels
• Realistic Noise: σ = 2 dB
1  docs/wifi_presentation.pptx (new file)
@@ -0,0 +1 @@
1  docs/wifi_signal_prediction.pptx (new file)
@@ -0,0 +1 @@
203  floor_plans/finalmap.json (new file)
@@ -0,0 +1,203 @@
|
||||
{
|
||||
"building": {
|
||||
"width": 40.0,
|
||||
"height": 3.0,
|
||||
"length": 50.0,
|
||||
"resolution": 0.2
|
||||
},
|
||||
"target_coverage": 0.9,
|
||||
"propagation_model": "fast_ray_tracing",
|
||||
"placement_strategy": "material_aware",
|
||||
"quick_mode": false,
|
||||
"ap_mode": "manual",
|
||||
"scale": {
|
||||
"pixel_to_meter": 0.0684257329142735
|
||||
},
|
||||
"rois": [
|
||||
{
|
||||
"points": [
|
||||
[
|
||||
16.01162150194,
|
||||
29.55991661896615
|
||||
],
|
||||
[
|
||||
35.44452964959367,
|
||||
29.696768084794694
|
||||
],
|
||||
[
|
||||
35.3761039166794,
|
||||
21.622531600910424
|
||||
],
|
||||
[
|
||||
48.71912183496273,
|
||||
21.6909573338247
|
||||
],
|
||||
[
|
||||
48.71912183496273,
|
||||
33.66546059382256
|
||||
],
|
||||
[
|
||||
16.08004723485427,
|
||||
33.66546059382256
|
||||
],
|
||||
[
|
||||
16.08004723485427,
|
||||
29.76519381770897
|
||||
]
|
||||
],
|
||||
"lengths_m": [
|
||||
19.433390013037968,
|
||||
8.074526418224954,
|
||||
13.343193367727041,
|
||||
11.974503259997862,
|
||||
32.63907460010846,
|
||||
3.900266776113589,
|
||||
0.21638116657545525
|
||||
]
|
||||
}
|
||||
],
|
||||
"boundaries": [],
|
||||
"regions": [
|
||||
{
|
||||
"name": "room1",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "rectangle",
|
||||
"coords": [
|
||||
36.06036124582213,
|
||||
22.443640395881705,
|
||||
45.98209251839179,
|
||||
26.82288730239521
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "rectangle",
|
||||
"coords": [
|
||||
16.42217589942564,
|
||||
30.38102541393743,
|
||||
22.375214662967434,
|
||||
32.84435179885128
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "fg",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "rectangle",
|
||||
"coords": [
|
||||
31.475837140565808,
|
||||
30.654728345594524,
|
||||
35.44452964959367,
|
||||
32.775926065937
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "rectangle",
|
||||
"coords": [
|
||||
47.55588437542008,
|
||||
26.754461569480934,
|
||||
47.7611615741629,
|
||||
32.50222313427991
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "rectangle",
|
||||
"coords": [
|
||||
38.66053909656453,
|
||||
28.122976227766404,
|
||||
43.24506320182085,
|
||||
31.88639153805145
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "circle",
|
||||
"coords": [
|
||||
47.14532997793444,
|
||||
23.606877855424354,
|
||||
0.5642530486317051
|
||||
]
|
||||
}
|
||||
],
|
||||
"materials": [],
|
||||
"aps": [
|
||||
{
|
||||
"x": 38.18155896616461,
|
||||
"y": 25.0438182466241,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 19.501333880567945,
|
||||
"y": 31.7495400722229,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 43.99774626387786,
|
||||
"y": 24.770115314967004,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 40.85016254982128,
|
||||
"y": 30.038896749366064,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 33.66546059382256,
|
||||
"y": 32.02324300388,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
}
|
||||
]
|
||||
}
|
||||
BIN  floor_plans/floorplan.jpg (new file): binary file not shown (30 KiB)
161  floor_plans/floorplan.json (new file)
@@ -0,0 +1,161 @@
|
||||
{
|
||||
"building": {
|
||||
"width": 40.0,
|
||||
"height": 3.0,
|
||||
"length": 50.0,
|
||||
"resolution": 0.2
|
||||
},
|
||||
"target_coverage": 0.9,
|
||||
"propagation_model": "fast_ray_tracing",
|
||||
"placement_strategy": "material_aware",
|
||||
"quick_mode": false,
|
||||
"ap_mode": "manual",
|
||||
"scale": {
|
||||
"pixel_to_meter": 0.06412234498911869
|
||||
},
|
||||
"rois": [
|
||||
{
|
||||
"points": [
|
||||
[
|
||||
15.004628727453772,
|
||||
27.76497538028839
|
||||
],
|
||||
[
|
||||
33.40774173933084,
|
||||
27.95734241525575
|
||||
],
|
||||
[
|
||||
33.40774173933084,
|
||||
20.45502805152886
|
||||
],
|
||||
[
|
||||
45.71923197724163,
|
||||
20.390905706539744
|
||||
],
|
||||
[
|
||||
45.59098728726339,
|
||||
31.419949044668158
|
||||
],
|
||||
[
|
||||
14.940506382464655,
|
||||
31.35582669967904
|
||||
],
|
||||
[
|
||||
15.068751072442891,
|
||||
27.70085303529927
|
||||
]
|
||||
],
|
||||
"lengths_m": [
|
||||
18.40411838703666,
|
||||
7.502314363726886,
|
||||
12.311657222051771,
|
||||
11.029788921589677,
|
||||
30.65054797830797,
|
||||
3.6572228791553387,
|
||||
0.09068268993477813
|
||||
]
|
||||
}
|
||||
],
|
||||
"boundaries": [],
|
||||
"regions": [
|
||||
{
|
||||
"name": "a",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "circle",
|
||||
"coords": [
|
||||
2.3683248730964475,
|
||||
1.4884263959390867,
|
||||
0.10674529480665189
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "ax",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "rectangle",
|
||||
"coords": [
|
||||
1.1265989847715738,
|
||||
1.8790355329949244,
|
||||
2.076395939086295,
|
||||
1.9324873096446706
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "ds",
|
||||
"type": "office",
|
||||
"material": "brick",
|
||||
"thickness_m": 0.2,
|
||||
"room": true,
|
||||
"shape": "rectangle",
|
||||
"coords": [
|
||||
2.6643654822335034,
|
||||
1.430862944162437,
|
||||
2.6602538071065998,
|
||||
1.4103045685279192
|
||||
]
|
||||
}
|
||||
],
|
||||
"materials": [],
|
||||
"aps": [
|
||||
{
|
||||
"x": 18.787847081811776,
|
||||
"y": 30.650480904798734,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 34.3054545691785,
|
||||
"y": 25.328326270701883,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 20.326783361550625,
|
||||
"y": 31.035214974733446,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 41.03830079303596,
|
||||
"y": 27.059629585408086,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
},
|
||||
{
|
||||
"x": 36.93447071373237,
|
||||
"y": 23.276411231050083,
|
||||
"z": 2.7,
|
||||
"tx_power": 20.0,
|
||||
"frequency": 2.4,
|
||||
"wifi_standard": "802.11n",
|
||||
"coverage": 20.0,
|
||||
"size": 0.2,
|
||||
"max_height": 2.7
|
||||
}
|
||||
]
|
||||
}
|
||||
24554  floor_plans/floorplan_line_materials.json (new file): diff suppressed because it is too large
192  floor_plans/floorplantest.json (new file)
@@ -0,0 +1,192 @@
|
||||
{
|
||||
"image_path": "D:\\Projects\\wifi-signal-prediction-main\\floor_plans\\tes1.png",
|
||||
"width_meters": 50.0,
|
||||
"height_meters": 7.0,
|
||||
"resolution": 0.2,
|
||||
"use_custom_boundary": false,
|
||||
"building_boundary": [
|
||||
[
|
||||
0,
|
||||
0
|
||||
],
|
||||
[
|
||||
50.0,
|
||||
0
|
||||
],
|
||||
[
|
||||
50.0,
|
||||
7.0
|
||||
],
|
||||
[
|
||||
0,
|
||||
7.0
|
||||
],
|
||||
[
|
||||
0,
|
||||
0
|
||||
]
|
||||
],
|
||||
"regions": [
|
||||
{
|
||||
"x": 0.0,
|
||||
"y": 7.031484257871064,
|
||||
"width": 50.25125628140703,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "brick"
|
||||
},
|
||||
{
|
||||
"x": 50.25125628140703,
|
||||
"y": 0.0,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 7.031484257871064,
|
||||
"material": "brick"
|
||||
},
|
||||
{
|
||||
"x": 0.0,
|
||||
"y": 0.0,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 7.031484257871064,
|
||||
"material": "brick"
|
||||
},
|
||||
{
|
||||
"x": 0.0,
|
||||
"y": 0.0,
|
||||
"width": 50.25125628140703,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "brick"
|
||||
},
|
||||
{
|
||||
"x": 0.0,
|
||||
"y": 0.7346326836581709,
|
||||
"width": 50.25125628140703,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 11.306532663316583,
|
||||
"y": 0.0,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.7346326836581709,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 20.938023450586265,
|
||||
"y": 0.0,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.7346326836581709,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 37.68844221105528,
|
||||
"y": 0.0,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.7346326836581709,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 0.0,
|
||||
"y": 1.7841079460269866,
|
||||
"width": 20.100502512562816,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 0.0,
|
||||
"y": 2.623688155922039,
|
||||
"width": 20.100502512562816,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 4.1876046901172534,
|
||||
"y": 1.7841079460269866,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.3148425787106447,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 8.375209380234507,
|
||||
"y": 1.7841079460269866,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.3148425787106447,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 16.750418760469014,
|
||||
"y": 1.4692653673163418,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.3148425787106447,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 33.50083752093803,
|
||||
"y": 6.401799100449775,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.5247376311844079,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 33.50083752093803,
|
||||
"y": 6.401799100449775,
|
||||
"width": 16.750418760469014,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 0.0,
|
||||
"y": 6.401799100449775,
|
||||
"width": 19.262981574539364,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 20.938023450586265,
|
||||
"y": 1.5742128935532234,
|
||||
"width": 20.938023450586265,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 41.87604690117253,
|
||||
"y": 3.673163418290854,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.5247376311844079,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 41.87604690117253,
|
||||
"y": 1.5742128935532234,
|
||||
"width": 0.33500837520938026,
|
||||
"height": 0.8395802098950524,
|
||||
"material": "drywall"
|
||||
},
|
||||
{
|
||||
"x": 41.87604690117253,
|
||||
"y": 2.0989505247376314,
|
||||
"width": 7.537688442211055,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "glass"
|
||||
},
|
||||
{
|
||||
"x": 21.775544388609717,
|
||||
"y": 2.623688155922039,
|
||||
"width": 6.700167504187604,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "plastic"
|
||||
},
|
||||
{
|
||||
"x": 21.775544388609717,
|
||||
"y": 3.1484257871064467,
|
||||
"width": 6.700167504187604,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "plastic"
|
||||
},
|
||||
{
|
||||
"x": 16.750418760469014,
|
||||
"y": 4.197901049475263,
|
||||
"width": 6.700167504187604,
|
||||
"height": 0.041979010494752625,
|
||||
"material": "drywall"
|
||||
}
|
||||
]
|
||||
}
|
||||
1526  package-lock.json (generated, new file): diff suppressed because it is too large
15  package.json (new file)
@@ -0,0 +1,15 @@
{
  "dependencies": {
    "@emotion/react": "^11.14.0",
    "@emotion/styled": "^11.14.1",
    "@mui/icons-material": "^7.2.0",
    "@mui/material": "^7.2.0",
    "axios": "^1.10.0",
    "file-saver": "^2.0.5",
    "formik": "^2.4.6",
    "konva": "^9.3.22",
    "react-konva": "^19.0.7",
    "tqdm": "^2.0.3",
    "yup": "^1.6.1"
  }
}
15  requirements.txt (new file)
@@ -0,0 +1,15 @@
numpy>=1.21.0
pandas>=1.3.0
scikit-learn>=1.0.2
matplotlib>=3.4.2
seaborn>=0.11.1
joblib>=1.0.1
plotly>=5.3.1
scipy>=1.7.0
opencv-python>=4.5.0
scikit-optimize>=0.9.0
tqdm>=4.62.0
orjson>=3.6.0
scikit-image>=0.18.0
deap>=1.3.1
networkx>=2.6.3
1  src/__init__.py (new file)
@@ -0,0 +1 @@
"""WiFi signal strength prediction package."""
581  src/advanced_heatmap_visualizer.py (new file)
@@ -0,0 +1,581 @@
|
||||
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
import numpy as np
|
||||
import os
|
||||
from matplotlib.patches import Circle, Rectangle, Polygon, FancyBboxPatch, PathPatch
|
||||
from matplotlib.colors import ListedColormap, LinearSegmentedColormap
|
||||
from matplotlib.collections import PatchCollection
|
||||
from typing import Dict, List, Tuple, Optional, Any, cast
|
||||
import matplotlib.colors as mcolors
|
||||
import scipy.ndimage
|
||||
import matplotlib.image as mpimg
|
||||
|
||||
|
||||
def get_sharp_green_pink_cmap():
|
||||
# Pink for bad (-90 to -65), green for good (-65 to 0)
|
||||
colors = ["#ff69b4", "#00ff00"] # pink, green
|
||||
cmap = mcolors.ListedColormap(colors)
|
||||
bounds = [-90, -65, 0]
|
||||
norm = mcolors.BoundaryNorm(bounds, cmap.N)
|
||||
return cmap, norm
|
||||
|
||||
class AdvancedHeatmapVisualizer:
|
||||
"""High-quality heatmap visualizer for WiFi signal strength analysis."""
|
||||
|
||||
def __init__(self, building_width: float, building_height: float):
|
||||
"""
|
||||
Initialize the visualizer.
|
||||
|
||||
Args:
|
||||
building_width: Width of the building in meters
|
||||
building_height: Height of the building in meters
|
||||
"""
|
||||
self.building_width = building_width
|
||||
self.building_height = building_height
|
||||
|
||||
# Set high-quality plotting style
|
||||
plt.style.use('default')
|
||||
plt.rcParams['figure.dpi'] = 300
|
||||
plt.rcParams['savefig.dpi'] = 300
|
||||
plt.rcParams['font.size'] = 10
|
||||
plt.rcParams['axes.titlesize'] = 14
|
||||
plt.rcParams['axes.labelsize'] = 12
|
||||
|
||||
# Use custom green-pink colormap
|
||||
self.custom_cmap, self.custom_norm = get_sharp_green_pink_cmap()  # helper returns a (cmap, norm) pair
|
||||
self.norm = mcolors.Normalize(vmin=-100, vmax=0)
|
||||
|
||||
def create_comprehensive_visualizations(self, ap_locations: Dict[str, Any],
|
||||
materials_grid: Any, collector: Any,
|
||||
points: List[Tuple[float, float, float]],
|
||||
output_dir: str, engine: Any = None, regions: Optional[list] = None, roi_polygon: Optional[list] = None, background_image: Optional[str] = None, image_extent: Optional[list] = None) -> None:
|
||||
|
||||
if regions is None:
|
||||
regions = []
|
||||
# Create output directory
|
||||
os.makedirs(output_dir, exist_ok=True)
|
||||
|
||||
# Calculate signal strength grids
|
||||
ap_signal_grids, combined_signal_grid, x_unique, y_unique = self._calculate_signal_grids(
|
||||
ap_locations, collector, points
|
||||
)
|
||||
|
||||
# 1. Create Individual AP Heatmaps
|
||||
print("Creating individual AP heatmaps...")
|
||||
for ap_name, signal_grid in ap_signal_grids.items():
|
||||
self.create_individual_ap_heatmap(
|
||||
ap_name, signal_grid, ap_locations[ap_name],
|
||||
x_unique, y_unique, output_dir, materials_grid, regions=regions, roi_polygon=roi_polygon, background_image=background_image, image_extent=image_extent
|
||||
)
|
||||
|
||||
# 2. Create Combined Coverage Heatmap
|
||||
print("Creating combined coverage heatmap...")
|
||||
# Pass roi_polygon explicitly
|
||||
self.create_combined_coverage_heatmap(
|
||||
combined_signal_grid, ap_locations, x_unique, y_unique, output_dir, materials_grid, regions=regions, roi_polygon=roi_polygon, background_image=background_image, image_extent=image_extent
|
||||
)
|
||||
if background_image:
|
||||
self.create_combined_coverage_heatmap(
|
||||
combined_signal_grid, ap_locations, x_unique, y_unique, output_dir, materials_grid, regions=regions, roi_polygon=roi_polygon, background_image=background_image, image_extent=image_extent, suffix='_with_bg'
|
||||
)
|
||||
|
||||
# 3. Create Interactive Visualization
|
||||
print("Creating interactive visualization...")
|
||||
self.create_interactive_visualization(
|
||||
ap_signal_grids, combined_signal_grid, ap_locations,
|
||||
x_unique, y_unique, output_dir
|
||||
)
|
||||
|
||||
# 4. Create Signal Quality Analysis
|
||||
print("Creating signal quality analysis...")
|
||||
self.create_signal_quality_analysis(
|
||||
ap_signal_grids, combined_signal_grid, ap_locations, output_dir
|
||||
)
|
||||
|
||||
print(f"All visualizations saved to: {output_dir}")
|
||||
|
||||
def _calculate_signal_grids(self, ap_locations: Dict[str, Any], collector: Any,
|
||||
points: List[Tuple[float, float, float]]) -> Tuple[Dict, np.ndarray, np.ndarray, np.ndarray]:
|
||||
"""Calculate signal strength grids for each AP and combined coverage."""
|
||||
# Extract coordinates
|
||||
x_coords = np.array([x for (x, y, z) in points])
|
||||
y_coords = np.array([y for (x, y, z) in points])
|
||||
|
||||
# --- HARD CAP: Downsample to a fixed grid size for plotting ---
|
||||
MAX_GRID_SIZE = 200
|
||||
MIN_GRID_SIZE = 50
|
||||
x_min, x_max = x_coords.min(), x_coords.max()
|
||||
y_min, y_max = y_coords.min(), y_coords.max()
|
||||
# --- Degenerate grid check ---
|
||||
if x_min == x_max or y_min == y_max:
|
||||
raise ValueError(f"Cannot plot: all x or y values are the same (x: {x_min}–{x_max}, y: {y_min}–{y_max}). Check your input data, ROI, and region definitions.")
|
||||
n_x = max(MIN_GRID_SIZE, min(MAX_GRID_SIZE, len(np.unique(x_coords))))
|
||||
n_y = max(MIN_GRID_SIZE, min(MAX_GRID_SIZE, len(np.unique(y_coords))))
|
||||
x_unique = np.linspace(x_min, x_max, n_x)
|
||||
y_unique = np.linspace(y_min, y_max, n_y)
|
||||
grid_shape = (len(y_unique), len(x_unique))
|
||||
print(f"[DEBUG] Plotting grid shape: {grid_shape}")
|
||||
|
||||
# Calculate individual AP signal grids
|
||||
ap_signal_grids = {}
|
||||
for ap_name, ap_coords in ap_locations.items():
|
||||
signal_grid = np.zeros(grid_shape)
|
||||
ap_x, ap_y = ap_coords[:2]
|
||||
for i, y in enumerate(y_unique):
|
||||
for j, x in enumerate(x_unique):
|
||||
distance = np.sqrt((x - ap_x)**2 + (y - ap_y)**2)
|
||||
signal = collector.calculate_rssi(distance, None)
|
||||
signal_grid[i, j] = signal
|
||||
ap_signal_grids[ap_name] = signal_grid
|
||||
|
||||
# Calculate combined signal grid (maximum signal at each point)
|
||||
combined_signal_grid = np.zeros(grid_shape)
|
||||
for i, y in enumerate(y_unique):
|
||||
for j, x in enumerate(x_unique):
|
||||
max_signal = -100
|
||||
for ap_name, ap_coords in ap_locations.items():
|
||||
ap_x, ap_y = ap_coords[:2]
|
||||
distance = np.sqrt((x - ap_x)**2 + (y - ap_y)**2)
|
||||
signal = collector.calculate_rssi(distance, None)
|
||||
max_signal = max(max_signal, signal)
|
||||
combined_signal_grid[i, j] = max_signal
|
||||
|
||||
return ap_signal_grids, combined_signal_grid, x_unique, y_unique
|
||||
|
||||
def create_individual_ap_heatmap(self, ap_name: str, signal_grid: np.ndarray,
|
||||
ap_coords: Tuple[float, float, float],
|
||||
x_unique: np.ndarray, y_unique: np.ndarray,
|
||||
output_dir: str, materials_grid: Any, regions: Optional[list]=None, roi_polygon: Optional[list]=None, background_image: Optional[str] = None, image_extent: Optional[list] = None) -> None:
|
||||
"""Create high-quality individual AP heatmap with green-pink colormap and region overlays."""
|
||||
masked_grid = np.ma.masked_less(signal_grid, -90)
|
||||
smooth_grid = scipy.ndimage.gaussian_filter(masked_grid, sigma=1.0)
|
||||
cmap = self.get_green_to_pink_cmap()
|
||||
fig, ax = plt.subplots(figsize=(8, 6), dpi=80)
|
||||
# Set extent to ROI bounding box if available
|
||||
if roi_polygon is not None and len(roi_polygon) >= 3:
|
||||
xs = [p[0] for p in roi_polygon]
|
||||
ys = [p[1] for p in roi_polygon]
|
||||
x0, x1 = min(xs), max(xs)
|
||||
y0, y1 = min(ys), max(ys)
|
||||
extent = (x0, x1, y0, y1)
|
||||
else:
|
||||
x0, x1 = float(x_unique[0]), float(x_unique[-1])
|
||||
y0, y1 = float(y_unique[0]), float(y_unique[-1])
|
||||
extent = (x0, x1, y0, y1)
|
||||
im = ax.imshow(
|
||||
smooth_grid.T,
|
||||
extent=extent,
|
||||
cmap=cmap,
|
||||
vmin=-90,
|
||||
vmax=0,
|
||||
interpolation='nearest',
|
||||
aspect='auto',
|
||||
alpha=0.95,
|
||||
zorder=2,
|
||||
origin='lower'
|
||||
)
|
||||
cbar = plt.colorbar(im, ax=ax, ticks=[0, -65, -90])
|
||||
cbar.ax.set_yticklabels(['0 (Strong)', '-65 (Good/Threshold)', '-90 (Weak)'])
|
||||
cbar.set_label('Signal Strength (dBm)', fontsize=12, fontweight='bold')
|
||||
# Do NOT invert y-axis so 0 is at the top, -90 at the bottom
|
||||
# Draw ROI boundary if provided
|
||||
if roi_polygon is not None and len(roi_polygon) >= 3:
|
||||
roi_patch = Polygon(roi_polygon, closed=True, fill=False, edgecolor='black', linewidth=4, linestyle='-', zorder=10)
|
||||
ax.add_patch(roi_patch)
|
||||
ax.set_xlim(min(xs), max(xs))
|
||||
ax.set_ylim(min(ys), max(ys))
|
||||
# Draw building regions (polygons) if available
|
||||
if regions is not None:
|
||||
palette = plt.get_cmap('tab20')
|
||||
for i, region in enumerate(regions):
|
||||
# Support both dict and object (BuildingRegion)
|
||||
if isinstance(region, dict):
|
||||
name = region.get('name', f'Region {i+1}')
|
||||
polygon = region.get('polygon')
|
||||
elif hasattr(region, 'name'):
|
||||
name = getattr(region, 'name', f'Region {i+1}')
|
||||
polygon = getattr(region, 'polygon', None)
|
||||
else:
|
||||
name = f'Region {i+1}'
|
||||
polygon = None
|
||||
# Draw polygons from 'polygon' key or attribute
|
||||
if polygon and isinstance(polygon, list) and len(polygon) >= 3:
|
||||
poly = Polygon(polygon, closed=True, fill=True, alpha=0.35, edgecolor='black', linewidth=1, facecolor=palette(i % 20), zorder=5)
|
||||
ax.add_patch(poly)
|
||||
centroid = np.mean(np.array(polygon), axis=0)
|
||||
ax.text(centroid[0], centroid[1], name, ha='center', va='center', fontsize=10, fontweight='bold', color='black', bbox=dict(facecolor='white', alpha=0.7, boxstyle='round,pad=0.2'), zorder=6)
|
||||
elif isinstance(region, dict) and region.get('shape') == 'circle' and all(k in region for k in ('cx', 'cy', 'r')):
|
||||
cx, cy, r = region['cx'], region['cy'], region['r']
|
||||
circ = Circle((cx, cy), r, fill=True, alpha=0.35, edgecolor='black', linewidth=1, facecolor=palette(i % 20), zorder=5)
|
||||
ax.add_patch(circ)
|
||||
ax.text(cx, cy, name, fontsize=16, fontweight='bold', color='black', ha='center', va='center', zorder=12,
bbox=dict(boxstyle="round,pad=0.3", facecolor="white", alpha=0.85, edgecolor='none'))
|
||||
# Modern AP markers with drop shadow (smaller size)
|
||||
ap_x, ap_y = ap_coords[:2]
|
||||
shadow = Circle((ap_x+0.3, ap_y-0.3), 0.7, facecolor='gray', edgecolor='none', alpha=0.3, zorder=9)
|
||||
ax.add_patch(shadow)
|
||||
ap_circle = Circle((ap_x, ap_y), 0.6, facecolor='white', edgecolor='black', linewidth=3, alpha=0.95, zorder=10)
|
||||
ax.add_patch(ap_circle)
|
||||
color = plt.get_cmap('tab10')(0)
|
||||
ap_inner = Circle((ap_x, ap_y), 0.4, facecolor=color, edgecolor='none', alpha=0.95, zorder=11)
|
||||
ax.add_patch(ap_inner)
|
||||
ax.text(ap_x, ap_y, f'{ap_name}', fontsize=13, fontweight='bold', ha='center', va='center', color='white', zorder=12, bbox=dict(boxstyle="circle,pad=0.3", facecolor=color, alpha=0.8, edgecolor='none'))
|
||||
ax.text(ap_x, ap_y-2.1, f'({ap_x:.1f}, {ap_y:.1f})', fontsize=11, ha='center', va='top', color='black', alpha=0.7, zorder=12)
|
||||
ax.set_xlabel('X (meters)', fontsize=15, fontweight='bold')
|
||||
ax.set_ylabel('Y (meters)', fontsize=15, fontweight='bold')
|
||||
ax.set_title(f'AP {ap_name} Coverage Heatmap', fontsize=18, fontweight='bold', pad=18)
|
||||
ax.grid(False)
|
||||
plt.tight_layout()
|
||||
output_path = os.path.join(output_dir, f'{ap_name}_heatmap.png')
|
||||
plt.savefig(output_path, dpi=100, bbox_inches='tight', facecolor='white')
|
||||
plt.close()
|
||||
print(f"Individual AP heatmap saved: {output_path}")
|
||||
|
||||
def get_green_to_pink_cmap(self):
|
||||
# Custom colormap: 0 to -65 dBm = shades of green, -65 to -90 dBm = shades of pink.
# imshow is called with vmin=-90 and vmax=0, so colormap position 0.0 corresponds to -90 dBm
# and 1.0 to 0 dBm; the green/pink boundary at -65 dBm therefore sits at (-65 + 90) / 90 ≈ 0.278.
from matplotlib.colors import LinearSegmentedColormap
colors = [
(0.0, '#ff69b4'),   # -90 dBm, strong pink (weak signal)
(0.278, '#ffd1e6'), # -65 dBm, light pink (threshold)
(0.278, '#adffb0'), # -65 dBm, light green (threshold)
(1.0, '#008000')    # 0 dBm, dark green (strong signal)
]
return LinearSegmentedColormap.from_list("green_to_pink", colors, N=256)
|
||||
|
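# Note on the expected input (an assumption, consistent with how per-AP grids are combined
# elsewhere in this module): combined_signal_grid is the element-wise maximum over the
# individual AP grids, e.g.
#   combined_signal_grid = np.max(np.stack(list(ap_signal_grids.values())), axis=0)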
||||
def create_combined_coverage_heatmap(self, combined_signal_grid: np.ndarray,
|
||||
ap_locations: Dict[str, Any],
|
||||
x_unique: np.ndarray, y_unique: np.ndarray,
|
||||
output_dir: str, materials_grid: Any, regions: Optional[list]=None, roi_polygon: Optional[list]=None, background_image: Optional[str] = None, image_extent: Optional[list] = None, suffix: str = '') -> None:
|
||||
# Mask out areas with signal below -90 dBm (no coverage)
|
||||
masked_grid = np.ma.masked_less(combined_signal_grid, -90)
|
||||
# Mask out areas outside the ROI polygon if provided
|
||||
if roi_polygon is not None and len(roi_polygon) >= 3:
|
||||
from matplotlib.path import Path
roi_path = Path(roi_polygon)
X, Y = np.meshgrid(x_unique, y_unique, indexing='ij')
# Vectorised point-in-polygon test (same result as looping over every grid cell)
mask = ~roi_path.contains_points(np.column_stack([X.ravel(), Y.ravel()])).reshape(X.shape)
masked_grid = np.ma.masked_where(mask.T, masked_grid)
|
||||
# Use green-to-pink colormap
|
||||
cmap = self.get_green_to_pink_cmap()
|
||||
fig, ax = plt.subplots(figsize=(10, 8), dpi=120)
|
||||
# Set extent to ROI bounding box if available
|
||||
if roi_polygon is not None and len(roi_polygon) >= 3:
|
||||
xs = [p[0] for p in roi_polygon]
|
||||
ys = [p[1] for p in roi_polygon]
|
||||
x0, x1 = min(xs), max(xs)
|
||||
y0, y1 = min(ys), max(ys)
|
||||
extent = (x0, x1, y0, y1)
|
||||
else:
|
||||
x0, x1 = float(x_unique[0]), float(x_unique[-1])
|
||||
y0, y1 = float(y_unique[0]), float(y_unique[-1])
|
||||
extent = (x0, x1, y0, y1)
|
||||
im = ax.imshow(
|
||||
masked_grid.T,
|
||||
extent=extent,
|
||||
cmap=cmap,
|
||||
vmin=-90,
|
||||
vmax=0,
|
||||
interpolation='bilinear',
|
||||
aspect='auto',
|
||||
alpha=1.0,
|
||||
zorder=2,
|
||||
origin='lower'
|
||||
)
|
||||
# Colorbar outside plot
|
||||
cbar = plt.colorbar(im, ax=ax, pad=0.03, aspect=30, shrink=0.85, location='right', ticks=[0, -65, -90])
|
||||
cbar.set_label('Combined Signal Strength (dBm)', fontsize=16, fontweight='bold', labelpad=18)
|
||||
cbar.ax.tick_params(labelsize=14)
|
||||
cbar.set_ticks([0, -65, -90])
|
||||
cbar.set_ticklabels(['0 (Strong)', '-65 (Good/Threshold)', '-90 (Weak)'])
|
||||
# Do NOT invert y-axis so 0 is at the top, -90 at the bottom
|
||||
# Axes labels and ticks
|
||||
ax.set_xlabel('X (meters)', fontsize=18, fontweight='bold', labelpad=10)
|
||||
ax.set_ylabel('Y (meters)', fontsize=18, fontweight='bold', labelpad=10)
|
||||
ax.set_xticks(np.linspace(x0, x1, 6))
|
||||
ax.set_yticks(np.linspace(y0, y1, 6))
|
||||
ax.tick_params(axis='both', which='major', labelsize=14, length=0)
|
||||
# Title
|
||||
ax.set_title('Combined WiFi Coverage Heatmap', fontsize=26, fontweight='bold', pad=30)
|
||||
# Tight layout, white background
|
||||
plt.tight_layout(pad=2.0)
|
||||
fig.patch.set_facecolor('white')
|
||||
# Save
|
||||
output_path = os.path.join(output_dir, f'combined_coverage_heatmap{suffix}.png')
|
||||
plt.savefig(output_path, dpi=120, bbox_inches='tight', facecolor='white')
|
||||
plt.close()
|
||||
print(f"Combined coverage heatmap saved: {output_path}")
|
||||
|
||||
def _draw_building_regions(self, ax, materials_grid: Any) -> None:
|
||||
"""Draw building regions and materials on the plot."""
|
||||
if materials_grid is None:
|
||||
return
|
||||
|
||||
# Draw building outline
|
||||
building_rect = Rectangle((0, 0), self.building_width, self.building_height,
|
||||
fill=False, edgecolor='black', linewidth=3, alpha=0.8)
|
||||
ax.add_patch(building_rect)
|
||||
|
||||
# Draw material regions if available
|
||||
try:
|
||||
# This is a simplified version - you may need to adapt based on your materials_grid structure
|
||||
if hasattr(materials_grid, 'shape') and len(materials_grid.shape) >= 2:
|
||||
# Draw walls or material boundaries
|
||||
wall_rect = Rectangle((5, 5), self.building_width-10, self.building_height-10,
|
||||
fill=False, edgecolor='gray', linewidth=2, alpha=0.6)
|
||||
ax.add_patch(wall_rect)
|
||||
except Exception:
# If the materials_grid structure is different, fall back to the basic building outline
pass
|
||||
|
||||
def create_interactive_visualization(self, ap_signal_grids: Dict[str, np.ndarray],
|
||||
combined_signal_grid: np.ndarray,
|
||||
ap_locations: Dict[str, Any],
|
||||
x_unique: np.ndarray, y_unique: np.ndarray,
|
||||
output_dir: str) -> None:
|
||||
"""Create interactive Plotly visualization."""
|
||||
try:
|
||||
import plotly.graph_objects as go
|
||||
from plotly.subplots import make_subplots
|
||||
|
||||
# Create subplots for individual APs and combined
|
||||
n_aps = len(ap_locations)
|
||||
fig = make_subplots(
|
||||
rows=2, cols=2,
|
||||
subplot_titles=['Combined Coverage'] + list(ap_locations.keys())[:3],
|
||||
specs=[[{"secondary_y": False}, {"secondary_y": False}],
|
||||
[{"secondary_y": False}, {"secondary_y": False}]]
|
||||
)
|
||||
|
||||
# Custom colorscale for signal strength
|
||||
colorscale = [
|
||||
[0, '#FF69B4'], # Pink for weak signal
|
||||
[0.35, '#FFB6C1'], # Light pink
|
||||
[0.65, '#00FF00'], # Green for good signal
|
||||
[1, '#008000'] # Dark green
|
||||
]
|
||||
|
||||
# Combined coverage heatmap
|
||||
fig.add_trace(
|
||||
go.Heatmap(
|
||||
z=combined_signal_grid,
|
||||
x=x_unique,
|
||||
y=y_unique,
|
||||
colorscale=colorscale,
|
||||
zmin=-100,
|
||||
zmax=0,
|
||||
name='Combined Coverage',
|
||||
showscale=True,
|
||||
colorbar=dict(title="Signal Strength (dBm)")
|
||||
),
|
||||
row=1, col=1
|
||||
)
|
||||
|
||||
# Individual AP heatmaps
|
||||
for i, (ap_name, signal_grid) in enumerate(list(ap_signal_grids.items())[:3]):
|
||||
row = (i + 1) // 2 + 1
|
||||
col = (i + 1) % 2 + 1
|
||||
|
||||
fig.add_trace(
|
||||
go.Heatmap(
|
||||
z=signal_grid,
|
||||
x=x_unique,
|
||||
y=y_unique,
|
||||
colorscale=colorscale,
|
||||
zmin=-100,
|
||||
zmax=0,
|
||||
name=f'{ap_name} Coverage',
|
||||
showscale=False
|
||||
),
|
||||
row=row, col=col
|
||||
)
|
||||
|
||||
# Add AP markers
|
||||
colors = ['red', 'blue', 'green', 'orange', 'purple', 'brown', 'pink', 'gray', 'olive', 'cyan']
|
||||
for i, (ap_name, ap_coords) in enumerate(ap_locations.items()):
|
||||
ap_x, ap_y = ap_coords[:2]
|
||||
color = colors[i % len(colors)]
|
||||
|
||||
fig.add_trace(
|
||||
go.Scatter(
|
||||
x=[ap_x],
|
||||
y=[ap_y],
|
||||
mode='markers+text',
|
||||
marker=dict(size=15, color=color, symbol='circle'),
|
||||
text=[ap_name],
|
||||
textposition="top center",
|
||||
name=f'{ap_name} Location',
|
||||
showlegend=False
|
||||
),
|
||||
row=1, col=1
|
||||
)
|
||||
|
||||
# Update layout
|
||||
fig.update_layout(
|
||||
title_text="Interactive WiFi Coverage Analysis",
|
||||
title_x=0.5,
|
||||
width=1200,
|
||||
height=800,
|
||||
showlegend=False
|
||||
)
|
||||
|
||||
# Save interactive HTML
|
||||
output_path = os.path.join(output_dir, 'interactive_coverage_analysis.html')
|
||||
fig.write_html(output_path)
|
||||
|
||||
print(f"Interactive visualization saved: {output_path}")
|
||||
|
||||
except ImportError:
|
||||
print("Plotly not available, skipping interactive visualization")
|
||||
|
||||
def create_signal_quality_analysis(self, ap_signal_grids: Dict[str, np.ndarray],
|
||||
combined_signal_grid: np.ndarray,
|
||||
ap_locations: Dict[str, Any], output_dir: str) -> None:
|
||||
"""Create signal quality analysis plots."""
|
||||
# Create figure with subplots
|
||||
fig, axes = plt.subplots(2, 2, figsize=(20, 16), dpi=300)
|
||||
|
||||
# 1. Signal Quality Distribution
|
||||
ax1 = axes[0, 0]
|
||||
all_signals = combined_signal_grid.flatten()
|
||||
|
||||
# Create histogram with custom bins
|
||||
bins = np.linspace(-100, 0, 50)
|
||||
n, bins, patches = ax1.hist(all_signals, bins=bins, alpha=0.7, color='skyblue', edgecolor='black')
|
||||
|
||||
# Color bins based on signal quality
|
||||
for i, (patch, bin_center) in enumerate(zip(patches, (bins[:-1] + bins[1:]) / 2)):
|
||||
if bin_center >= -65:
|
||||
patch.set_facecolor('green')
|
||||
else:
|
||||
patch.set_facecolor('pink')
|
||||
|
||||
ax1.axvline(x=-65, color='black', linestyle='--', linewidth=2, label='Good Signal Threshold (-65 dBm)')
|
||||
ax1.set_xlabel('Signal Strength (dBm)', fontsize=12, fontweight='bold')
|
||||
ax1.set_ylabel('Frequency', fontsize=12, fontweight='bold')
|
||||
ax1.set_title('Signal Quality Distribution', fontsize=14, fontweight='bold')
|
||||
ax1.legend()
|
||||
ax1.grid(True, alpha=0.3)
|
||||
|
||||
# 2. AP Performance Comparison
|
||||
ax2 = axes[0, 1]
|
||||
ap_names = list(ap_locations.keys())
|
||||
avg_signals = [np.mean(grid) for grid in ap_signal_grids.values()]
|
||||
good_coverage_percent = [np.sum(grid >= -65) / grid.size * 100 for grid in ap_signal_grids.values()]
|
||||
|
||||
x = np.arange(len(ap_names))
|
||||
width = 0.35
|
||||
|
||||
bars1 = ax2.bar(x - width/2, avg_signals, width, label='Average Signal (dBm)', alpha=0.8)
|
||||
ax2_twin = ax2.twinx()
|
||||
bars2 = ax2_twin.bar(x + width/2, good_coverage_percent, width, label='Good Coverage (%)', alpha=0.8, color='orange')
|
||||
|
||||
ax2.set_xlabel('Access Points', fontsize=12, fontweight='bold')
|
||||
ax2.set_ylabel('Average Signal (dBm)', fontsize=12, fontweight='bold')
|
||||
ax2_twin.set_ylabel('Good Coverage (%)', fontsize=12, fontweight='bold')
|
||||
ax2.set_title('AP Performance Comparison', fontsize=14, fontweight='bold')
|
||||
ax2.set_xticks(x)
|
||||
ax2.set_xticklabels(ap_names, rotation=45, ha='right')
|
||||
ax2.grid(True, alpha=0.3)
|
||||
|
||||
# Add value labels on bars
|
||||
for bar, value in zip(bars1, avg_signals):
|
||||
height = bar.get_height()
|
||||
ax2.text(bar.get_x() + bar.get_width()/2., height + 0.5,
|
||||
f'{value:.1f}', ha='center', va='bottom', fontweight='bold')
|
||||
|
||||
for bar, value in zip(bars2, good_coverage_percent):
|
||||
height = bar.get_height()
|
||||
ax2_twin.text(bar.get_x() + bar.get_width()/2., height + 0.5,
|
||||
f'{value:.1f}%', ha='center', va='bottom', fontweight='bold')
|
||||
|
||||
# 3. Coverage Quality Map
|
||||
ax3 = axes[1, 0]
|
||||
coverage_quality = np.where(combined_signal_grid >= -65, 1, 0) # Binary: good/bad coverage
|
||||
|
||||
im = ax3.imshow(coverage_quality, extent=(0, self.building_width, 0, self.building_height),
|
||||
origin='lower', cmap='RdYlGn', aspect='equal', alpha=0.8)
|
||||
|
||||
# Add AP locations
|
||||
for ap_name, ap_coords in ap_locations.items():
|
||||
ap_x, ap_y = ap_coords[:2]
|
||||
ax3.scatter(ap_x, ap_y, s=200, c='red', marker='^', edgecolors='black', linewidth=2, zorder=10)
|
||||
ax3.annotate(ap_name, (ap_x, ap_y), xytext=(5, 5), textcoords='offset points',
|
||||
fontsize=10, fontweight='bold', color='white',
|
||||
bbox=dict(boxstyle="round,pad=0.3", facecolor="red", alpha=0.8))
|
||||
|
||||
ax3.set_xlabel('X (meters)', fontsize=12, fontweight='bold')
|
||||
ax3.set_ylabel('Y (meters)', fontsize=12, fontweight='bold')
|
||||
ax3.set_title('Coverage Quality Map\nGreen: Good Signal (≥-65 dBm), Red: Weak Signal (<-65 dBm)',
|
||||
fontsize=14, fontweight='bold')
|
||||
|
||||
# Add colorbar
|
||||
cbar = plt.colorbar(im, ax=ax3, shrink=0.8)
|
||||
cbar.set_label('Coverage Quality', fontsize=10)
|
||||
cbar.set_ticks([0, 1])
|
||||
cbar.set_ticklabels(['Weak Signal', 'Good Signal'])
|
||||
|
||||
# 4. Signal Strength Statistics
|
||||
ax4 = axes[1, 1]
|
||||
|
||||
# Calculate statistics
|
||||
stats_data = {
|
||||
'Metric': ['Min Signal', 'Max Signal', 'Mean Signal', 'Std Signal', 'Good Coverage %', 'Weak Coverage %'],
|
||||
'Value': [
|
||||
np.min(combined_signal_grid),
|
||||
np.max(combined_signal_grid),
|
||||
np.mean(combined_signal_grid),
|
||||
np.std(combined_signal_grid),
|
||||
np.sum(combined_signal_grid >= -65) / combined_signal_grid.size * 100,
|
||||
np.sum(combined_signal_grid < -65) / combined_signal_grid.size * 100
|
||||
]
|
||||
}
|
||||
|
||||
# Create table
|
||||
table_data = [[stats_data['Metric'][i], f"{stats_data['Value'][i]:.2f}"]
|
||||
for i in range(len(stats_data['Metric']))]
|
||||
|
||||
table = ax4.table(cellText=table_data, colLabels=['Metric', 'Value'],
|
||||
cellLoc='center', loc='center')
|
||||
table.auto_set_font_size(False)
|
||||
table.set_fontsize(10)
|
||||
table.scale(1, 2)
|
||||
|
||||
# Style table
|
||||
for i in range(len(table_data)):
|
||||
for j in range(2):
|
||||
cell = table[(i+1, j)]
|
||||
if i < 4: # Signal statistics
|
||||
cell.set_facecolor('#E6F3FF')
|
||||
else: # Coverage statistics
|
||||
cell.set_facecolor('#E6FFE6' if 'Good' in table_data[i][0] else '#FFE6E6')
|
||||
|
||||
ax4.set_title('Signal Strength Statistics', fontsize=14, fontweight='bold')
|
||||
ax4.axis('off')
|
||||
|
||||
plt.tight_layout()
|
||||
output_path = os.path.join(output_dir, 'signal_quality_analysis.png')
|
||||
plt.savefig(output_path, dpi=300, bbox_inches='tight', facecolor='white')
|
||||
plt.close()
|
||||
|
||||
print(f"Signal quality analysis saved: {output_path}")
|
||||
|
||||
|
||||
# Convenience function for backward compatibility
|
||||
def create_visualization_plots(ap_locations, building_width, building_height, materials_grid, collector, points, output_dir, engine=None, regions: Optional[list] = None, roi_polygon: Optional[list] = None, background_image: Optional[str] = None, image_extent: Optional[list] = None):
|
||||
"""
|
||||
Create comprehensive high-quality heatmap visualizations for AP placement analysis.
|
||||
|
||||
This is a convenience function that creates an AdvancedHeatmapVisualizer instance
|
||||
and calls the comprehensive visualization method.
|
||||
"""
|
||||
if regions is None:
|
||||
regions = []
|
||||
visualizer = AdvancedHeatmapVisualizer(building_width, building_height)
|
||||
visualizer.create_comprehensive_visualizations(
|
||||
ap_locations, materials_grid, collector, points, output_dir, engine, regions=regions, roi_polygon=roi_polygon, background_image=background_image, image_extent=image_extent
|
||||
)
|
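# Illustrative sketch (not part of the commit): building the regular grid of sample points that
# create_visualization_plots() expects, for an assumed 40 m x 25 m floor sampled every 0.5 m.
# The AP names, coordinates and 'my_collector' below are placeholders, not values from this repo.
#
#   import numpy as np
#   width, height, step = 40.0, 25.0, 0.5
#   points = [(float(x), float(y))
#             for x in np.arange(0.0, width + step, step)
#             for y in np.arange(0.0, height + step, step)]
#   ap_locations = {'AP1': (10.0, 12.0, 2.7, 20.0), 'AP2': (30.0, 12.0, 2.7, 20.0)}
#   create_visualization_plots(ap_locations, width, height, materials_grid=None,
#                              collector=my_collector, points=points, output_dir='visualizations')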
||||
699
src/advanced_visualization.py
Normal file
@@ -0,0 +1,699 @@
|
||||
"""
|
||||
Advanced WiFi AP Visualization System
|
||||
Provides detailed individual AP analysis and comprehensive combined metrics
|
||||
"""
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
import seaborn as sns
|
||||
from matplotlib.patches import Circle, Rectangle, Polygon
|
||||
from matplotlib.lines import Line2D
|
||||
import pandas as pd
|
||||
from typing import Dict, List, Tuple, Optional
|
||||
import os
|
||||
from scipy import stats
|
||||
from sklearn.metrics import silhouette_score
|
||||
import logging
|
||||
|
||||
class AdvancedWiFiVisualizer:
|
||||
"""Advanced visualization system for WiFi AP placement analysis"""
|
||||
|
||||
def __init__(self, building_width: float, building_height: float, resolution: float = 0.2):
|
||||
self.building_width = building_width
|
||||
self.building_height = building_height
|
||||
self.resolution = resolution
|
||||
self.setup_style()
|
||||
|
||||
def setup_style(self):
|
||||
"""Setup professional plotting style"""
|
||||
plt.style.use('seaborn-v0_8')
|
||||
sns.set_palette("husl")
|
||||
plt.rcParams['figure.dpi'] = 300
|
||||
plt.rcParams['savefig.dpi'] = 300
|
||||
plt.rcParams['font.size'] = 10
|
||||
plt.rcParams['axes.titlesize'] = 14
|
||||
plt.rcParams['axes.labelsize'] = 12
|
||||
|
||||
def create_individual_ap_analysis(self, ap_locations: Dict, rssi_grids: List[np.ndarray],
|
||||
points: List[Tuple], collector, output_dir: str):
|
||||
"""Create detailed individual AP analysis plots"""
|
||||
logging.info("Creating individual AP analysis plots...")
|
||||
|
||||
for i, (ap_name, ap_coords) in enumerate(ap_locations.items()):
|
||||
if i >= len(rssi_grids):
|
||||
continue
|
||||
|
||||
# Extract AP information
|
||||
x, y = ap_coords[0], ap_coords[1]
|
||||
z = ap_coords[2] if len(ap_coords) > 2 else 0
|
||||
tx_power = ap_coords[3] if len(ap_coords) > 3 else 20.0
|
||||
|
||||
# Create comprehensive individual AP plot
|
||||
fig = plt.figure(figsize=(20, 16))
|
||||
|
||||
# 1. Signal Coverage Map
|
||||
ax1 = plt.subplot(2, 3, 1)
|
||||
self._plot_ap_coverage_map(ax1, ap_name, ap_coords, rssi_grids[i], points)
|
||||
|
||||
# 2. Signal Strength Distribution
|
||||
ax2 = plt.subplot(2, 3, 2)
|
||||
self._plot_signal_distribution(ax2, ap_name, rssi_grids[i])
|
||||
|
||||
# 3. Coverage Statistics
|
||||
ax3 = plt.subplot(2, 3, 3)
|
||||
self._plot_coverage_statistics(ax3, ap_name, rssi_grids[i])
|
||||
|
||||
# 4. Distance vs Signal Strength
|
||||
ax4 = plt.subplot(2, 3, 4)
|
||||
self._plot_distance_vs_signal(ax4, ap_name, ap_coords, points, collector)
|
||||
|
||||
# 5. Coverage Quality Analysis
|
||||
ax5 = plt.subplot(2, 3, 5)
|
||||
self._plot_coverage_quality(ax5, ap_name, rssi_grids[i])
|
||||
|
||||
# 6. AP Performance Metrics
|
||||
ax6 = plt.subplot(2, 3, 6)
|
||||
self._plot_performance_metrics(ax6, ap_name, ap_coords, rssi_grids[i])
|
||||
|
||||
plt.suptitle(f'Advanced Analysis: {ap_name} (z={z:.1f}m, {tx_power:.0f}dBm)',
|
||||
fontsize=16, fontweight='bold')
|
||||
plt.tight_layout()
|
||||
|
||||
# Save individual AP plot
|
||||
output_path = os.path.join(output_dir, f'individual_analysis_{ap_name}.png')
|
||||
plt.savefig(output_path, dpi=300, bbox_inches='tight')
|
||||
plt.close()
|
||||
|
||||
logging.info(f"Created individual analysis for {ap_name}")
|
||||
|
||||
def _plot_ap_coverage_map(self, ax, ap_name: str, ap_coords: Tuple, rssi_grid: np.ndarray, points: List[Tuple]):
|
||||
"""Plot detailed coverage map for individual AP"""
|
||||
x, y = ap_coords[0], ap_coords[1]
|
||||
|
||||
# Create coverage heatmap
|
||||
x_coords = np.array([pt[0] for pt in points])
|
||||
y_coords = np.array([pt[1] for pt in points])
|
||||
x_unique = np.unique(x_coords)
|
||||
y_unique = np.unique(y_coords)
|
||||
|
||||
# Reshape RSSI grid for plotting
|
||||
if len(rssi_grid.shape) == 1:
|
||||
rssi_grid_2d = rssi_grid.reshape((len(y_unique), len(x_unique)))
|
||||
else:
|
||||
rssi_grid_2d = rssi_grid
|
||||
|
||||
# Plot heatmap
|
||||
im = ax.imshow(rssi_grid_2d, extent=[0, self.building_width, 0, self.building_height],
|
||||
origin='lower', cmap='RdYlBu_r', aspect='auto')
|
||||
|
||||
# Add AP location
|
||||
ax.scatter(x, y, s=300, c='red', marker='^', edgecolors='black', linewidth=3, zorder=10)
|
||||
ax.annotate(ap_name, (x, y), xytext=(10, 10), textcoords='offset points',
|
||||
fontsize=12, fontweight='bold', bbox=dict(boxstyle="round,pad=0.3",
|
||||
facecolor="white", alpha=0.8))
|
||||
|
||||
# Add coverage contours
|
||||
levels = [-67, -50, -40, -30]
|
||||
colors = ['red', 'orange', 'yellow', 'green']
|
||||
for level, color in zip(levels, colors):
|
||||
if np.min(rssi_grid_2d) <= level <= np.max(rssi_grid_2d):
|
||||
# Plot the contours in metre coordinates rather than array indices so they line up with the heatmap
contour = ax.contour(x_unique, y_unique, rssi_grid_2d, levels=[level], colors=color,
linewidths=2, alpha=0.8, linestyles='--')
|
||||
ax.clabel(contour, inline=True, fontsize=8, fmt=f'{level} dBm')
|
||||
|
||||
ax.set_title(f'{ap_name} Coverage Map')
|
||||
ax.set_xlabel('X (meters)')
|
||||
ax.set_ylabel('Y (meters)')
|
||||
plt.colorbar(im, ax=ax, label='Signal Strength (dBm)')
|
||||
|
||||
def _plot_signal_distribution(self, ax, ap_name: str, rssi_grid: np.ndarray):
|
||||
"""Plot signal strength distribution"""
|
||||
rssi_values = rssi_grid.flatten()
|
||||
|
||||
# Create histogram with KDE
|
||||
ax.hist(rssi_values, bins=30, alpha=0.7, density=True, color='skyblue', edgecolor='black')
|
||||
|
||||
# Add KDE curve
|
||||
from scipy.stats import gaussian_kde
|
||||
kde = gaussian_kde(rssi_values)
|
||||
x_range = np.linspace(rssi_values.min(), rssi_values.max(), 100)
|
||||
ax.plot(x_range, kde(x_range), 'r-', linewidth=2, label='KDE')
|
||||
|
||||
# Add statistics
|
||||
mean_signal = np.mean(rssi_values)
|
||||
std_signal = np.std(rssi_values)
|
||||
ax.axvline(mean_signal, color='red', linestyle='--', linewidth=2,
|
||||
label=f'Mean: {mean_signal:.1f} dBm')
|
||||
ax.axvline(mean_signal + std_signal, color='orange', linestyle=':', linewidth=2,
|
||||
label=f'+1σ: {mean_signal + std_signal:.1f} dBm')
|
||||
ax.axvline(mean_signal - std_signal, color='orange', linestyle=':', linewidth=2,
|
||||
label=f'-1σ: {mean_signal - std_signal:.1f} dBm')
|
||||
|
||||
ax.set_title(f'{ap_name} Signal Distribution')
|
||||
ax.set_xlabel('Signal Strength (dBm)')
|
||||
ax.set_ylabel('Density')
|
||||
ax.legend()
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
def _plot_coverage_statistics(self, ax, ap_name: str, rssi_grid: np.ndarray):
|
||||
"""Plot coverage statistics"""
|
||||
rssi_values = rssi_grid.flatten()
|
||||
|
||||
# Calculate coverage metrics
|
||||
excellent_coverage = np.sum(rssi_values >= -40) / len(rssi_values) * 100
|
||||
good_coverage = np.sum((rssi_values >= -50) & (rssi_values < -40)) / len(rssi_values) * 100
|
||||
acceptable_coverage = np.sum((rssi_values >= -67) & (rssi_values < -50)) / len(rssi_values) * 100
|
||||
poor_coverage = np.sum(rssi_values < -67) / len(rssi_values) * 100
|
||||
|
||||
# Create stacked bar chart
|
||||
categories = ['Excellent\n(≥-40 dBm)', 'Good\n(-50 to -40 dBm)',
|
||||
'Acceptable\n(-67 to -50 dBm)', 'Poor\n(<-67 dBm)']
|
||||
values = [excellent_coverage, good_coverage, acceptable_coverage, poor_coverage]
|
||||
colors = ['green', 'yellow', 'orange', 'red']
|
||||
|
||||
bars = ax.bar(categories, values, color=colors, alpha=0.8, edgecolor='black')
|
||||
|
||||
# Add value labels
|
||||
for bar, value in zip(bars, values):
|
||||
height = bar.get_height()
|
||||
ax.text(bar.get_x() + bar.get_width()/2., height + 0.5,
|
||||
f'{value:.1f}%', ha='center', va='bottom', fontweight='bold')
|
||||
|
||||
ax.set_title(f'{ap_name} Coverage Statistics')
|
||||
ax.set_ylabel('Coverage Percentage (%)')
|
||||
ax.set_ylim(0, 100)
|
||||
plt.setp(ax.get_xticklabels(), rotation=45, ha='right')
|
||||
ax.grid(True, alpha=0.3)
|
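# The same thresholds expressed as a small helper (illustrative, not used by the plots):
#
#   def classify_rssi(rssi_dbm: float) -> str:
#       if rssi_dbm >= -40:
#           return 'Excellent'
#       if rssi_dbm >= -50:
#           return 'Good'
#       if rssi_dbm >= -67:
#           return 'Acceptable'
#       return 'Poor'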
||||
|
||||
def _plot_distance_vs_signal(self, ax, ap_name: str, ap_coords: Tuple, points: List[Tuple], collector):
|
||||
"""Plot distance vs signal strength relationship"""
|
||||
x, y = ap_coords[0], ap_coords[1]
|
||||
|
||||
distances = []
|
||||
signals = []
|
||||
|
||||
for pt in points:
|
||||
distance = np.sqrt((pt[0] - x)**2 + (pt[1] - y)**2)
|
||||
signal = collector.calculate_rssi(distance, None)
|
||||
distances.append(distance)
|
||||
signals.append(signal)
|
||||
|
||||
# Create scatter plot
|
||||
ax.scatter(distances, signals, alpha=0.6, s=20, c='blue')
|
||||
|
||||
# Add theoretical path loss curve
|
||||
max_dist = max(distances)
|
||||
dist_range = np.linspace(0, max_dist, 100)
|
||||
theoretical_signals = [collector.calculate_rssi(d, None) for d in dist_range]
|
||||
ax.plot(dist_range, theoretical_signals, 'r--', linewidth=2, label='Theoretical Path Loss')
|
||||
|
||||
# Add coverage thresholds
|
||||
ax.axhline(y=-40, color='green', linestyle='-', alpha=0.7, label='Excellent (-40 dBm)')
|
||||
ax.axhline(y=-50, color='yellow', linestyle='-', alpha=0.7, label='Good (-50 dBm)')
|
||||
ax.axhline(y=-67, color='orange', linestyle='-', alpha=0.7, label='Acceptable (-67 dBm)')
|
||||
|
||||
ax.set_title(f'{ap_name} Distance vs Signal Strength')
|
||||
ax.set_xlabel('Distance (meters)')
|
||||
ax.set_ylabel('Signal Strength (dBm)')
|
||||
ax.legend()
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
def _plot_coverage_quality(self, ax, ap_name: str, rssi_grid: np.ndarray):
|
||||
"""Plot coverage quality analysis"""
|
||||
rssi_values = rssi_grid.flatten()
|
||||
|
||||
# Calculate quality metrics
|
||||
mean_signal = np.mean(rssi_values)
|
||||
std_signal = np.std(rssi_values)
|
||||
min_signal = np.min(rssi_values)
|
||||
max_signal = np.max(rssi_values)
|
||||
|
||||
# Create radar chart-like visualization
|
||||
metrics = ['Mean Signal', 'Signal Stability', 'Coverage Range', 'Quality Score']
|
||||
values = [
|
||||
(mean_signal + 100) / 100, # Normalize to 0-1
|
||||
1 - (std_signal / 50), # Lower std is better
|
||||
(max_signal - min_signal) / 100, # Coverage range
|
||||
np.sum(rssi_values >= -50) / len(rssi_values) # Quality score
|
||||
]
|
||||
|
||||
# Ensure values are in [0, 1]
|
||||
values = [max(0, min(1, v)) for v in values]
|
||||
|
||||
# Create bar chart
|
||||
bars = ax.bar(metrics, values, color=['skyblue', 'lightgreen', 'lightcoral', 'gold'],
|
||||
alpha=0.8, edgecolor='black')
|
||||
|
||||
# Add value labels
|
||||
for bar, value in zip(bars, values):
|
||||
height = bar.get_height()
|
||||
ax.text(bar.get_x() + bar.get_width()/2., height + 0.02,
|
||||
f'{value:.2f}', ha='center', va='bottom', fontweight='bold')
|
||||
|
||||
ax.set_title(f'{ap_name} Coverage Quality Analysis')
|
||||
ax.set_ylabel('Normalized Score (0-1)')
|
||||
ax.set_ylim(0, 1.1)
|
||||
plt.setp(ax.get_xticklabels(), rotation=45, ha='right')
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
def _plot_performance_metrics(self, ax, ap_name: str, ap_coords: Tuple, rssi_grid: np.ndarray):
|
||||
"""Plot performance metrics dashboard"""
|
||||
x, y = ap_coords[0], ap_coords[1]
|
||||
z = ap_coords[2] if len(ap_coords) > 2 else 0
|
||||
tx_power = ap_coords[3] if len(ap_coords) > 3 else 20.0
|
||||
|
||||
rssi_values = rssi_grid.flatten()
|
||||
|
||||
# Calculate performance metrics
|
||||
mean_signal = np.mean(rssi_values)
|
||||
coverage_area = np.sum(rssi_values >= -67) / len(rssi_values) * 100
|
||||
signal_variance = np.var(rssi_values)
|
||||
efficiency = (mean_signal + 100) / tx_power # Signal per dBm of power
|
||||
|
||||
# Create metrics display
|
||||
metrics_text = f"""
|
||||
AP Performance Metrics
|
||||
|
||||
Location: ({x:.1f}, {y:.1f}, {z:.1f})
|
||||
TX Power: {tx_power:.1f} dBm
|
||||
|
||||
Mean Signal: {mean_signal:.1f} dBm
|
||||
Coverage Area: {coverage_area:.1f}%
|
||||
Signal Variance: {signal_variance:.1f} dB²
|
||||
Power Efficiency: {efficiency:.2f} dBm/dBm
|
||||
|
||||
Signal Range: {np.min(rssi_values):.1f} to {np.max(rssi_values):.1f} dBm
|
||||
Coverage Quality: {'Excellent' if coverage_area > 90 else 'Good' if coverage_area > 70 else 'Fair'}
|
||||
"""
|
||||
|
||||
ax.text(0.1, 0.9, metrics_text, transform=ax.transAxes, fontsize=10,
|
||||
verticalalignment='top', bbox=dict(boxstyle="round,pad=0.5",
|
||||
facecolor="lightblue", alpha=0.8))
|
||||
|
||||
ax.set_title(f'{ap_name} Performance Dashboard')
|
||||
ax.set_xlim(0, 1)
|
||||
ax.set_ylim(0, 1)
|
||||
ax.axis('off')
|
||||
|
||||
def create_combined_analysis(self, ap_locations: Dict, rssi_grids: List[np.ndarray],
|
||||
points: List[Tuple], output_dir: str):
|
||||
"""Create comprehensive combined analysis"""
|
||||
logging.info("Creating combined AP analysis...")
|
||||
|
||||
# Create large comprehensive plot
|
||||
fig = plt.figure(figsize=(24, 20))
|
||||
|
||||
# 1. Combined Coverage Heatmap
|
||||
ax1 = plt.subplot(3, 4, 1)
|
||||
self._plot_combined_coverage_heatmap(ax1, ap_locations, rssi_grids, points)
|
||||
|
||||
# 2. AP Performance Comparison
|
||||
ax2 = plt.subplot(3, 4, 2)
|
||||
self._plot_ap_performance_comparison(ax2, ap_locations, rssi_grids)
|
||||
|
||||
# 3. Coverage Overlap Analysis
|
||||
ax3 = plt.subplot(3, 4, 3)
|
||||
self._plot_coverage_overlap(ax3, ap_locations, rssi_grids)
|
||||
|
||||
# 4. Signal Quality Distribution
|
||||
ax4 = plt.subplot(3, 4, 4)
|
||||
self._plot_combined_signal_quality(ax4, rssi_grids)
|
||||
|
||||
# 5. AP Placement Analysis
|
||||
ax5 = plt.subplot(3, 4, 5)
|
||||
self._plot_ap_placement_analysis(ax5, ap_locations)
|
||||
|
||||
# 6. Interference Analysis
|
||||
ax6 = plt.subplot(3, 4, 6)
|
||||
self._plot_interference_analysis(ax6, ap_locations, rssi_grids)
|
||||
|
||||
# 7. Coverage Efficiency
|
||||
ax7 = plt.subplot(3, 4, 7)
|
||||
self._plot_coverage_efficiency(ax7, ap_locations, rssi_grids)
|
||||
|
||||
# 8. Signal Strength Statistics
|
||||
ax8 = plt.subplot(3, 4, 8)
|
||||
self._plot_signal_statistics(ax8, rssi_grids)
|
||||
|
||||
# 9. AP Load Distribution
|
||||
ax9 = plt.subplot(3, 4, 9)
|
||||
self._plot_ap_load_distribution(ax9, ap_locations, rssi_grids, points)
|
||||
|
||||
# 10. Coverage Gaps Analysis
|
||||
ax10 = plt.subplot(3, 4, 10)
|
||||
self._plot_coverage_gaps(ax10, ap_locations, rssi_grids, points)
|
||||
|
||||
# 11. Power Efficiency Analysis
|
||||
ax11 = plt.subplot(3, 4, 11)
|
||||
self._plot_power_efficiency(ax11, ap_locations, rssi_grids)
|
||||
|
||||
# 12. Overall System Metrics
|
||||
ax12 = plt.subplot(3, 4, 12)
|
||||
self._plot_system_metrics(ax12, ap_locations, rssi_grids)
|
||||
|
||||
plt.suptitle('Advanced WiFi AP System Analysis', fontsize=20, fontweight='bold')
|
||||
plt.tight_layout()
|
||||
|
||||
# Save combined analysis
|
||||
output_path = os.path.join(output_dir, 'advanced_combined_analysis.png')
|
||||
plt.savefig(output_path, dpi=300, bbox_inches='tight')
|
||||
plt.close()
|
||||
|
||||
logging.info("Created advanced combined analysis")
|
||||
|
||||
def _plot_combined_coverage_heatmap(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray], points: List[Tuple]):
|
||||
"""Plot combined coverage heatmap"""
|
||||
# Combine all RSSI grids
|
||||
combined_grid = np.max(np.stack(rssi_grids), axis=0)
|
||||
|
||||
# Create heatmap
|
||||
im = ax.imshow(combined_grid, extent=[0, self.building_width, 0, self.building_height],
|
||||
origin='lower', cmap='RdYlBu_r', aspect='auto')
|
||||
|
||||
# Add AP locations
|
||||
colors = ['red', 'blue', 'green', 'orange', 'purple', 'brown', 'pink', 'gray', 'olive', 'cyan'][:len(ap_locations)]
|
||||
for i, (ap_name, ap_coords) in enumerate(ap_locations.items()):
|
||||
x, y = ap_coords[0], ap_coords[1]
|
||||
ax.scatter(x, y, s=200, c=[colors[i]], marker='^', edgecolors='black',
|
||||
linewidth=2, zorder=10, label=ap_name)
|
||||
ax.annotate(ap_name, (x, y), xytext=(5, 5), textcoords='offset points',
|
||||
fontsize=8, fontweight='bold')
|
||||
|
||||
ax.set_title('Combined Coverage Heatmap')
|
||||
ax.set_xlabel('X (meters)')
|
||||
ax.set_ylabel('Y (meters)')
|
||||
plt.colorbar(im, ax=ax, label='Signal Strength (dBm)')
|
||||
ax.legend(bbox_to_anchor=(1.05, 1), loc='upper left')
|
||||
|
||||
def _plot_ap_performance_comparison(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray]):
|
||||
"""Plot AP performance comparison"""
|
||||
ap_names = list(ap_locations.keys())
|
||||
mean_signals = []
|
||||
coverage_areas = []
|
||||
|
||||
for rssi_grid in rssi_grids:
|
||||
rssi_values = rssi_grid.flatten()
|
||||
mean_signals.append(np.mean(rssi_values))
|
||||
coverage_areas.append(np.sum(rssi_values >= -67) / len(rssi_values) * 100)
|
||||
|
||||
x = np.arange(len(ap_names))
|
||||
width = 0.35
|
||||
|
||||
bars1 = ax.bar(x - width/2, mean_signals, width, label='Mean Signal (dBm)', alpha=0.8)
|
||||
bars2 = ax.bar(x + width/2, coverage_areas, width, label='Coverage Area (%)', alpha=0.8)
|
||||
|
||||
ax.set_title('AP Performance Comparison')
|
||||
ax.set_xlabel('Access Points')
|
||||
ax.set_ylabel('Performance Metrics')
|
||||
ax.set_xticks(x)
|
||||
ax.set_xticklabels(ap_names, rotation=45, ha='right')
|
||||
ax.legend()
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
# Add value labels
|
||||
for bars in [bars1, bars2]:
|
||||
for bar in bars:
|
||||
height = bar.get_height()
|
||||
ax.text(bar.get_x() + bar.get_width()/2., height + 0.5,
|
||||
f'{height:.1f}', ha='center', va='bottom', fontsize=8)
|
||||
|
||||
def _plot_coverage_overlap(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray]):
|
||||
"""Plot coverage overlap analysis"""
|
||||
# Calculate overlap matrix
|
||||
n_aps = len(ap_locations)
|
||||
overlap_matrix = np.zeros((n_aps, n_aps))
|
||||
|
||||
for i in range(n_aps):
|
||||
for j in range(n_aps):
|
||||
if i != j:
|
||||
# Calculate overlap between AP i and AP j
|
||||
coverage_i = rssi_grids[i] >= -67
|
||||
coverage_j = rssi_grids[j] >= -67
|
||||
overlap = np.sum(coverage_i & coverage_j) / np.sum(coverage_i | coverage_j)
|
||||
overlap_matrix[i, j] = overlap
|
||||
|
||||
# Plot overlap heatmap
|
||||
im = ax.imshow(overlap_matrix, cmap='YlOrRd', aspect='auto')
|
||||
ax.set_title('Coverage Overlap Analysis')
|
||||
ax.set_xlabel('AP Index')
|
||||
ax.set_ylabel('AP Index')
|
||||
|
||||
# Add text annotations
|
||||
for i in range(n_aps):
|
||||
for j in range(n_aps):
|
||||
if i != j:
|
||||
text = ax.text(j, i, f'{overlap_matrix[i, j]:.2f}',
|
||||
ha="center", va="center", color="black", fontsize=8)
|
||||
|
||||
plt.colorbar(im, ax=ax, label='Overlap Ratio')
|
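# The overlap ratio computed above is the Jaccard index of the two -67 dBm coverage masks,
#   J(A, B) = |A ∩ B| / |A ∪ B|.
# For example, if AP i covers 600 grid cells, AP j covers 500 and they share 200, the ratio is
# 200 / (600 + 500 - 200) ≈ 0.22; values near 1.0 indicate largely redundant placement.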
||||
|
||||
def _plot_combined_signal_quality(self, ax, rssi_grids: List[np.ndarray]):
|
||||
"""Plot combined signal quality distribution"""
|
||||
all_signals = []
|
||||
for rssi_grid in rssi_grids:
|
||||
all_signals.extend(rssi_grid.flatten())
|
||||
|
||||
# Create quality categories
|
||||
excellent = np.sum(np.array(all_signals) >= -40)
|
||||
good = np.sum((np.array(all_signals) >= -50) & (np.array(all_signals) < -40))
|
||||
acceptable = np.sum((np.array(all_signals) >= -67) & (np.array(all_signals) < -50))
|
||||
poor = np.sum(np.array(all_signals) < -67)
|
||||
|
||||
categories = ['Excellent\n(≥-40 dBm)', 'Good\n(-50 to -40 dBm)',
|
||||
'Acceptable\n(-67 to -50 dBm)', 'Poor\n(<-67 dBm)']
|
||||
values = [excellent, good, acceptable, poor]
|
||||
colors = ['green', 'yellow', 'orange', 'red']
|
||||
|
||||
wedges, texts, autotexts = ax.pie(values, labels=categories, colors=colors, autopct='%1.1f%%',
|
||||
startangle=90)
|
||||
ax.set_title('Combined Signal Quality Distribution')
|
||||
|
||||
def _plot_ap_placement_analysis(self, ax, ap_locations: Dict):
|
||||
"""Plot AP placement analysis"""
|
||||
x_coords = [ap_coords[0] for ap_coords in ap_locations.values()]
|
||||
y_coords = [ap_coords[1] for ap_coords in ap_locations.values()]
|
||||
z_coords = [ap_coords[2] if len(ap_coords) > 2 else 0 for ap_coords in ap_locations.values()]
|
||||
|
||||
# Create 3D-like visualization
|
||||
scatter = ax.scatter(x_coords, y_coords, s=[100 + z*5 for z in z_coords],
|
||||
c=z_coords, cmap='viridis', alpha=0.7, edgecolors='black')
|
||||
|
||||
# Add AP labels
|
||||
for i, (ap_name, ap_coords) in enumerate(ap_locations.items()):
|
||||
ax.annotate(ap_name, (ap_coords[0], ap_coords[1]), xytext=(5, 5),
|
||||
textcoords='offset points', fontsize=8, fontweight='bold')
|
||||
|
||||
ax.set_title('AP Placement Analysis')
|
||||
ax.set_xlabel('X (meters)')
|
||||
ax.set_ylabel('Y (meters)')
|
||||
ax.set_xlim(0, self.building_width)
|
||||
ax.set_ylim(0, self.building_height)
|
||||
plt.colorbar(scatter, ax=ax, label='Z-coordinate (m)')
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
def _plot_interference_analysis(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray]):
|
||||
"""Plot interference analysis"""
|
||||
# Calculate interference at each point
|
||||
interference_levels = []
|
||||
|
||||
for i in range(len(rssi_grids[0].flatten())):
|
||||
signals = [grid.flatten()[i] for grid in rssi_grids]
|
||||
if len(signals) > 1:
|
||||
# Calculate interference as sum of all signals except the strongest
|
||||
sorted_signals = sorted(signals, reverse=True)
|
||||
interference = 10 * np.log10(sum(10**(s/10) for s in sorted_signals[1:]))
|
||||
interference_levels.append(interference)
|
||||
|
||||
# Plot interference distribution
|
||||
ax.hist(interference_levels, bins=30, alpha=0.7, color='red', edgecolor='black')
|
||||
ax.axvline(np.mean(interference_levels), color='blue', linestyle='--',
|
||||
linewidth=2, label=f'Mean: {np.mean(interference_levels):.1f} dBm')
|
||||
|
||||
ax.set_title('Interference Analysis')
|
||||
ax.set_xlabel('Interference Level (dBm)')
|
||||
ax.set_ylabel('Frequency')
|
||||
ax.legend()
|
||||
ax.grid(True, alpha=0.3)
|
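# Worked example for the linear-power sum above (illustrative numbers): two interferers at
# -70 dBm and -73 dBm combine to 10*log10(10**(-70/10) + 10**(-73/10)) ≈ -68.2 dBm, i.e. the
# aggregate is dominated by, but slightly stronger than, the loudest single interferer.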
||||
|
||||
def _plot_coverage_efficiency(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray]):
|
||||
"""Plot coverage efficiency analysis"""
|
||||
ap_names = list(ap_locations.keys())
|
||||
efficiencies = []
|
||||
|
||||
for i, rssi_grid in enumerate(rssi_grids):
|
||||
rssi_values = rssi_grid.flatten()
|
||||
coverage_area = np.sum(rssi_values >= -67) / len(rssi_values)
|
||||
tx_power = ap_locations[ap_names[i]][3] if len(ap_locations[ap_names[i]]) > 3 else 20.0
|
||||
efficiency = coverage_area / tx_power # Coverage per dBm
|
||||
efficiencies.append(efficiency)
|
||||
|
||||
bars = ax.bar(ap_names, efficiencies, color='lightgreen', alpha=0.8, edgecolor='black')
|
||||
|
||||
# Add value labels
|
||||
for bar, efficiency in zip(bars, efficiencies):
|
||||
height = bar.get_height()
|
||||
ax.text(bar.get_x() + bar.get_width()/2., height + 0.001,
|
||||
f'{efficiency:.3f}', ha='center', va='bottom', fontsize=8)
|
||||
|
||||
ax.set_title('Coverage Efficiency (Coverage per dBm)')
|
||||
ax.set_ylabel('Efficiency')
|
||||
plt.setp(ax.get_xticklabels(), rotation=45, ha='right')
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
def _plot_signal_statistics(self, ax, rssi_grids: List[np.ndarray]):
|
||||
"""Plot signal statistics"""
|
||||
all_signals = []
|
||||
for rssi_grid in rssi_grids:
|
||||
all_signals.extend(rssi_grid.flatten())
|
||||
|
||||
# Calculate statistics
|
||||
mean_signal = np.mean(all_signals)
|
||||
std_signal = np.std(all_signals)
|
||||
min_signal = np.min(all_signals)
|
||||
max_signal = np.max(all_signals)
|
||||
|
||||
# Create statistics display
|
||||
stats_text = f"""
|
||||
Overall Signal Statistics
|
||||
|
||||
Mean Signal: {mean_signal:.1f} dBm
|
||||
Std Deviation: {std_signal:.1f} dBm
|
||||
Min Signal: {min_signal:.1f} dBm
|
||||
Max Signal: {max_signal:.1f} dBm
|
||||
Signal Range: {max_signal - min_signal:.1f} dBm
|
||||
|
||||
Coverage Quality:
|
||||
• Excellent (≥-40 dBm): {np.sum(np.array(all_signals) >= -40) / len(all_signals) * 100:.1f}%
|
||||
• Good (-50 to -40 dBm): {np.sum((np.array(all_signals) >= -50) & (np.array(all_signals) < -40)) / len(all_signals) * 100:.1f}%
|
||||
• Acceptable (-67 to -50 dBm): {np.sum((np.array(all_signals) >= -67) & (np.array(all_signals) < -50)) / len(all_signals) * 100:.1f}%
|
||||
• Poor (<-67 dBm): {np.sum(np.array(all_signals) < -67) / len(all_signals) * 100:.1f}%
|
||||
"""
|
||||
|
||||
ax.text(0.1, 0.9, stats_text, transform=ax.transAxes, fontsize=9,
|
||||
verticalalignment='top', bbox=dict(boxstyle="round,pad=0.5",
|
||||
facecolor="lightblue", alpha=0.8))
|
||||
|
||||
ax.set_title('Signal Statistics Summary')
|
||||
ax.set_xlim(0, 1)
|
||||
ax.set_ylim(0, 1)
|
||||
ax.axis('off')
|
||||
|
||||
def _plot_ap_load_distribution(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray], points: List[Tuple]):
|
||||
"""Plot AP load distribution"""
|
||||
ap_names = list(ap_locations.keys())
|
||||
load_distribution = []
|
||||
|
||||
# Calculate load for each AP (number of points where it's the strongest)
|
||||
for i, rssi_grid in enumerate(rssi_grids):
|
||||
load = 0
|
||||
for j, rssi_grid_other in enumerate(rssi_grids):
|
||||
if i != j:
|
||||
# Count points where this AP is stronger
|
||||
stronger_points = np.sum(rssi_grid > rssi_grid_other)
|
||||
load += stronger_points
|
||||
load_distribution.append(load)
|
||||
|
||||
# Normalize load
|
||||
total_load = sum(load_distribution)
# Guard against a single-AP layout, where no pairwise comparisons are made
load_percentages = [load / total_load * 100 if total_load else 0.0 for load in load_distribution]
|
||||
|
||||
bars = ax.bar(ap_names, load_percentages, color='lightcoral', alpha=0.8, edgecolor='black')
|
||||
|
||||
# Add value labels
|
||||
for bar, percentage in zip(bars, load_percentages):
|
||||
height = bar.get_height()
|
||||
ax.text(bar.get_x() + bar.get_width()/2., height + 0.5,
|
||||
f'{percentage:.1f}%', ha='center', va='bottom', fontsize=8)
|
||||
|
||||
ax.set_title('AP Load Distribution')
|
||||
ax.set_ylabel('Load Percentage (%)')
|
||||
plt.setp(ax.get_xticklabels(), rotation=45, ha='right')
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
def _plot_coverage_gaps(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray], points: List[Tuple]):
|
||||
"""Plot coverage gaps analysis"""
|
||||
# Find coverage gaps
|
||||
combined_grid = np.max(np.stack(rssi_grids), axis=0)
|
||||
coverage_gaps = combined_grid < -67
|
||||
|
||||
# Create gap visualization
|
||||
gap_im = ax.imshow(coverage_gaps, extent=[0, self.building_width, 0, self.building_height],
|
||||
origin='lower', cmap='Reds', aspect='auto')
|
||||
|
||||
# Add AP locations
|
||||
for ap_name, ap_coords in ap_locations.items():
|
||||
x, y = ap_coords[0], ap_coords[1]
|
||||
ax.scatter(x, y, s=100, c='blue', marker='^', edgecolors='white',
|
||||
linewidth=2, zorder=10)
|
||||
ax.annotate(ap_name, (x, y), xytext=(5, 5), textcoords='offset points',
|
||||
fontsize=8, fontweight='bold', color='white')
|
||||
|
||||
gap_percentage = np.sum(coverage_gaps) / coverage_gaps.size * 100
|
||||
ax.set_title(f'Coverage Gaps Analysis\n({gap_percentage:.1f}% gaps)')
|
||||
ax.set_xlabel('X (meters)')
|
||||
ax.set_ylabel('Y (meters)')
|
||||
plt.colorbar(gap_im, ax=ax, label='Coverage Gap')
|
||||
|
||||
def _plot_power_efficiency(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray]):
|
||||
"""Plot power efficiency analysis"""
|
||||
ap_names = list(ap_locations.keys())
|
||||
power_efficiencies = []
|
||||
|
||||
for i, (ap_name, ap_coords) in enumerate(ap_locations.items()):
|
||||
tx_power = ap_coords[3] if len(ap_coords) > 3 else 20.0
|
||||
rssi_values = rssi_grids[i].flatten()
|
||||
mean_signal = np.mean(rssi_values)
|
||||
efficiency = (mean_signal + 100) / tx_power # Signal per dBm
|
||||
power_efficiencies.append(efficiency)
|
||||
|
||||
bars = ax.bar(ap_names, power_efficiencies, color='gold', alpha=0.8, edgecolor='black')
|
||||
|
||||
# Add value labels
|
||||
for bar, efficiency in zip(bars, power_efficiencies):
|
||||
height = bar.get_height()
|
||||
ax.text(bar.get_x() + bar.get_width()/2., height + 0.1,
|
||||
f'{efficiency:.2f}', ha='center', va='bottom', fontsize=8)
|
||||
|
||||
ax.set_title('Power Efficiency (Signal per dBm)')
|
||||
ax.set_ylabel('Efficiency')
|
||||
plt.setp(ax.get_xticklabels(), rotation=45, ha='right')
|
||||
ax.grid(True, alpha=0.3)
|
||||
|
||||
def _plot_system_metrics(self, ax, ap_locations: Dict, rssi_grids: List[np.ndarray]):
|
||||
"""Plot overall system metrics"""
|
||||
# Calculate system-wide metrics
|
||||
combined_grid = np.max(np.stack(rssi_grids), axis=0)
|
||||
all_signals = combined_grid.flatten()
|
||||
|
||||
total_coverage = np.sum(all_signals >= -67) / len(all_signals) * 100
|
||||
mean_signal = np.mean(all_signals)
|
||||
signal_variance = np.var(all_signals)
|
||||
total_power = sum(ap_coords[3] if len(ap_coords) > 3 else 20.0 for ap_coords in ap_locations.values())
|
||||
|
||||
# Create metrics display
|
||||
metrics_text = f"""
|
||||
System Performance Summary
|
||||
|
||||
Total APs: {len(ap_locations)}
|
||||
Total Coverage: {total_coverage:.1f}%
|
||||
Mean Signal: {mean_signal:.1f} dBm
|
||||
Signal Variance: {signal_variance:.1f} dB²
|
||||
Total Power: {total_power:.1f} dBm
|
||||
|
||||
Coverage Quality:
|
||||
• Excellent: {np.sum(all_signals >= -40) / len(all_signals) * 100:.1f}%
|
||||
• Good: {np.sum((all_signals >= -50) & (all_signals < -40)) / len(all_signals) * 100:.1f}%
|
||||
• Acceptable: {np.sum((all_signals >= -67) & (all_signals < -50)) / len(all_signals) * 100:.1f}%
|
||||
• Poor: {np.sum(all_signals < -67) / len(all_signals) * 100:.1f}%
|
||||
|
||||
System Efficiency: {total_coverage / total_power:.2f}%/dBm
|
||||
"""
|
||||
|
||||
ax.text(0.1, 0.9, metrics_text, transform=ax.transAxes, fontsize=9,
|
||||
verticalalignment='top', bbox=dict(boxstyle="round,pad=0.5",
|
||||
facecolor="lightgreen", alpha=0.8))
|
||||
|
||||
ax.set_title('System Performance Metrics')
|
||||
ax.set_xlim(0, 1)
|
||||
ax.set_ylim(0, 1)
|
||||
ax.axis('off')
|
||||
1
src/data_collection/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Data collection package."""
|
||||
107
src/data_collection/collector.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import time
|
||||
from datetime import datetime
|
||||
import os
|
||||
|
||||
class WiFiDataCollector:
|
||||
def __init__(self, simulation_mode=True):
|
||||
"""Initialize the WiFi data collector.
|
||||
|
||||
Args:
|
||||
simulation_mode (bool): Whether to use simulated data
|
||||
"""
|
||||
self.simulation_mode = simulation_mode
|
||||
|
||||
def collect_training_data(self, duration_minutes=60, interval_seconds=1):
|
||||
"""Collect WiFi signal strength data for training.
|
||||
|
||||
Args:
|
||||
duration_minutes (int): Duration to collect data in minutes
|
||||
interval_seconds (int): Interval between measurements in seconds
|
||||
|
||||
Returns:
|
||||
pd.DataFrame: Collected WiFi data
|
||||
"""
|
||||
if self.simulation_mode:
|
||||
return self._generate_simulated_data(duration_minutes, interval_seconds)
|
||||
else:
|
||||
return self._collect_real_data(duration_minutes, interval_seconds)
|
||||
|
||||
def _generate_simulated_data(self, duration_minutes, interval_seconds):
|
||||
"""Generate simulated WiFi data.
|
||||
|
||||
Args:
|
||||
duration_minutes (int): Duration to generate data for
|
||||
interval_seconds (int): Interval between measurements
|
||||
|
||||
Returns:
|
||||
pd.DataFrame: Generated WiFi data
|
||||
"""
|
||||
# Calculate number of samples
|
||||
n_samples = int((duration_minutes * 60) / interval_seconds)
|
||||
|
||||
# Generate simulated access points
|
||||
ap_configs = [
|
||||
{'ssid': 'AP1', 'x': 0.2, 'y': 0.3, 'power': -30},
|
||||
{'ssid': 'AP2', 'x': 0.5, 'y': 0.4, 'power': -30},
|
||||
{'ssid': 'AP3', 'x': 0.8, 'y': 0.2, 'power': -30}
|
||||
]
|
||||
|
||||
# Generate data for each access point
|
||||
data = []
|
||||
for t in range(n_samples):
|
||||
timestamp = datetime.now().timestamp() + t * interval_seconds
|
||||
|
||||
for ap in ap_configs:
|
||||
# Add random movement to simulate walking around
|
||||
x = np.random.normal(ap['x'], 0.1)
|
||||
y = np.random.normal(ap['y'], 0.1)
|
||||
|
||||
# Calculate distance-based signal strength with noise
|
||||
distance = np.sqrt((x - 0.5)**2 + (y - 0.5)**2)
|
||||
rssi = ap['power'] - 20 * np.log10(max(distance, 0.1))
|
||||
rssi += np.random.normal(0, 2) # Add noise
|
||||
|
||||
data.append({
|
||||
'timestamp': timestamp,
|
||||
'ssid': ap['ssid'],
|
||||
'bssid': f"00:11:22:33:44:{55+ap_configs.index(ap):02x}",
|
||||
'rssi': rssi,
|
||||
'channel': 1 + ap_configs.index(ap) * 5,
|
||||
'security': 'WPA2',
|
||||
'x': x,
|
||||
'y': y
|
||||
})
|
||||
|
||||
# Convert to DataFrame
|
||||
df = pd.DataFrame(data)
|
||||
|
||||
# Save to CSV
|
||||
os.makedirs('data', exist_ok=True)
|
||||
output_file = f'data/wifi_data_{datetime.now().strftime("%Y%m%d_%H%M%S")}.csv'
|
||||
df.to_csv(output_file, index=False)
|
||||
print(f"Data saved to {output_file}")
|
||||
print(f"Collected {len(df)} data points\n")
|
||||
|
||||
return df
|
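# Note: the simulated RSSI above follows a log-distance model with path-loss exponent 2,
#   rssi = power - 20*log10(d),  e.g. d = 0.5 -> -30 - 20*log10(0.5) ≈ -24 dBm before the
# 2 dB Gaussian noise term is added.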
||||
|
||||
def _collect_real_data(self, duration_minutes, interval_seconds):
|
||||
"""Collect real WiFi data (not implemented).
|
||||
|
||||
Args:
|
||||
duration_minutes (int): Duration to collect data
|
||||
interval_seconds (int): Interval between measurements
|
||||
|
||||
Returns:
|
||||
pd.DataFrame: Collected WiFi data
|
||||
"""
|
||||
raise NotImplementedError("Real data collection not implemented yet. Use simulation_mode=True")
|
||||
|
||||
if __name__ == "__main__":
|
||||
collector = WiFiDataCollector(simulation_mode=True)
|
||||
print("Starting WiFi data collection (simulation mode)...")
|
||||
data = collector.collect_training_data(duration_minutes=60)
|
||||
print(f"Collected {len(data)} data points")
|
||||
print("\nSample of collected data:")
|
||||
print(data.head())
|
||||
170
src/data_collection/wifi_data_collector.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""Module for collecting and simulating WiFi signal strength data."""
|
||||
|
||||
import numpy as np
|
||||
from typing import List, Tuple, Optional
|
||||
from src.physics.materials import SignalPath, Material, MATERIALS
|
||||
|
||||
class WiFiDataCollector:
|
||||
"""Collects and simulates WiFi signal strength data with material effects."""
|
||||
|
||||
def __init__(self, tx_power: float = 20.0, frequency: float = 2.4e9):
|
||||
"""Initialize the WiFi data collector.
|
||||
|
||||
Args:
|
||||
tx_power (float): Transmit power in dBm
|
||||
frequency (float): Signal frequency in Hz (default: 2.4 GHz)
|
||||
"""
|
||||
self.tx_power = tx_power
|
||||
self.frequency = frequency
|
||||
self.noise_floor = -96.0 # Typical WiFi noise floor in dBm
|
||||
|
||||
def calculate_free_space_loss(self, distance: float) -> float:
|
||||
"""Calculate free space path loss.
|
||||
|
||||
Args:
|
||||
distance (float): Distance in meters
|
||||
|
||||
Returns:
|
||||
float: Path loss in dB
|
||||
"""
|
||||
c = 3e8 # Speed of light
|
||||
wavelength = c / self.frequency
|
||||
|
||||
# Free space path loss formula
|
||||
if distance == 0:
|
||||
return 0
|
||||
return 20 * np.log10(4 * np.pi * distance / wavelength)
|
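# Worked example (illustrative): at 2.4 GHz the wavelength is c / f = 3e8 / 2.4e9 = 0.125 m,
# so the free-space loss at 10 m is 20 * log10(4 * pi * 10 / 0.125) ≈ 60.0 dB. Quick check:
#
#   collector = WiFiDataCollector(tx_power=20.0, frequency=2.4e9)
#   round(collector.calculate_free_space_loss(10.0), 1)   # -> 60.0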
||||
|
||||
def calculate_material_loss(self, signal_path: SignalPath) -> float:
|
||||
"""Calculate signal loss due to materials.
|
||||
|
||||
Args:
|
||||
signal_path (SignalPath): Path containing material layers
|
||||
|
||||
Returns:
|
||||
float: Material loss in dB
|
||||
"""
|
||||
return signal_path.calculate_total_attenuation(self.frequency)
|
||||
|
||||
def add_multipath_effects(self, rssi: float, n_paths: int = 3) -> float:
|
||||
"""Simulate multipath effects on signal strength.
|
||||
|
||||
Args:
|
||||
rssi (float): Original RSSI value
|
||||
n_paths (int): Number of reflection paths to simulate
|
||||
|
||||
Returns:
|
||||
float: RSSI with multipath effects
|
||||
"""
|
||||
# Generate random path delays and attenuations
|
||||
path_losses = np.random.uniform(3, 20, n_paths) # Additional loss per path in dB
|
||||
path_phases = np.random.uniform(0, 2*np.pi, n_paths) # Random phases
|
||||
|
||||
# Convert RSSI to linear power (handle negative values)
|
||||
power_linear = 10 ** (rssi/10) if rssi > -100 else 1e-10
|
||||
|
||||
# Add multipath components
|
||||
for loss, phase in zip(path_losses, path_phases):
|
||||
reflected_power = 10 ** ((rssi - loss)/10) if (rssi - loss) > -100 else 1e-10
|
||||
power_linear += reflected_power * np.cos(phase) # Coherent addition
|
||||
|
||||
# Ensure power is positive before log
|
||||
power_linear = max(power_linear, 1e-10)
|
||||
|
||||
# Convert back to dB
|
||||
return 10 * np.log10(power_linear)
|
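# Worked example (illustrative): a single reflected path 10 dB weaker than the direct path
# changes the received power by 10*log10(1 ± 10**(-10/10)), i.e. roughly +0.4 dB when it adds
# in phase (cos(phase) = 1) and about -0.5 dB in anti-phase (cos(phase) = -1).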
||||
|
||||
def calculate_rssi(self,
|
||||
distance: float,
|
||||
signal_path: Optional[SignalPath] = None,
|
||||
include_multipath: bool = True) -> float:
|
||||
"""Calculate RSSI at a given distance considering materials and multipath.
|
||||
|
||||
Args:
|
||||
distance (float): Distance in meters
|
||||
signal_path (Optional[SignalPath]): Path with materials
|
||||
include_multipath (bool): Whether to include multipath effects
|
||||
|
||||
Returns:
|
||||
float: RSSI value in dBm
|
||||
"""
|
||||
# Calculate free space path loss
|
||||
path_loss = self.calculate_free_space_loss(distance)
|
||||
|
||||
# Add material losses if path is provided
|
||||
material_loss = 0
|
||||
if signal_path is not None:
|
||||
material_loss = self.calculate_material_loss(signal_path)
|
||||
|
||||
# Calculate basic RSSI
|
||||
rssi = self.tx_power - path_loss - material_loss
|
||||
|
||||
# Add multipath effects if requested
|
||||
if include_multipath:
|
||||
rssi = self.add_multipath_effects(rssi)
|
||||
|
||||
# Ensure we don't go below noise floor
|
||||
return max(rssi, self.noise_floor)
|
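# Illustrative check (free space, no materials, multipath disabled so the result is deterministic):
#
#   collector = WiFiDataCollector(tx_power=20.0, frequency=2.4e9)
#   collector.calculate_rssi(10.0, signal_path=None, include_multipath=False)
#   # -> 20 dBm - ~60 dB free-space loss ≈ -40 dBm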
||||
|
||||
def collect_samples(self,
|
||||
points: List[Tuple[float, float]],
|
||||
ap_location: Tuple[float, float],
|
||||
materials_grid: Optional[List[List[Material]]] = None) -> np.ndarray:
|
||||
"""Collect RSSI samples for given points considering materials.
|
||||
|
||||
Args:
|
||||
points: List of (x, y) measurement points
|
||||
ap_location: (x, y) location of access point
|
||||
materials_grid: Optional 2D grid of materials
|
||||
|
||||
Returns:
|
||||
numpy array of RSSI values
|
||||
"""
|
||||
samples = []
|
||||
ap_x, ap_y = ap_location
|
||||
|
||||
for x, y in points:
|
||||
# Calculate distance
|
||||
distance = np.sqrt((x - ap_x)**2 + (y - ap_y)**2)
|
||||
|
||||
# Create signal path if materials grid is provided
|
||||
signal_path = None
|
||||
if materials_grid is not None:
|
||||
signal_path = SignalPath()
|
||||
|
||||
# Simple ray tracing - check materials along direct line
|
||||
if distance > 0.1: # Only do ray tracing for non-zero distances
|
||||
# Calculate step sizes for ray tracing
|
||||
steps = max(int(distance * 2), 3) # At least 3 steps to avoid division by zero
|
||||
|
||||
# Safety check to prevent division by zero
|
||||
if steps > 1:
|
||||
dx = (x - ap_x) / (steps - 1)
|
||||
dy = (y - ap_y) / (steps - 1)
|
||||
|
||||
# Track unique materials encountered
|
||||
materials_seen = set()
|
||||
|
||||
# Trace ray from AP to measurement point
|
||||
for i in range(steps):
|
||||
# Current position along ray
|
||||
curr_x = ap_x + dx * i
|
||||
curr_y = ap_y + dy * i
|
||||
|
||||
# Convert to grid indices
|
||||
grid_x = int(curr_x * 2) # Assuming 0.5m resolution
|
||||
grid_y = int(curr_y * 2)
|
||||
|
||||
# Check if indices are valid
|
||||
if (0 <= grid_y < len(materials_grid) and
|
||||
0 <= grid_x < len(materials_grid[0])):
|
||||
material = materials_grid[grid_y][grid_x]
|
||||
if isinstance(material, Material) and material not in materials_seen:
|
||||
materials_seen.add(material)
|
||||
signal_path.add_layer(material)
|
||||
|
||||
# Calculate RSSI
|
||||
rssi = self.calculate_rssi(distance, signal_path)
|
||||
samples.append(rssi)
|
||||
|
||||
return np.array(samples)
|
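# Illustrative usage (no materials grid, so only free-space loss and multipath apply; the
# coordinates are placeholders):
#
#   collector = WiFiDataCollector(tx_power=20.0)
#   line_cut = [(float(x), 5.0) for x in range(1, 21)]       # 20 points along y = 5 m
#   rssi = collector.collect_samples(line_cut, ap_location=(0.0, 5.0))
#   # rssi decays roughly as 20*log10(distance) plus a few dB of multipath variation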
||||
844
src/enhanced_floor_plan_processor.py
Normal file
@@ -0,0 +1,844 @@
|
||||
"""
|
||||
Enhanced Floor Plan Processor for WiFi Signal Prediction
|
||||
|
||||
This module extends the original floor plan processor to support custom building boundaries
|
||||
(polygon shapes) instead of forcing rectangular dimensions. This allows for more realistic
|
||||
building layouts with irregular shapes.
|
||||
"""
|
||||
|
||||
import matplotlib
|
||||
matplotlib.use('Agg') # Use non-interactive backend
|
||||
import cv2
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib.patches import Rectangle, Polygon
|
||||
from matplotlib.path import Path
|
||||
import os
|
||||
from typing import Dict, List, Tuple, Optional
|
||||
from src.physics.materials import MATERIALS
|
||||
from src.visualization.building_visualizer import BuildingVisualizer
|
||||
import json
|
||||
|
||||
class EnhancedFloorPlanProcessor:
|
||||
def __init__(self):
|
||||
self.image = None
|
||||
self.image_path = None
|
||||
self.width_meters = None
|
||||
self.height_meters = None
|
||||
self.materials_grid = None
|
||||
self.visualizer = None
|
||||
self.regions = [] # List of (x, y, w, h, material) tuples
|
||||
self.resolution = 0.2 # 20 cm resolution
|
||||
self.building_boundary = None # List of (x, y) tuples defining building perimeter
|
||||
self.use_custom_boundary = False # Flag to use custom boundary instead of rectangular
|
||||
|
||||
def load_image(self, image_path: str) -> bool:
|
||||
"""
|
||||
Load a floor plan image (JPEG/PNG).
|
||||
|
||||
Args:
|
||||
image_path: Path to the floor plan image
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
if not os.path.exists(image_path):
|
||||
print(f"Error: Image file not found at {image_path}")
|
||||
return False
|
||||
|
||||
self.image = cv2.imread(image_path)
|
||||
if self.image is None:
|
||||
print(f"Error: Could not load image from {image_path}")
|
||||
return False
|
||||
|
||||
self.image = cv2.cvtColor(self.image, cv2.COLOR_BGR2RGB)
|
||||
self.image_path = image_path
|
||||
|
||||
print(f"Successfully loaded image: {image_path}")
|
||||
print(f"Image dimensions: {self.image.shape[1]} x {self.image.shape[0]} pixels")
|
||||
return True
|
||||
|
||||
def set_building_dimensions(self, width_meters: float, height_meters: float):
|
||||
"""
|
||||
Set the real-world dimensions of the building (for rectangular boundaries).
|
||||
|
||||
Args:
|
||||
width_meters: Building width in meters
|
||||
height_meters: Building height in meters
|
||||
"""
|
||||
self.width_meters = width_meters
|
||||
self.height_meters = height_meters
|
||||
self.use_custom_boundary = False
|
||||
print(f"Building dimensions set to: {width_meters}m x {height_meters}m (rectangular)")
|
||||
|
||||
def set_custom_building_boundary(self, boundary_points: List[Tuple[float, float]]):
|
||||
"""
|
||||
Set a custom building boundary using polygon points.
|
||||
|
||||
Args:
|
||||
boundary_points: List of (x, y) tuples defining the building perimeter in meters
|
||||
"""
|
||||
if len(boundary_points) < 3:
|
||||
print("Error: Boundary must have at least 3 points to form a polygon")
|
||||
return False
|
||||
|
||||
self.building_boundary = boundary_points
|
||||
self.use_custom_boundary = True
|
||||
|
||||
# Calculate bounding box for the custom boundary
|
||||
x_coords = [p[0] for p in boundary_points]
|
||||
y_coords = [p[1] for p in boundary_points]
|
||||
self.width_meters = max(x_coords) - min(x_coords)
|
||||
self.height_meters = max(y_coords) - min(y_coords)
|
||||
|
||||
print(f"Custom building boundary set with {len(boundary_points)} points")
|
||||
print(f"Bounding box: {self.width_meters:.1f}m x {self.height_meters:.1f}m")
|
||||
return True
|
||||
|
||||
def add_boundary_point_interactive(self):
|
||||
"""
|
||||
Interactively add points to define the building boundary.
|
||||
"""
|
||||
if self.image is None:
|
||||
print("No image loaded. Please load an image first.")
|
||||
return
|
||||
|
||||
print("\n=== Adding Building Boundary Point ===")
|
||||
print("Enter coordinates in pixels (use grid as reference):")
|
||||
|
||||
try:
|
||||
x_pixels = int(input("X coordinate: "))
|
||||
y_pixels = int(input("Y coordinate: "))
|
||||
except ValueError:
|
||||
print("Invalid coordinates. Please enter numbers only.")
|
||||
return
|
||||
|
||||
# Convert to meters
|
||||
x_m, y_m = self.pixel_to_meters(x_pixels, y_pixels)
|
||||
|
||||
# Initialize boundary if not exists
|
||||
if self.building_boundary is None:
|
||||
self.building_boundary = []
|
||||
|
||||
self.building_boundary.append((x_m, y_m))
|
||||
self.use_custom_boundary = True
|
||||
|
||||
print(f"Added boundary point: ({x_m:.1f}m, {y_m:.1f}m)")
|
||||
print(f"Total boundary points: {len(self.building_boundary)}")
|
||||
|
||||
# Update bounding box
|
||||
if len(self.building_boundary) >= 2:
|
||||
x_coords = [p[0] for p in self.building_boundary]
|
||||
y_coords = [p[1] for p in self.building_boundary]
|
||||
self.width_meters = max(x_coords) - min(x_coords)
|
||||
self.height_meters = max(y_coords) - min(y_coords)
|
||||
|
||||
# Automatically refresh the display
|
||||
self.display_image_with_grid()
|
||||
|
||||
def define_boundary_by_coordinates(self):
|
||||
"""
|
||||
Define custom polygon boundary by entering coordinates directly.
|
||||
Requires at least 3 points, closes the polygon automatically, and validates all input.
|
||||
"""
|
||||
if self.image is None:
|
||||
print("No image loaded. Please load an image first.")
|
||||
return
|
||||
|
||||
print("\n=== Define Custom Polygon Boundary by Coordinates ===")
|
||||
print("Enter coordinates in meters (not pixels). Example: 0,0 or 10.5,15.2")
|
||||
print("Type 'done' when finished to close the polygon. Minimum 3 points required.")
|
||||
|
||||
self.building_boundary = []
|
||||
self.use_custom_boundary = True
|
||||
point_num = 1
|
||||
|
||||
while True:
|
||||
print(f"\n--- Point {point_num} ---")
|
||||
coord_input = input("Enter coordinates (x,y) or 'done': ").strip().lower()
|
||||
if coord_input == 'done':
|
||||
break
|
||||
try:
|
||||
if ',' in coord_input:
|
||||
x_str, y_str = coord_input.split(',')
|
||||
x_m = float(x_str.strip())
|
||||
y_m = float(y_str.strip())
|
||||
else:
|
||||
print("Invalid format. Use 'x,y' format (e.g., 10.5,15.2)")
|
||||
continue
|
||||
self.building_boundary.append((x_m, y_m))
|
||||
print(f"Added point {point_num}: ({x_m:.2f}m, {y_m:.2f}m)")
|
||||
point_num += 1
|
||||
# Optionally show preview after each point
|
||||
if len(self.building_boundary) >= 2:
|
||||
self.display_image_with_grid()
|
||||
except ValueError:
|
||||
print("Invalid coordinates. Please enter numbers in 'x,y' format.")
|
||||
continue
|
||||
# Validation
|
||||
if len(self.building_boundary) < 3:
|
||||
print("Error: Need at least 3 points to form a polygon boundary.")
|
||||
self.building_boundary = []
|
||||
return
|
||||
# Close the polygon if not already closed
|
||||
if self.building_boundary[0] != self.building_boundary[-1]:
|
||||
self.building_boundary.append(self.building_boundary[0])
|
||||
print(f"Custom polygon boundary defined with {len(self.building_boundary)-1} sides.")
|
||||
self.display_image_with_grid()
|
||||
|
||||
def define_boundary_by_grid_clicking(self):
|
||||
"""
|
||||
Define custom polygon boundary by entering pixel coordinates read off the grid overlay.
|
||||
Requires at least 3 points, closes the polygon automatically, and validates all input.
|
||||
"""
|
||||
if self.image is None:
|
||||
print("No image loaded. Please load an image first.")
|
||||
return
|
||||
print("\n=== Define Custom Polygon Boundary by Grid Clicking ===")
|
||||
print("Look at the grid overlay in 'floor_plan_current_state.png'")
|
||||
print("Enter pixel coordinates from the grid (e.g., 100,50)")
|
||||
print("The system will convert pixels to meters automatically.")
|
||||
print("Type 'done' when finished to close the polygon. Minimum 3 points required.")
|
||||
self.building_boundary = []
|
||||
self.use_custom_boundary = True
|
||||
point_num = 1
|
||||
while True:
|
||||
print(f"\n--- Point {point_num} ---")
|
||||
coord_input = input("Enter pixel coordinates (x,y) or 'done': ").strip().lower()
|
||||
if coord_input == 'done':
|
||||
break
|
||||
try:
|
||||
if ',' in coord_input:
|
||||
x_str, y_str = coord_input.split(',')
|
||||
x_pixels = int(x_str.strip())
|
||||
y_pixels = int(y_str.strip())
|
||||
else:
|
||||
print("Invalid format. Use 'x,y' format (e.g., 100,50)")
|
||||
continue
|
||||
x_m, y_m = self.pixel_to_meters(x_pixels, y_pixels)
|
||||
self.building_boundary.append((x_m, y_m))
|
||||
print(f"Added point {point_num}: Pixel ({x_pixels},{y_pixels}) → Meter ({x_m:.2f}m, {y_m:.2f}m)")
|
||||
point_num += 1
|
||||
if len(self.building_boundary) >= 2:
|
||||
self.display_image_with_grid()
|
||||
except ValueError:
|
||||
print("Invalid coordinates. Please enter numbers in 'x,y' format.")
|
||||
continue
|
||||
if len(self.building_boundary) < 3:
|
||||
print("Error: Need at least 3 points to form a polygon boundary.")
|
||||
self.building_boundary = []
|
||||
return
|
||||
if self.building_boundary[0] != self.building_boundary[-1]:
|
||||
self.building_boundary.append(self.building_boundary[0])
|
||||
print(f"Custom polygon boundary defined with {len(self.building_boundary)-1} sides.")
|
||||
self.display_image_with_grid()
|
||||
|
||||
def finish_boundary(self):
|
||||
"""
|
||||
Finish defining the building boundary and close the polygon.
|
||||
"""
|
||||
if self.building_boundary is None or len(self.building_boundary) < 3:
|
||||
print("Error: Need at least 3 points to form a building boundary")
|
||||
return False
|
||||
|
||||
# Close the polygon by adding the first point at the end if not already closed
|
||||
if self.building_boundary[0] != self.building_boundary[-1]:
|
||||
self.building_boundary.append(self.building_boundary[0])
|
||||
|
||||
print(f"Building boundary completed with {len(self.building_boundary)} points")
|
||||
|
||||
# Automatically refresh the display
|
||||
self.display_image_with_grid()
|
||||
|
||||
return True
|
||||
|
||||
def clear_boundary(self):
|
||||
"""Clear the current building boundary."""
|
||||
self.building_boundary = None
|
||||
self.use_custom_boundary = False
|
||||
print("Building boundary cleared")
|
||||
|
||||
def get_building_perimeter_polygon(self):
|
||||
"""
|
||||
Get the building perimeter polygon for AP placement optimization.
|
||||
|
||||
Returns:
|
||||
List of (x, y) tuples defining the building perimeter, or None if not available
|
||||
"""
|
||||
if self.use_custom_boundary and self.building_boundary:
|
||||
return self.building_boundary
|
||||
elif not self.use_custom_boundary and self.width_meters and self.height_meters:
|
||||
# Return rectangular boundary
|
||||
return [
|
||||
(0, 0),
|
||||
(self.width_meters, 0),
|
||||
(self.width_meters, self.height_meters),
|
||||
(0, self.height_meters),
|
||||
(0, 0)
|
||||
]
|
||||
return None
|
||||
|
||||
def is_point_inside_building(self, x: float, y: float) -> bool:
|
||||
"""
|
||||
Check if a point is inside the building boundary.
|
||||
|
||||
Args:
|
||||
x: X coordinate in meters
|
||||
y: Y coordinate in meters
|
||||
|
||||
Returns:
|
||||
bool: True if point is inside building boundary
|
||||
"""
|
||||
if self.use_custom_boundary and self.building_boundary:
|
||||
# Use custom polygon boundary
|
||||
path = Path(self.building_boundary)
|
||||
return path.contains_point((x, y))
|
||||
elif not self.use_custom_boundary and self.width_meters and self.height_meters:
|
||||
# Use rectangular boundary
|
||||
return 0 <= x <= self.width_meters and 0 <= y <= self.height_meters
|
||||
return False
|
||||
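A hypothetical check of the two boundary modes (the coordinates below are made up for illustration). Note that matplotlib's Path.contains_point leaves points lying exactly on an edge undefined, so a small positive radius can be passed when an inclusive test is needed:

from matplotlib.path import Path

boundary = [(0, 0), (20, 0), (20, 12), (8, 12), (8, 20), (0, 20), (0, 0)]  # L-shaped perimeter
path = Path(boundary)

print(path.contains_point((10, 6)))               # inside the L -> True
print(path.contains_point((15, 15)))              # in the notch -> False
print(path.contains_point((0, 10)))               # exactly on an edge -> result is undefined
print(path.contains_point((0, 10), radius=0.01))  # inclusive variant (expands the path slightly)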
|
||||
def pixel_to_meters(self, x_pixels: int, y_pixels: int) -> Tuple[float, float]:
|
||||
"""
|
||||
Convert pixel coordinates to real-world meters.
|
||||
|
||||
Args:
|
||||
x_pixels: X coordinate in pixels
|
||||
y_pixels: Y coordinate in pixels
|
||||
|
||||
Returns:
|
||||
Tuple of (x_meters, y_meters)
|
||||
"""
|
||||
if self.image is None or self.width_meters is None or self.height_meters is None:
|
||||
return (0, 0)
|
||||
|
||||
img_height, img_width = self.image.shape[:2]
|
||||
x_meters = (x_pixels / img_width) * self.width_meters
|
||||
y_meters = (y_pixels / img_height) * self.height_meters
|
||||
return (x_meters, y_meters)
|
||||
|
||||
def meters_to_pixels(self, x_meters: float, y_meters: float) -> Tuple[int, int]:
|
||||
"""
|
||||
Convert real-world meters to pixel coordinates.
|
||||
|
||||
Args:
|
||||
x_meters: X coordinate in meters
|
||||
y_meters: Y coordinate in meters
|
||||
|
||||
Returns:
|
||||
Tuple of (x_pixels, y_pixels)
|
||||
"""
|
||||
if self.image is None or self.width_meters is None or self.height_meters is None:
|
||||
return (0, 0)
|
||||
|
||||
img_height, img_width = self.image.shape[:2]
|
||||
x_pixels = int((x_meters / self.width_meters) * img_width)
|
||||
y_pixels = int((y_meters / self.height_meters) * img_height)
|
||||
return (x_pixels, y_pixels)
|
||||
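A quick worked example of the two linear mappings, assuming (purely for illustration) a 1000 x 600 pixel image that represents a 50 m x 30 m building:

img_w, img_h = 1000, 600
width_m, height_m = 50.0, 30.0

x_m = (400 / img_w) * width_m          # (400 / 1000) * 50 = 20.0 m
y_m = (150 / img_h) * height_m         # (150 /  600) * 30 =  7.5 m
x_px = int((x_m / width_m) * img_w)    # back to 400 px
y_px = int((y_m / height_m) * img_h)   # back to 150 px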
|
||||
def display_image_with_grid(self):
|
||||
"""Display the floor plan image with a grid overlay and current boundary/regions."""
|
||||
if self.image is None:
|
||||
print("No image loaded. Please load an image first.")
|
||||
return
|
||||
|
||||
fig, ax = plt.subplots(figsize=(15, 10))
|
||||
ax.imshow(self.image)
|
||||
|
||||
# Add a dense grid overlay with a line every 10 pixels
|
||||
img_height, img_width = self.image.shape[:2]
|
||||
grid_spacing = 10  # pixels between grid lines
|
||||
|
||||
# Vertical lines
|
||||
for x in range(0, img_width, grid_spacing):
|
||||
alpha = 0.6 if x % 50 == 0 else 0.4 # Thicker lines every 50 pixels
|
||||
linewidth = 1.5 if x % 50 == 0 else 0.8
|
||||
ax.axvline(x=x, color='darkred', alpha=alpha, linewidth=linewidth)
|
||||
|
||||
# Horizontal lines
|
||||
for y in range(0, img_height, grid_spacing):
|
||||
alpha = 0.6 if y % 50 == 0 else 0.4 # Thicker lines every 50 pixels
|
||||
linewidth = 1.5 if y % 50 == 0 else 0.8
|
||||
ax.axhline(y=y, color='darkred', alpha=alpha, linewidth=linewidth)
|
||||
|
||||
# Add coordinate labels every 50 pixels (major grid lines)
|
||||
for x in range(0, img_width, 50):
|
||||
ax.text(x, 10, f'{x}', color='darkred', fontsize=8, ha='center', weight='bold')
|
||||
for y in range(0, img_height, 50):
|
||||
ax.text(10, y, f'{y}', color='darkred', fontsize=8, va='center', weight='bold')
|
||||
|
||||
# Draw building boundary if defined
|
||||
if self.building_boundary:
|
||||
boundary_pixels = [self.meters_to_pixels(x, y) for x, y in self.building_boundary]
|
||||
boundary_pixels = [(x, img_height - y) for x, y in boundary_pixels] # Flip Y for image coordinates
|
||||
|
||||
# Draw boundary line
|
||||
boundary_x = [p[0] for p in boundary_pixels]
|
||||
boundary_y = [p[1] for p in boundary_pixels]
|
||||
ax.plot(boundary_x, boundary_y, 'b-', linewidth=3, label='Building Boundary')
|
||||
|
||||
# Draw prominent boundary points with dots and labels
|
||||
for i, (x, y) in enumerate(boundary_pixels):
|
||||
# Large, prominent dot
|
||||
ax.scatter(x, y, c='red', s=100, zorder=10, edgecolors='black', linewidth=2)
|
||||
|
||||
# Point number label
|
||||
ax.text(x + 5, y + 5, f'P{i+1}', fontsize=12, fontweight='bold',
|
||||
color='red', bbox=dict(boxstyle="round,pad=0.3", facecolor="white", alpha=0.8))
|
||||
|
||||
# Fill boundary area
|
||||
ax.fill(boundary_x, boundary_y, alpha=0.1, color='blue')
|
||||
|
||||
# Draw regions if any
|
||||
for x, y, w, h, material in self.regions:
|
||||
x_pix, y_pix = self.meters_to_pixels(x, y)
|
||||
w_pix, h_pix = self.meters_to_pixels(w, h)
|
||||
|
||||
# Get material color
|
||||
color = self.get_material_color(material)
|
||||
|
||||
rect = Rectangle((x_pix, y_pix), w_pix, h_pix,
|
||||
facecolor=color, alpha=0.6, edgecolor='black', linewidth=2)
|
||||
ax.add_patch(rect)
|
||||
|
||||
# Add label
|
||||
ax.text(x_pix + w_pix/2, y_pix + h_pix/2, material,
|
||||
ha='center', va='center', fontsize=10, fontweight='bold',
|
||||
bbox=dict(boxstyle="round,pad=0.3", facecolor="white", alpha=0.8))
|
||||
|
||||
# Set title based on current state
|
||||
title = "Floor Plan with Grid Overlay"
|
||||
if self.building_boundary:
|
||||
title += " and Building Boundary"
|
||||
if self.regions:
|
||||
title += " and Regions"
|
||||
title += "\nRed grid: 10-pixel spacing, thicker lines every 50 pixels"
|
||||
|
||||
ax.set_title(title)
|
||||
ax.set_xlabel('X (pixels)')
|
||||
ax.set_ylabel('Y (pixels)')
|
||||
plt.tight_layout()
|
||||
|
||||
# Save the image instead of showing it
|
||||
output_path = 'floor_plan_current_state.png'
|
||||
plt.savefig(output_path, dpi=150, bbox_inches='tight')
|
||||
plt.close()
|
||||
print(f"Current floor plan state saved to: {output_path}")
|
||||
print("This file updates automatically with all your changes.")
|
||||
print("Use this image as reference for entering coordinates.")
|
||||
|
||||
def add_region_interactive(self):
|
||||
"""
|
||||
Interactively add a region to the floor plan.
|
||||
The user selects a material and enters the rectangle's pixel coordinates (using the grid overlay as reference).
|
||||
"""
|
||||
if self.image is None:
|
||||
print("No image loaded. Please load an image first.")
|
||||
return
|
||||
|
||||
print("\n=== Adding Region ===")
|
||||
print("Available materials:")
|
||||
for i, material_name in enumerate(MATERIALS.keys(), 1):
|
||||
print(f"{i:2d}. {material_name}")
|
||||
|
||||
# Get material selection
|
||||
while True:
|
||||
try:
|
||||
material_choice = int(input("\nSelect material number: ")) - 1
|
||||
if 0 <= material_choice < len(MATERIALS):
|
||||
material_name = list(MATERIALS.keys())[material_choice]
|
||||
break
|
||||
else:
|
||||
print("Invalid selection. Please try again.")
|
||||
except ValueError:
|
||||
print("Please enter a valid number.")
|
||||
|
||||
# Get region coordinates
|
||||
print(f"\nSelected material: {material_name}")
|
||||
print("Enter region coordinates (in pixels, use grid as reference):")
|
||||
|
||||
try:
|
||||
x = int(input("X coordinate (left edge): "))
|
||||
y = int(input("Y coordinate (bottom edge): "))
|
||||
width = int(input("Width (in pixels): "))
|
||||
height = int(input("Height (in pixels): "))
|
||||
except ValueError:
|
||||
print("Invalid coordinates. Please enter numbers only.")
|
||||
return
|
||||
|
||||
# Convert to meters
|
||||
x_m, y_m = self.pixel_to_meters(x, y)
|
||||
w_m, h_m = self.pixel_to_meters(width, height)
|
||||
|
||||
# Check if region is inside building boundary
|
||||
if self.use_custom_boundary and self.building_boundary:
|
||||
# Check if the region corners are inside the boundary
|
||||
corners = [(x_m, y_m), (x_m + w_m, y_m), (x_m, y_m + h_m), (x_m + w_m, y_m + h_m)]
|
||||
inside_count = sum(1 for corner in corners if self.is_point_inside_building(*corner))
|
||||
|
||||
if inside_count < 2: # At least half the corners should be inside
|
||||
print("Warning: Region appears to be mostly outside the building boundary")
|
||||
proceed = input("Continue anyway? (y/n): ").lower()
|
||||
if proceed != 'y':
|
||||
return
|
||||
|
||||
# Add region
|
||||
self.regions.append((x_m, y_m, w_m, h_m, material_name))
|
||||
print(f"Added region: {material_name} at ({x_m:.1f}m, {y_m:.1f}m) with size {w_m:.1f}m x {h_m:.1f}m")
|
||||
|
||||
# Automatically refresh the display
|
||||
self.display_image_with_grid()
|
||||
|
||||
def remove_region(self):
|
||||
"""Remove the last added region."""
|
||||
if self.regions:
|
||||
removed = self.regions.pop()
|
||||
print(f"Removed region: {removed[4]} at ({removed[0]:.1f}m, {removed[1]:.1f}m)")
|
||||
# Automatically refresh the display
|
||||
self.display_image_with_grid()
|
||||
else:
|
||||
print("No regions to remove.")
|
||||
|
||||
def list_regions(self):
|
||||
"""List all defined regions."""
|
||||
if not self.regions:
|
||||
print("No regions defined.")
|
||||
return
|
||||
|
||||
print("\n=== Defined Regions ===")
|
||||
for i, (x, y, w, h, material) in enumerate(self.regions, 1):
|
||||
print(f"{i}. {material}: ({x:.1f}m, {y:.1f}m) - {w:.1f}m x {h:.1f}m")
|
||||
|
||||
def preview_regions(self):
|
||||
"""Display the floor plan with all defined regions overlaid - same as display_image_with_grid."""
|
||||
# Use the same unified display method
|
||||
self.display_image_with_grid()
|
||||
|
||||
def get_material_color(self, material_name: str) -> str:
|
||||
"""Get the color for a material."""
|
||||
material_colors = {
|
||||
'concrete': '#808080', 'glass': '#ADD8E6', 'wood': '#8B4513',
|
||||
'drywall': '#F5F5F5', 'metal': '#C0C0C0', 'brick': '#A52929',
|
||||
'plaster': '#FFFACD', 'tile': '#D3D3D3', 'stone': '#A9A9A9',
|
||||
'asphalt': '#696969', 'carpet': '#B22222', 'plastic': '#FFB6C1',
|
||||
'foam': '#F0E68C', 'fabric': '#DDA0DD', 'paper': '#FFF0F5',
|
||||
'ceramic': '#FAFAD2', 'rubber': '#FF6347', 'air': '#FFFFFF'
|
||||
}
|
||||
return material_colors.get(material_name.lower(), '#FFFFFF')
|
||||
|
||||
def generate_materials_grid(self) -> bool:
|
||||
"""
|
||||
Generate the materials grid from defined regions, respecting building boundary.
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
if not self.regions:
|
||||
print("No regions defined. Please add regions first.")
|
||||
return False
|
||||
|
||||
if self.width_meters is None or self.height_meters is None:
|
||||
print("Building dimensions not set. Please set dimensions first.")
|
||||
return False
|
||||
|
||||
# Create visualizer with bounding box dimensions
|
||||
self.visualizer = BuildingVisualizer(
|
||||
width=self.width_meters,
|
||||
height=self.height_meters,
|
||||
resolution=self.resolution
|
||||
)
|
||||
|
||||
# Add user-defined regions
|
||||
for x, y, w, h, material_name in self.regions:
|
||||
if material_name in MATERIALS:
|
||||
self.visualizer.add_material(MATERIALS[material_name], x, y, w, h)
|
||||
else:
|
||||
print(f"Warning: Unknown material '{material_name}', using air instead.")
|
||||
self.visualizer.add_material(MATERIALS['air'], x, y, w, h)
|
||||
|
||||
# If using custom boundary, mask areas outside the boundary
|
||||
if self.use_custom_boundary and self.building_boundary:
|
||||
self._apply_boundary_mask()
|
||||
|
||||
self.materials_grid = self.visualizer.materials_grid
|
||||
print(f"Generated materials grid: {len(self.materials_grid)} x {len(self.materials_grid[0])} cells")
|
||||
return True
|
||||
|
||||
def _apply_boundary_mask(self):
|
||||
"""
|
||||
Apply building boundary mask to the materials grid.
|
||||
Areas outside the boundary are filled with air (treated as free space).
|
||||
"""
|
||||
if not self.building_boundary or self.materials_grid is None:
|
||||
return
|
||||
|
||||
# Create boundary path
|
||||
boundary_path = Path(self.building_boundary)
|
||||
|
||||
# Apply mask to materials grid
|
||||
for i in range(len(self.materials_grid)):
|
||||
for j in range(len(self.materials_grid[0])):
|
||||
# Convert grid coordinates to real coordinates
|
||||
x = j * self.resolution
|
||||
y = i * self.resolution
|
||||
|
||||
# Check if point is inside boundary
|
||||
if not boundary_path.contains_point((x, y)):
|
||||
self.materials_grid[i][j] = MATERIALS['air'] # Use air instead of None
|
||||
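For large grids the per-cell loop above can become slow. A possibly faster variant (a sketch, not part of the module) tests all cell origins in one call with Path.contains_points; the cells outside the resulting mask could then be filled with MATERIALS['air'] in a single pass:

import numpy as np
from matplotlib.path import Path

def boundary_mask(boundary_points, n_rows, n_cols, resolution):
    """Boolean array, True for grid cells whose origin lies inside the boundary."""
    ys, xs = np.mgrid[0:n_rows, 0:n_cols]
    points = np.column_stack([(xs * resolution).ravel(), (ys * resolution).ravel()])
    inside = Path(boundary_points).contains_points(points)
    return inside.reshape(n_rows, n_cols)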
|
||||
def save_configuration(self, output_path: str):
|
||||
"""
|
||||
Save the floor plan configuration to a JSON file.
|
||||
|
||||
Args:
|
||||
output_path: Path to save the configuration file
|
||||
"""
|
||||
config = {
|
||||
'image_path': self.image_path,
|
||||
'width_meters': self.width_meters,
|
||||
'height_meters': self.height_meters,
|
||||
'resolution': self.resolution,
|
||||
'use_custom_boundary': self.use_custom_boundary,
|
||||
'building_boundary': self.building_boundary,
|
||||
'regions': [
|
||||
{
|
||||
'x': x, 'y': y, 'width': w, 'height': h, 'material': material
|
||||
}
|
||||
for x, y, w, h, material in self.regions
|
||||
]
|
||||
}
|
||||
|
||||
with open(output_path, 'w') as f:
|
||||
json.dump(config, f, indent=2)
|
||||
|
||||
print(f"Configuration saved to: {output_path}")
|
||||
|
||||
def load_configuration(self, config_path: str) -> bool:
|
||||
"""
|
||||
Load a floor plan configuration from a JSON file.
|
||||
|
||||
Args:
|
||||
config_path: Path to the configuration file
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
with open(config_path, 'r') as f:
|
||||
config = json.load(f)
|
||||
|
||||
# Load image (optional - don't fail if image is missing)
|
||||
if 'image_path' in config:
|
||||
if os.path.exists(config['image_path']):
|
||||
if not self.load_image(config['image_path']):
|
||||
print(f"Warning: Could not load image from {config['image_path']}, but continuing with configuration")
|
||||
else:
|
||||
print(f"Warning: Image file not found at {config['image_path']}, but continuing with configuration")
|
||||
|
||||
# Set dimensions
|
||||
if 'width_meters' in config and 'height_meters' in config:
|
||||
self.width_meters = config['width_meters']
|
||||
self.height_meters = config['height_meters']
|
||||
else:
|
||||
print("Error: Building dimensions not found in configuration")
|
||||
return False
|
||||
|
||||
# Load custom boundary if present
|
||||
if 'use_custom_boundary' in config and config['use_custom_boundary']:
|
||||
if 'building_boundary' in config:
|
||||
self.building_boundary = config['building_boundary']
|
||||
self.use_custom_boundary = True
|
||||
print(f"Loaded custom building boundary with {len(self.building_boundary)} points")
|
||||
else:
|
||||
print("Warning: Custom boundary flag set but no boundary data found")
|
||||
|
||||
# Load regions
|
||||
self.regions = []
|
||||
if 'regions' in config:
|
||||
for region in config['regions']:
|
||||
self.regions.append((
|
||||
region['x'], region['y'], region['width'], region['height'], region['material']
|
||||
))
|
||||
|
||||
print(f"Configuration loaded from: {config_path}")
|
||||
print(f" Building dimensions: {self.width_meters}m x {self.height_meters}m")
|
||||
print(f" Custom boundary: {'Yes' if self.use_custom_boundary else 'No'}")
|
||||
print(f" Number of regions: {len(self.regions)}")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error loading configuration: {e}")
|
||||
return False
|
||||
|
||||
def interactive_setup(self):
|
||||
"""
|
||||
Run an interactive setup session for the floor plan with custom boundary support.
|
||||
Handles dimensions and boundaries robustly, with clear user guidance and input validation.
|
||||
"""
|
||||
print("=== Enhanced Floor Plan Processor Interactive Setup ===")
|
||||
print("This setup supports custom building boundaries (polygon shapes)")
|
||||
|
||||
# --- Load image ---
|
||||
while True:
|
||||
image_path = input("\nEnter path to floor plan image (JPEG/PNG): ").strip()
|
||||
if self.load_image(image_path):
|
||||
break
|
||||
print("Could not load image. Please try again.")
|
||||
|
||||
# --- Set dimensions (must be > 0) ---
|
||||
while True:
|
||||
try:
|
||||
width = float(input("Enter building width in meters: "))
|
||||
height = float(input("Enter building height in meters: "))
|
||||
if width > 0 and height > 0:
|
||||
self.set_building_dimensions(width, height)
|
||||
break
|
||||
else:
|
||||
print("Width and height must be positive numbers.")
|
||||
except ValueError:
|
||||
print("Please enter valid numbers.")
|
||||
|
||||
# --- Clear any existing boundary or regions ---
|
||||
self.building_boundary = None
|
||||
self.regions = []
|
||||
self.use_custom_boundary = False
|
||||
|
||||
# --- Create grid overlay ---
|
||||
print("\nCreating grid overlay for your floor plan...")
|
||||
self.display_image_with_grid()
|
||||
print("✓ Grid overlay created! Check 'floor_plan_current_state.png'")
|
||||
print("Use this image as reference for entering coordinates.")
|
||||
|
||||
# --- Choose boundary type ---
|
||||
print("\n=== Building Boundary Setup ===")
|
||||
print("1. Use rectangular boundary (traditional)")
|
||||
print("2. Define custom polygon boundary by coordinates (recommended)")
|
||||
print("3. Define custom polygon boundary by clicking grid coordinates")
|
||||
|
||||
while True:
|
||||
try:
|
||||
choice = int(input("Choose boundary type (1, 2, or 3): "))
|
||||
if choice == 1:
|
||||
# Rectangular boundary: set as 4-corner polygon
|
||||
print("\nDefining rectangular boundary...")
|
||||
# Confirm dimensions
|
||||
print(f"Current dimensions: width={self.width_meters}m, height={self.height_meters}m")
|
||||
confirm = input("Use these dimensions? (y/n): ").strip().lower()
|
||||
if confirm != 'y':
|
||||
while True:
|
||||
try:
|
||||
width = float(input("Enter building width in meters: "))
|
||||
height = float(input("Enter building height in meters: "))
|
||||
if width > 0 and height > 0:
|
||||
self.set_building_dimensions(width, height)
|
||||
break
|
||||
else:
|
||||
print("Width and height must be positive numbers.")
|
||||
except ValueError:
|
||||
print("Please enter valid numbers.")
|
||||
# Set rectangular boundary as polygon
|
||||
self.building_boundary = [
|
||||
(0, 0),
|
||||
(self.width_meters, 0),
|
||||
(self.width_meters, self.height_meters),
|
||||
(0, self.height_meters),
|
||||
(0, 0)
|
||||
]
|
||||
self.use_custom_boundary = False
|
||||
print(f"Rectangular boundary set: {self.building_boundary}")
|
||||
break
|
||||
elif choice == 2:
|
||||
# Custom polygon boundary by coordinates
|
||||
self.define_boundary_by_coordinates()
|
||||
break
|
||||
elif choice == 3:
|
||||
# Custom polygon boundary by clicking grid coordinates
|
||||
self.define_boundary_by_grid_clicking()
|
||||
break
|
||||
else:
|
||||
print("Invalid choice. Please enter 1, 2, or 3.")
|
||||
except ValueError:
|
||||
print("Please enter a valid number.")
|
||||
|
||||
# --- Display image with grid and boundary ---
|
||||
self.display_image_with_grid()
|
||||
|
||||
# --- Interactive region definition ---
|
||||
while True:
|
||||
print("\n=== Region Management ===")
|
||||
print("1. Add region")
|
||||
print("2. Remove last region")
|
||||
print("3. List regions")
|
||||
print("4. Show current state (refresh display)")
|
||||
print("5. Generate materials grid")
|
||||
print("6. Save configuration")
|
||||
print("7. Exit")
|
||||
|
||||
try:
|
||||
choice = int(input("Choose option (1-7): "))
|
||||
|
||||
if choice == 1:
|
||||
self.add_region_interactive()
|
||||
elif choice == 2:
|
||||
self.remove_region()
|
||||
elif choice == 3:
|
||||
self.list_regions()
|
||||
elif choice == 4:
|
||||
self.display_image_with_grid()
|
||||
elif choice == 5:
|
||||
if self.generate_materials_grid():
|
||||
print("Materials grid generated successfully!")
|
||||
else:
|
||||
print("Failed to generate materials grid.")
|
||||
elif choice == 6:
|
||||
output_path = input("Enter output file path (e.g., my_floor_plan.json): ").strip()
|
||||
self.save_configuration(output_path)
|
||||
elif choice == 7:
|
||||
break
|
||||
else:
|
||||
print("Invalid choice. Please enter a number between 1 and 7.")
|
||||
|
||||
except ValueError:
|
||||
print("Please enter a valid number.")
|
||||
|
||||
print("Setup completed!")
|
||||
|
||||
def get_materials_grid(self):
|
||||
"""Get the generated materials grid."""
|
||||
return self.materials_grid
|
||||
|
||||
def get_visualizer(self):
|
||||
"""Get the building visualizer."""
|
||||
return self.visualizer
|
||||
|
||||
def generate_ap_placement_visualization(self, ap_locations: dict, rssi_grids: Optional[List] = None,
|
||||
output_path: str = "ap_placement_floor_plan.png"):
|
||||
"""
|
||||
Generate AP placement visualization on the floor plan image.
|
||||
|
||||
Args:
|
||||
ap_locations: Dictionary of AP locations
|
||||
rssi_grids: Optional list of RSSI grids for coverage visualization
|
||||
output_path: Path to save the visualization
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
if self.visualizer is None:
|
||||
print("No visualizer available. Please generate materials grid first.")
|
||||
return False
|
||||
|
||||
# Set floor plan image if available
|
||||
if self.image_path and os.path.exists(self.image_path):
|
||||
self.visualizer.set_floor_plan_image(self.image_path)
|
||||
|
||||
# Generate visualization
|
||||
if rssi_grids:
|
||||
return self.visualizer.plot_coverage_on_floor_plan_image(
|
||||
rssi_grids, ap_locations, output_path, show_regions=True
|
||||
)
|
||||
else:
|
||||
# Just show AP placement without coverage
|
||||
return self.visualizer.plot_ap_placement_on_floor_plan(
|
||||
ap_locations, None, output_path
|
||||
)
|
||||
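A hypothetical end-to-end use of the processor above, for orientation only; the import path assumes the repository root is on the Python path, and the file path, dimensions and region values are placeholders (interactive_setup() walks through the same steps interactively):

from src.enhanced_floor_plan_processor import EnhancedFloorPlanProcessor

proc = EnhancedFloorPlanProcessor()
proc.load_image("floor_plans/office.png")                     # placeholder path
proc.set_building_dimensions(50.0, 30.0)
proc.set_custom_building_boundary(
    [(0, 0), (50, 0), (50, 20), (30, 20), (30, 30), (0, 30)]  # L-shaped perimeter
)
proc.regions.append((2.0, 2.0, 8.0, 6.0, "concrete"))         # x, y, w, h, material (metres)
if proc.generate_materials_grid():
    proc.save_configuration("my_floor_plan.json")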
939
src/floor_plan_analyzer.py
Normal file
@@ -0,0 +1,939 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Comprehensive Floor Plan Analyzer
|
||||
Maps building regions with coordinates, materials, and boundaries for AP placement and interference analysis.
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
import logging
|
||||
from typing import Dict, List, Tuple, Optional, Any
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
import json
|
||||
import os
|
||||
|
||||
class MaterialType(Enum):
|
||||
"""Material types for building regions."""
|
||||
AIR = "air"
|
||||
BRICK = "brick"
|
||||
CONCRETE = "concrete"
|
||||
DRYWALL = "drywall"
|
||||
GLASS = "glass"
|
||||
CARPET = "carpet"
|
||||
TILE = "tile"
|
||||
METAL = "metal"
|
||||
WOOD = "wood"
|
||||
PLASTIC = "plastic"
|
||||
FABRIC = "fabric"
|
||||
STONE = "stone"
|
||||
|
||||
@dataclass
|
||||
class RegionBoundary:
|
||||
"""Defines the boundary of a building region."""
|
||||
x_min: float
|
||||
y_min: float
|
||||
x_max: float
|
||||
y_max: float
|
||||
z_min: float = 0.0
|
||||
z_max: float = 3.0 # Default ceiling height
|
||||
|
||||
@property
|
||||
def width(self) -> float:
|
||||
return self.x_max - self.x_min
|
||||
|
||||
@property
|
||||
def height(self) -> float:
|
||||
return self.y_max - self.y_min
|
||||
|
||||
@property
|
||||
def depth(self) -> float:
|
||||
return self.z_max - self.z_min
|
||||
|
||||
@property
|
||||
def area(self) -> float:
|
||||
return self.width * self.height
|
||||
|
||||
@property
|
||||
def volume(self) -> float:
|
||||
return self.area * self.depth
|
||||
|
||||
@property
|
||||
def center(self) -> Tuple[float, float, float]:
|
||||
return (
|
||||
(self.x_min + self.x_max) / 2,
|
||||
(self.y_min + self.y_max) / 2,
|
||||
(self.z_min + self.z_max) / 2
|
||||
)
|
||||
|
||||
def contains_point(self, x: float, y: float, z: float = 1.5) -> bool:
|
||||
"""Check if a point is inside this region."""
|
||||
return (self.x_min <= x <= self.x_max and
|
||||
self.y_min <= y <= self.y_max and
|
||||
self.z_min <= z <= self.z_max)
|
||||
|
||||
def intersects(self, other: 'RegionBoundary') -> bool:
|
||||
"""Check if this region intersects with another."""
|
||||
return not (self.x_max < other.x_min or self.x_min > other.x_max or
|
||||
self.y_max < other.y_min or self.y_min > other.y_max or
|
||||
self.z_max < other.z_min or self.z_min > other.z_max)
|
||||
|
||||
@dataclass
|
||||
class BuildingRegion:
|
||||
"""Represents a region in the building with full metadata."""
|
||||
id: str
|
||||
name: str
|
||||
region_type: str # 'room', 'corridor', 'wall', 'open_space', 'facility'
|
||||
boundary: RegionBoundary
|
||||
material: MaterialType
|
||||
material_properties: Dict[str, Any]
|
||||
usage: str = "general"
|
||||
priority: int = 1 # Higher priority regions get APs first
|
||||
user_density: float = 0.1 # Users per square meter
|
||||
device_density: float = 0.15 # Devices per square meter
|
||||
interference_sensitivity: float = 1.0  # Multiplier; values above 1.0 mark the region as interference-sensitive
|
||||
coverage_requirement: float = 0.9 # Required coverage percentage
|
||||
polygon: Optional[List[Tuple[float, float]]] = None # Polygonal boundary points
|
||||
is_polygonal: bool = False # Whether this region uses polygon instead of bounding box
|
||||
|
||||
def __post_init__(self):
|
||||
"""Set default material properties based on material type."""
|
||||
if not self.material_properties:
|
||||
self.material_properties = self._get_default_material_properties()
|
||||
|
||||
# Determine if this is a polygonal region
|
||||
if self.polygon is not None and len(self.polygon) >= 3:
|
||||
self.is_polygonal = True
|
||||
# Update boundary to encompass the polygon
|
||||
xs = [pt[0] for pt in self.polygon]
|
||||
ys = [pt[1] for pt in self.polygon]
|
||||
self.boundary = RegionBoundary(
|
||||
x_min=min(xs), y_min=min(ys),
|
||||
x_max=max(xs), y_max=max(ys),
|
||||
z_min=self.boundary.z_min, z_max=self.boundary.z_max
|
||||
)
|
||||
|
||||
def _get_default_material_properties(self) -> Dict[str, Any]:
|
||||
"""Get default properties for the material type."""
|
||||
defaults = {
|
||||
MaterialType.AIR: {
|
||||
'attenuation_db': 0.0,
|
||||
'reflection_coefficient': 0.0,
|
||||
'transmission_coefficient': 1.0,
|
||||
'frequency_dependent': False
|
||||
},
|
||||
MaterialType.BRICK: {
|
||||
'attenuation_db': 8.0,
|
||||
'reflection_coefficient': 0.3,
|
||||
'transmission_coefficient': 0.1,
|
||||
'frequency_dependent': True
|
||||
},
|
||||
MaterialType.CONCRETE: {
|
||||
'attenuation_db': 12.0,
|
||||
'reflection_coefficient': 0.4,
|
||||
'transmission_coefficient': 0.05,
|
||||
'frequency_dependent': True
|
||||
},
|
||||
MaterialType.DRYWALL: {
|
||||
'attenuation_db': 3.0,
|
||||
'reflection_coefficient': 0.2,
|
||||
'transmission_coefficient': 0.3,
|
||||
'frequency_dependent': True
|
||||
},
|
||||
MaterialType.GLASS: {
|
||||
'attenuation_db': 2.0,
|
||||
'reflection_coefficient': 0.1,
|
||||
'transmission_coefficient': 0.8,
|
||||
'frequency_dependent': True
|
||||
},
|
||||
MaterialType.CARPET: {
|
||||
'attenuation_db': 1.0,
|
||||
'reflection_coefficient': 0.1,
|
||||
'transmission_coefficient': 0.9,
|
||||
'frequency_dependent': False
|
||||
},
|
||||
MaterialType.TILE: {
|
||||
'attenuation_db': 1.5,
|
||||
'reflection_coefficient': 0.2,
|
||||
'transmission_coefficient': 0.8,
|
||||
'frequency_dependent': False
|
||||
}
|
||||
}
|
||||
return defaults.get(self.material, defaults[MaterialType.AIR])
|
||||
|
||||
def contains_point(self, x: float, y: float, z: float = 1.5) -> bool:
|
||||
"""Check if a point is inside this region (supports both bounding box and polygon)."""
|
||||
# First check if point is within bounding box (quick rejection)
|
||||
if not self.boundary.contains_point(x, y, z):
|
||||
return False
|
||||
|
||||
# If it's a polygonal region, do detailed polygon test
|
||||
if self.is_polygonal and self.polygon:
|
||||
return point_in_polygon(x, y, self.polygon)
|
||||
|
||||
# Otherwise, use bounding box
|
||||
return True
|
||||
|
||||
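contains_point defers the fine-grained test to a point_in_polygon helper defined elsewhere in this module. For reference, a standard ray-casting version (a sketch, not necessarily the exact helper used here) looks like this:

def point_in_polygon(x: float, y: float, polygon) -> bool:
    """Ray casting: toggle 'inside' each time a horizontal ray from (x, y) crosses an edge."""
    inside = False
    n = len(polygon)
    for i in range(n):
        x1, y1 = polygon[i]
        x2, y2 = polygon[(i + 1) % n]
        crosses = (y1 > y) != (y2 > y)          # edge straddles the ray's y level
        if crosses and x < x1 + (y - y1) * (x2 - x1) / (y2 - y1):
            inside = not inside
    return inside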
def get_centroid(self) -> Tuple[float, float, float]:
|
||||
"""Get the centroid of the region (center of mass for polygons)."""
|
||||
if self.is_polygonal and self.polygon:
|
||||
# Calculate centroid of polygon
|
||||
n = len(self.polygon)
|
||||
if n == 0:
|
||||
return self.boundary.center
|
||||
|
||||
# Shoelace formula for polygon centroid
|
||||
cx = cy = 0.0
|
||||
area = 0.0
|
||||
|
||||
for i in range(n):
|
||||
j = (i + 1) % n
|
||||
xi, yi = self.polygon[i]
|
||||
xj, yj = self.polygon[j]
|
||||
|
||||
cross = xi * yj - xj * yi
|
||||
cx += (xi + xj) * cross
|
||||
cy += (yi + yj) * cross
|
||||
area += cross
|
||||
|
||||
if abs(area) < 1e-10: # Degenerate polygon
|
||||
return self.boundary.center
|
||||
|
||||
area /= 2.0
|
||||
cx /= (6.0 * area)
|
||||
cy /= (6.0 * area)
|
||||
|
||||
return (cx, cy, (self.boundary.z_min + self.boundary.z_max) / 2)
|
||||
else:
|
||||
return self.boundary.center
|
||||
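The loop above is the standard shoelace centroid; written out for reference, with vertices (x_i, y_i) and indices taken modulo n:

\[
A = \tfrac{1}{2}\sum_{i=0}^{n-1}\left(x_i y_{i+1} - x_{i+1} y_i\right), \qquad
C_x = \frac{1}{6A}\sum_{i=0}^{n-1}(x_i + x_{i+1})\left(x_i y_{i+1} - x_{i+1} y_i\right), \qquad
C_y = \frac{1}{6A}\sum_{i=0}^{n-1}(y_i + y_{i+1})\left(x_i y_{i+1} - x_{i+1} y_i\right).
\]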
|
||||
def get_area(self) -> float:
|
||||
"""Calculate the area of the region."""
|
||||
if self.is_polygonal and self.polygon:
|
||||
# Calculate polygon area using shoelace formula
|
||||
n = len(self.polygon)
|
||||
if n < 3:
|
||||
return 0.0
|
||||
|
||||
area = 0.0
|
||||
for i in range(n):
|
||||
j = (i + 1) % n
|
||||
xi, yi = self.polygon[i]
|
||||
xj, yj = self.polygon[j]
|
||||
area += xi * yj - xj * yi
|
||||
|
||||
return abs(area) / 2.0
|
||||
else:
|
||||
return self.boundary.area
|
||||
|
||||
def get_perimeter(self) -> float:
|
||||
"""Calculate the perimeter of the region."""
|
||||
if self.is_polygonal and self.polygon:
|
||||
# Calculate polygon perimeter
|
||||
n = len(self.polygon)
|
||||
if n < 2:
|
||||
return 0.0
|
||||
|
||||
perimeter = 0.0
|
||||
for i in range(n):
|
||||
j = (i + 1) % n
|
||||
xi, yi = self.polygon[i]
|
||||
xj, yj = self.polygon[j]
|
||||
perimeter += np.sqrt((xj - xi)**2 + (yj - yi)**2)
|
||||
|
||||
return perimeter
|
||||
else:
|
||||
return 2 * (self.boundary.width + self.boundary.height)
|
||||
|
||||
def get_optimal_ap_positions(self, num_aps: int = 1) -> List[Tuple[float, float, float]]:
|
||||
"""Get optimal AP positions within this region."""
|
||||
if num_aps <= 0:
|
||||
return []
|
||||
|
||||
if self.is_polygonal and self.polygon:
|
||||
# For polygonal regions, use centroid and distribute around it
|
||||
centroid = self.get_centroid()
|
||||
if num_aps == 1:
|
||||
return [centroid]
|
||||
|
||||
# For multiple APs, distribute them within the polygon
|
||||
positions = []
|
||||
area = self.get_area()
|
||||
radius = np.sqrt(area / (np.pi * num_aps)) * 0.7  # 70% of the radius of a circle covering an equal share of the area
|
||||
|
||||
# Place first AP at centroid
|
||||
positions.append(centroid)
|
||||
|
||||
# Place remaining APs in a pattern within the polygon
|
||||
for i in range(1, num_aps):
|
||||
angle = 2 * np.pi * i / num_aps
|
||||
distance = radius * (0.5 + 0.5 * (i % 2))  # Alternate between half and full radius
|
||||
|
||||
x = centroid[0] + distance * np.cos(angle)
|
||||
y = centroid[1] + distance * np.sin(angle)
|
||||
z = centroid[2]
|
||||
|
||||
# Ensure point is within polygon
|
||||
if self.contains_point(x, y, z):
|
||||
positions.append((x, y, z))
|
||||
else:
|
||||
# Fallback to centroid
|
||||
positions.append(centroid)
|
||||
|
||||
return positions
|
||||
else:
|
||||
# For rectangular regions, use grid placement
|
||||
if num_aps == 1:
|
||||
return [self.boundary.center]
|
||||
|
||||
# Calculate grid dimensions
|
||||
cols = int(np.ceil(np.sqrt(num_aps)))
|
||||
rows = int(np.ceil(num_aps / cols))
|
||||
|
||||
positions = []
|
||||
for i in range(num_aps):
|
||||
col = i % cols
|
||||
row = i // cols
|
||||
|
||||
x = self.boundary.x_min + (col + 0.5) * self.boundary.width / cols
|
||||
y = self.boundary.y_min + (row + 0.5) * self.boundary.height / rows
|
||||
z = (self.boundary.z_min + self.boundary.z_max) / 2
|
||||
|
||||
positions.append((x, y, z))
|
||||
|
||||
return positions
|
||||
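A hypothetical check of the rectangular branch, assuming the dataclasses above are in scope: a 10 m x 8 m room asked for four APs gets a 2 x 2 grid at the cell centres, which follows directly from the arithmetic above.

room = BuildingRegion(
    id="demo", name="Demo Room", region_type="room",
    boundary=RegionBoundary(0, 0, 10, 8),
    material=MaterialType.CARPET, material_properties={},
)
print(room.get_optimal_ap_positions(4))
# [(2.5, 2.0, 1.5), (7.5, 2.0, 1.5), (2.5, 6.0, 1.5), (7.5, 6.0, 1.5)]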
|
||||
class FloorPlanAnalyzer:
|
||||
"""Comprehensive floor plan analyzer for building regions and materials."""
|
||||
|
||||
def __init__(self, building_width: float, building_length: float, building_height: float):
|
||||
self.building_width = building_width
|
||||
self.building_length = building_length
|
||||
self.building_height = building_height
|
||||
self.regions: List[BuildingRegion] = []
|
||||
self.materials_grid = None
|
||||
self.resolution = 0.2 # meters per grid cell
|
||||
|
||||
def analyze_complex_office_layout(self) -> List[BuildingRegion]:
|
||||
"""Analyze and create a comprehensive office building layout."""
|
||||
logging.info("Creating comprehensive office building layout analysis...")
|
||||
|
||||
regions = []
|
||||
region_id = 1
|
||||
|
||||
# Define building perimeter
|
||||
wall_thickness = 0.3
|
||||
perimeter = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Building Perimeter",
|
||||
region_type="wall",
|
||||
boundary=RegionBoundary(0, 0, self.building_width, self.building_length),
|
||||
material=MaterialType.BRICK,
|
||||
material_properties={},
|
||||
usage="structural",
|
||||
priority=1
|
||||
)
|
||||
regions.append(perimeter)
|
||||
region_id += 1
|
||||
|
||||
# Define internal regions based on typical office layout
|
||||
internal_regions = self._define_internal_regions(region_id)
|
||||
regions.extend(internal_regions)
|
||||
|
||||
self.regions = regions
|
||||
logging.info(f"Created {len(regions)} building regions")
|
||||
|
||||
# Generate materials grid
|
||||
self._generate_materials_grid()
|
||||
|
||||
return regions
|
||||
|
||||
def _define_internal_regions(self, start_id: int) -> List[BuildingRegion]:
|
||||
"""Define internal building regions with realistic office layout."""
|
||||
regions = []
|
||||
region_id = start_id
|
||||
wall_thickness = 0.3
|
||||
|
||||
# Lobby and Reception Area
|
||||
lobby = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Lobby",
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(wall_thickness, wall_thickness, 8.0, 6.0),
|
||||
material=MaterialType.TILE,
|
||||
material_properties={},
|
||||
usage="reception",
|
||||
priority=3,
|
||||
user_density=0.05,
|
||||
device_density=0.1
|
||||
)
|
||||
regions.append(lobby)
|
||||
region_id += 1
|
||||
|
||||
# Conference Rooms
|
||||
conf_rooms = [
|
||||
{"name": "Conference Room 1", "x": wall_thickness + 10, "y": self.building_length - 8, "w": 8, "h": 6},
|
||||
{"name": "Conference Room 2", "x": wall_thickness + 20, "y": self.building_length - 6, "w": 6, "h": 4},
|
||||
{"name": "Conference Room 3", "x": wall_thickness + 28, "y": self.building_length - 5, "w": 4, "h": 3}
|
||||
]
|
||||
|
||||
for conf in conf_rooms:
|
||||
room = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name=conf["name"],
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(conf["x"], conf["y"], conf["x"] + conf["w"], conf["y"] + conf["h"]),
|
||||
material=MaterialType.GLASS,
|
||||
material_properties={},
|
||||
usage="meeting",
|
||||
priority=4,
|
||||
user_density=0.3,
|
||||
device_density=0.4,
|
||||
interference_sensitivity=1.2
|
||||
)
|
||||
regions.append(room)
|
||||
region_id += 1
|
||||
|
||||
# Executive Offices
|
||||
exec_offices = [
|
||||
{"name": "CEO Office", "x": self.building_width - 8, "y": self.building_length - 10, "w": 8, "h": 10},
|
||||
{"name": "CFO Office", "x": self.building_width - 6, "y": self.building_length - 6, "w": 6, "h": 6}
|
||||
]
|
||||
|
||||
for office in exec_offices:
|
||||
room = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name=office["name"],
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(office["x"], office["y"], office["x"] + office["w"], office["y"] + office["h"]),
|
||||
material=MaterialType.CARPET,
|
||||
material_properties={},
|
||||
usage="executive",
|
||||
priority=5,
|
||||
user_density=0.1,
|
||||
device_density=0.2,
|
||||
interference_sensitivity=1.5
|
||||
)
|
||||
regions.append(room)
|
||||
region_id += 1
|
||||
|
||||
# Department Areas
|
||||
dept_areas = [
|
||||
{"name": "IT Department", "x": wall_thickness + 2, "y": wall_thickness + 8, "w": 12, "h": 8},
|
||||
{"name": "Marketing Department", "x": wall_thickness + 16, "y": wall_thickness + 8, "w": 10, "h": 8},
|
||||
{"name": "Sales Department", "x": wall_thickness + 28, "y": wall_thickness + 8, "w": 10, "h": 8}
|
||||
]
|
||||
|
||||
for dept in dept_areas:
|
||||
room = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name=dept["name"],
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(dept["x"], dept["y"], dept["x"] + dept["w"], dept["y"] + dept["h"]),
|
||||
material=MaterialType.CARPET,
|
||||
material_properties={},
|
||||
usage="department",
|
||||
priority=4,
|
||||
user_density=0.2,
|
||||
device_density=0.3
|
||||
)
|
||||
regions.append(room)
|
||||
region_id += 1
|
||||
|
||||
# Individual Offices
|
||||
office_width, office_height = 4.0, 5.0
|
||||
office_spacing = 0.5
|
||||
|
||||
for row in range(3):
|
||||
for col in range(3):
|
||||
x = wall_thickness + 2 + col * (office_width + office_spacing)
|
||||
y = wall_thickness + 18 + row * (office_height + 0.5)
|
||||
|
||||
office = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name=f"Office {row*3 + col + 1}",
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(x, y, x + office_width, y + office_height),
|
||||
material=MaterialType.DRYWALL,
|
||||
material_properties={},
|
||||
usage="individual",
|
||||
priority=3,
|
||||
user_density=0.1,
|
||||
device_density=0.15
|
||||
)
|
||||
regions.append(office)
|
||||
region_id += 1
|
||||
|
||||
# Facilities
|
||||
facilities = [
|
||||
{"name": "Break Room", "x": wall_thickness + 16, "y": wall_thickness + 18, "w": 6, "h": 4, "material": MaterialType.TILE},
|
||||
{"name": "Kitchen", "x": wall_thickness + 16, "y": wall_thickness + 24, "w": 6, "h": 3, "material": MaterialType.TILE},
|
||||
{"name": "Server Room", "x": wall_thickness + 2, "y": wall_thickness + 40, "w": 4, "h": 6, "material": MaterialType.CONCRETE},
|
||||
{"name": "Storage", "x": wall_thickness + 8, "y": wall_thickness + 40, "w": 4, "h": 6, "material": MaterialType.DRYWALL},
|
||||
{"name": "Men's Restroom", "x": wall_thickness + 30, "y": wall_thickness + 18, "w": 3, "h": 4, "material": MaterialType.TILE},
|
||||
{"name": "Women's Restroom", "x": wall_thickness + 35, "y": wall_thickness + 18, "w": 3, "h": 4, "material": MaterialType.TILE},
|
||||
{"name": "Print Room", "x": wall_thickness + 30, "y": wall_thickness + 24, "w": 4, "h": 3, "material": MaterialType.DRYWALL}
|
||||
]
|
||||
|
||||
for facility in facilities:
|
||||
room = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name=facility["name"],
|
||||
region_type="facility",
|
||||
boundary=RegionBoundary(facility["x"], facility["y"],
|
||||
facility["x"] + facility["w"], facility["y"] + facility["h"]),
|
||||
material=facility["material"],
|
||||
material_properties={},
|
||||
usage="facility",
|
||||
priority=2,
|
||||
user_density=0.05,
|
||||
device_density=0.1
|
||||
)
|
||||
regions.append(room)
|
||||
region_id += 1
|
||||
|
||||
# Phone Booths
|
||||
booths = [
|
||||
{"x": wall_thickness + 36, "y": wall_thickness + 8, "w": 2, "h": 2},
|
||||
{"x": wall_thickness + 36, "y": wall_thickness + 12, "w": 2, "h": 2}
|
||||
]
|
||||
|
||||
for i, booth in enumerate(booths):
|
||||
room = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name=f"Phone Booth {i+1}",
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(booth["x"], booth["y"],
|
||||
booth["x"] + booth["w"], booth["y"] + booth["h"]),
|
||||
material=MaterialType.GLASS,
|
||||
material_properties={},
|
||||
usage="private",
|
||||
priority=2,
|
||||
user_density=0.1,
|
||||
device_density=0.1
|
||||
)
|
||||
regions.append(room)
|
||||
region_id += 1
|
||||
|
||||
# Collaboration Space
|
||||
collab = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Collaboration Space",
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(wall_thickness + 16, wall_thickness + 28,
|
||||
wall_thickness + 28, wall_thickness + 36),
|
||||
material=MaterialType.CARPET,
|
||||
material_properties={},
|
||||
usage="collaboration",
|
||||
priority=4,
|
||||
user_density=0.15,
|
||||
device_density=0.25,
|
||||
interference_sensitivity=1.1
|
||||
)
|
||||
regions.append(collab)
|
||||
region_id += 1
|
||||
|
||||
# Corridors
|
||||
corridors = [
|
||||
{"name": "Main Corridor", "x": wall_thickness + 2, "y": wall_thickness + 16, "w": self.building_width - 2*wall_thickness - 4, "h": 1.5},
|
||||
{"name": "Vertical Corridor", "x": wall_thickness + 15, "y": wall_thickness + 8, "w": 1.5, "h": 8}
|
||||
]
|
||||
|
||||
for corridor in corridors:
|
||||
room = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name=corridor["name"],
|
||||
region_type="corridor",
|
||||
boundary=RegionBoundary(corridor["x"], corridor["y"],
|
||||
corridor["x"] + corridor["w"], corridor["y"] + corridor["h"]),
|
||||
material=MaterialType.TILE,
|
||||
material_properties={},
|
||||
usage="circulation",
|
||||
priority=1,
|
||||
user_density=0.02,
|
||||
device_density=0.05
|
||||
)
|
||||
regions.append(room)
|
||||
region_id += 1
|
||||
|
||||
return regions
|
||||
|
||||
def _generate_materials_grid(self):
|
||||
"""Generate a 3D materials grid based on the regions."""
|
||||
grid_width = int(self.building_width / self.resolution)
|
||||
grid_height = int(self.building_length / self.resolution)
|
||||
grid_depth = int(self.building_height / self.resolution)
|
||||
|
||||
# Initialize with air
|
||||
self.materials_grid = [[[MaterialType.AIR for _ in range(grid_width)]
|
||||
for _ in range(grid_height)]
|
||||
for _ in range(grid_depth)]
|
||||
|
||||
# Fill in materials based on regions
|
||||
for region in self.regions:
|
||||
self._fill_region_in_grid(region)
|
||||
|
||||
logging.info(f"Generated 3D materials grid: {grid_depth}x{grid_height}x{grid_width}")
|
||||
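For a sense of scale: assuming, purely for illustration, a 40 m x 50 m x 3 m building at the default 0.2 m resolution, the grid has 200 x 250 x 15 = 750,000 cells, so the per-cell Python loops that fill it dominate analysis time for large buildings.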
|
||||
def _fill_region_in_grid(self, region: BuildingRegion):
|
||||
"""Fill a region's material into the 3D grid."""
|
||||
boundary = region.boundary
|
||||
if self.materials_grid is None:
|
||||
return
|
||||
# Convert to grid coordinates
|
||||
x_dim = len(self.materials_grid[0][0]) if self.materials_grid and self.materials_grid[0] and self.materials_grid[0][0] else 0
|
||||
y_dim = len(self.materials_grid[0]) if self.materials_grid and self.materials_grid[0] else 0
|
||||
z_dim = len(self.materials_grid) if self.materials_grid else 0
|
||||
x_min = max(0, int(boundary.x_min / self.resolution))
|
||||
x_max = min(x_dim, int(boundary.x_max / self.resolution))
|
||||
y_min = max(0, int(boundary.y_min / self.resolution))
|
||||
y_max = min(y_dim, int(boundary.y_max / self.resolution))
|
||||
z_min = max(0, int(boundary.z_min / self.resolution))
|
||||
z_max = min(z_dim, int(boundary.z_max / self.resolution))
|
||||
# Fill the region
|
||||
for z in range(z_min, z_max):
|
||||
for y in range(y_min, y_max):
|
||||
for x in range(x_min, x_max):
|
||||
# Convert grid coordinates back to world coordinates
|
||||
world_x = x * self.resolution
|
||||
world_y = y * self.resolution
|
||||
world_z = z * self.resolution
|
||||
|
||||
# Check if this grid cell is inside the region
|
||||
if region.contains_point(world_x, world_y, world_z):
|
||||
self.materials_grid[z][y][x] = region.material
|
||||
|
||||
def get_region_at_point(self, x: float, y: float, z: float = 1.5) -> Optional[BuildingRegion]:
|
||||
"""Get the region that contains a given point."""
|
||||
for region in self.regions:
|
||||
if region.contains_point(x, y, z):
|
||||
return region
|
||||
return None
|
||||
|
||||
def get_high_priority_regions(self) -> List[BuildingRegion]:
|
||||
"""Get regions that should have APs placed in them."""
|
||||
return [r for r in self.regions if r.region_type == "room" and r.priority >= 3]
|
||||
|
||||
def get_interference_sensitive_regions(self) -> List[BuildingRegion]:
|
||||
"""Get regions that are sensitive to interference."""
|
||||
return [r for r in self.regions if r.interference_sensitivity > 1.0]
|
||||
|
||||
def calculate_total_user_load(self) -> float:
|
||||
"""Calculate total user load across all regions."""
|
||||
total_load = 0.0
|
||||
for region in self.regions:
|
||||
if region.region_type == "room":
|
||||
total_load += region.boundary.area * region.user_density
|
||||
return total_load
|
||||
|
||||
def calculate_total_device_load(self) -> float:
|
||||
"""Calculate total device load across all regions."""
|
||||
total_load = 0.0
|
||||
for region in self.regions:
|
||||
if region.region_type == "room":
|
||||
total_load += region.boundary.area * region.device_density
|
||||
return total_load
|
||||
|
||||
def export_analysis(self, filepath: str):
|
||||
"""Export the floor plan analysis to JSON."""
|
||||
analysis_data = {
|
||||
"building_dimensions": {
|
||||
"width": self.building_width,
|
||||
"length": self.building_length,
|
||||
"height": self.building_height
|
||||
},
|
||||
"regions": []
|
||||
}
|
||||
|
||||
for region in self.regions:
|
||||
region_data = {
|
||||
"id": region.id,
|
||||
"name": region.name,
|
||||
"type": region.region_type,
|
||||
"boundary": {
|
||||
"x_min": region.boundary.x_min,
|
||||
"y_min": region.boundary.y_min,
|
||||
"x_max": region.boundary.x_max,
|
||||
"y_max": region.boundary.y_max,
|
||||
"z_min": region.boundary.z_min,
|
||||
"z_max": region.boundary.z_max
|
||||
},
|
||||
"material": region.material.value,
|
||||
"material_properties": region.material_properties,
|
||||
"usage": region.usage,
|
||||
"priority": region.priority,
|
||||
"user_density": region.user_density,
|
||||
"device_density": region.device_density,
|
||||
"interference_sensitivity": region.interference_sensitivity,
|
||||
"coverage_requirement": region.coverage_requirement
|
||||
}
|
||||
analysis_data["regions"].append(region_data)
|
||||
|
||||
with open(filepath, 'w') as f:
|
||||
json.dump(analysis_data, f, indent=2)
|
||||
|
||||
logging.info(f"Floor plan analysis exported to {filepath}")
|
||||
|
||||
def get_ap_placement_recommendations(self) -> Dict[str, Any]:
|
||||
"""Get recommendations for AP placement based on region analysis."""
|
||||
high_priority_regions = self.get_high_priority_regions()
|
||||
total_user_load = self.calculate_total_user_load()
|
||||
total_device_load = self.calculate_total_device_load()
|
||||
|
||||
# Calculate recommended AP count based on user/device load
|
||||
recommended_aps = max(
|
||||
len(high_priority_regions), # At least one AP per high-priority room
|
||||
int(total_user_load / 10), # One AP per 10 users
|
||||
int(total_device_load / 25) # One AP per 25 devices
|
||||
)
|
||||
|
||||
# Get optimal AP locations (room centers)
|
||||
ap_locations = []
|
||||
for region in high_priority_regions:
|
||||
center = region.boundary.center
|
||||
ap_locations.append({
|
||||
"region_id": region.id,
|
||||
"region_name": region.name,
|
||||
"x": center[0],
|
||||
"y": center[1],
|
||||
"z": center[2],
|
||||
"priority": region.priority,
|
||||
"user_density": region.user_density,
|
||||
"device_density": region.device_density
|
||||
})
|
||||
|
||||
return {
|
||||
"recommended_ap_count": recommended_aps,
|
||||
"ap_locations": ap_locations,
|
||||
"total_user_load": total_user_load,
|
||||
"total_device_load": total_device_load,
|
||||
"high_priority_regions": len(high_priority_regions),
|
||||
"interference_sensitive_regions": len(self.get_interference_sensitive_regions())
|
||||
}
|
||||
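A quick worked example of the sizing rule above, with made-up loads: 8 high-priority rooms, a total user load of 120 and a device load of 180 give max(8, 120 // 10, 180 // 25) = max(8, 12, 7) = 12 recommended APs, so the user count is the binding constraint in that case.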
|
||||
def create_complex_polygonal_layout(self) -> List[BuildingRegion]:
|
||||
"""Create a complex office layout with polygonal regions for testing."""
|
||||
logging.info("Creating complex polygonal office layout...")
|
||||
|
||||
regions = []
|
||||
region_id = 1
|
||||
|
||||
# L-shaped office area
|
||||
l_office_polygon = [
|
||||
(2.0, 2.0), (15.0, 2.0), (15.0, 8.0), (10.0, 8.0), (10.0, 12.0), (2.0, 12.0)
|
||||
]
|
||||
l_office = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="L-Shaped Office",
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(2, 2, 15, 12),
|
||||
material=MaterialType.CARPET,
|
||||
material_properties={},
|
||||
usage="office",
|
||||
priority=4,
|
||||
user_density=0.15,
|
||||
device_density=0.25,
|
||||
polygon=l_office_polygon
|
||||
)
|
||||
regions.append(l_office)
|
||||
region_id += 1
|
||||
|
||||
# Circular conference room
|
||||
center_x, center_y = 25, 10
|
||||
radius = 6
|
||||
conference_polygon = []
|
||||
for i in range(16):
|
||||
angle = 2 * np.pi * i / 16
|
||||
x = center_x + radius * np.cos(angle)
|
||||
y = center_y + radius * np.sin(angle)
|
||||
conference_polygon.append((x, y))
|
||||
|
||||
conference = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Circular Conference Room",
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(center_x - radius, center_y - radius,
|
||||
center_x + radius, center_y + radius),
|
||||
material=MaterialType.GLASS,
|
||||
material_properties={},
|
||||
usage="meeting",
|
||||
priority=5,
|
||||
user_density=0.3,
|
||||
device_density=0.4,
|
||||
interference_sensitivity=1.3,
|
||||
polygon=conference_polygon
|
||||
)
|
||||
regions.append(conference)
|
||||
region_id += 1
|
||||
|
||||
# Irregular open space
|
||||
open_space_polygon = [
|
||||
(18.0, 2.0), (35.0, 2.0), (35.0, 6.0), (30.0, 6.0), (30.0, 10.0), (25.0, 10.0), (25.0, 15.0), (18.0, 15.0)
|
||||
]
|
||||
open_space = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Irregular Open Space",
|
||||
region_type="room",
|
||||
boundary=RegionBoundary(18, 2, 35, 15),
|
||||
material=MaterialType.CARPET,
|
||||
material_properties={},
|
||||
usage="collaboration",
|
||||
priority=3,
|
||||
user_density=0.2,
|
||||
device_density=0.3,
|
||||
polygon=open_space_polygon
|
||||
)
|
||||
regions.append(open_space)
|
||||
region_id += 1
|
||||
|
||||
# Triangular storage area
|
||||
storage_polygon = [
|
||||
(2.0, 15.0), (8.0, 15.0), (5.0, 20.0)
|
||||
]
|
||||
storage = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Triangular Storage",
|
||||
region_type="facility",
|
||||
boundary=RegionBoundary(2, 15, 8, 20),
|
||||
material=MaterialType.DRYWALL,
|
||||
material_properties={},
|
||||
usage="storage",
|
||||
priority=1,
|
||||
user_density=0.01,
|
||||
device_density=0.05,
|
||||
polygon=storage_polygon
|
||||
)
|
||||
regions.append(storage)
|
||||
region_id += 1
|
||||
|
||||
# Hexagonal server room
|
||||
center_x, center_y = 35, 18
|
||||
radius = 4
|
||||
server_polygon = []
|
||||
for i in range(6):
|
||||
angle = 2 * np.pi * i / 6
|
||||
x = center_x + radius * np.cos(angle)
|
||||
y = center_y + radius * np.sin(angle)
|
||||
server_polygon.append((x, y))
|
||||
|
||||
server_room = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Hexagonal Server Room",
|
||||
region_type="facility",
|
||||
boundary=RegionBoundary(center_x - radius, center_y - radius,
|
||||
center_x + radius, center_y + radius),
|
||||
material=MaterialType.CONCRETE,
|
||||
material_properties={},
|
||||
usage="server",
|
||||
priority=2,
|
||||
user_density=0.0,
|
||||
device_density=0.1,
|
||||
polygon=server_polygon
|
||||
)
|
||||
regions.append(server_room)
|
||||
region_id += 1
|
||||
|
||||
# Corridor with bends
|
||||
corridor_polygon = [
|
||||
(15.0, 8.0), (18.0, 8.0), (18.0, 10.0), (25.0, 10.0), (25.0, 12.0), (30.0, 12.0), (30.0, 15.0), (25.0, 15.0)
|
||||
]
|
||||
corridor = BuildingRegion(
|
||||
id=f"region_{region_id}",
|
||||
name="Bent Corridor",
|
||||
region_type="corridor",
|
||||
boundary=RegionBoundary(15, 8, 30, 15),
|
||||
material=MaterialType.TILE,
|
||||
material_properties={},
|
||||
usage="circulation",
|
||||
priority=1,
|
||||
user_density=0.02,
|
||||
device_density=0.05,
|
||||
polygon=corridor_polygon
|
||||
)
|
||||
regions.append(corridor)
|
||||
region_id += 1
|
||||
|
||||
self.regions = regions
|
||||
logging.info(f"Created {len(regions)} polygonal regions")
|
||||
|
||||
# Generate materials grid
|
||||
self._generate_materials_grid()
|
||||
|
||||
return regions
|
||||
|
||||
def parse_floor_plan_json(json_path: str) -> List[BuildingRegion]:
|
||||
"""
|
||||
Parse a floor plan JSON file and return a list of BuildingRegion objects.
|
||||
The JSON should contain a list of regions, each with:
|
||||
- name
|
||||
- type
|
||||
- boundary: list of (x, y) tuples or bounding box
|
||||
- material
|
||||
- usage (optional)
|
||||
- priority (optional)
|
||||
- user_density (optional)
|
||||
- device_density (optional)
|
||||
- polygon: list of (x, y) tuples for polygonal regions (optional)
|
||||
"""
|
||||
with open(json_path, 'r') as f:
|
||||
data = json.load(f)
|
||||
regions = []
|
||||
for i, region in enumerate(data.get('regions', [])):
|
||||
# Handle polygon definition
|
||||
polygon = None
|
||||
if 'polygon' in region and isinstance(region['polygon'], list):
|
||||
polygon = [(float(pt[0]), float(pt[1])) for pt in region['polygon'] if len(pt) >= 2]
|
||||
|
||||
# Handle boundary definition
|
||||
if 'boundary' in region and isinstance(region['boundary'], dict):
|
||||
b = region['boundary']
|
||||
boundary = RegionBoundary(
|
||||
x_min=b['x_min'], y_min=b['y_min'],
|
||||
x_max=b['x_max'], y_max=b['y_max'],
|
||||
z_min=b.get('z_min', 0.0), z_max=b.get('z_max', 3.0)
|
||||
)
|
||||
elif polygon:
|
||||
# Compute bounding box from polygon
|
||||
xs = [pt[0] for pt in polygon]
|
||||
ys = [pt[1] for pt in polygon]
|
||||
boundary = RegionBoundary(
|
||||
x_min=min(xs), y_min=min(ys),
|
||||
x_max=max(xs), y_max=max(ys),
|
||||
z_min=region.get('z_min', 0.0), z_max=region.get('z_max', 3.0)
|
||||
)
|
||||
else:
|
||||
continue
|
||||
|
||||
mat = MaterialType(region.get('material', 'air'))
|
||||
regions.append(BuildingRegion(
|
||||
id=region.get('id', f'region_{i+1}'),
|
||||
name=region.get('name', f'Region {i+1}'),
|
||||
region_type=region.get('type', 'room'),
|
||||
boundary=boundary,
|
||||
material=mat,
|
||||
material_properties=region.get('material_properties', {}),
|
||||
usage=region.get('usage', 'general'),
|
||||
priority=region.get('priority', 1),
|
||||
user_density=region.get('user_density', 0.1),
|
||||
device_density=region.get('device_density', 0.15),
|
||||
interference_sensitivity=region.get('interference_sensitivity', 1.0),
|
||||
coverage_requirement=region.get('coverage_requirement', 0.9),
|
||||
polygon=polygon
|
||||
))
|
||||
return regions
|
||||
|
||||
def point_in_polygon(x: float, y: float, polygon: List[Tuple[float, float]]) -> bool:
|
||||
"""Ray casting algorithm for point-in-polygon test."""
|
||||
n = len(polygon)
|
||||
inside = False
|
||||
px, py = x, y
|
||||
for i in range(n):
|
||||
xi, yi = polygon[i]
|
||||
xj, yj = polygon[(i + 1) % n]
|
||||
if ((yi > py) != (yj > py)) and (
|
||||
px < (xj - xi) * (py - yi) / (yj - yi + 1e-12) + xi):
|
||||
inside = not inside
|
||||
return inside
|
||||
|
||||
# Optionally, add a method to FloorPlanAnalyzer to use this parser
|
||||
setattr(FloorPlanAnalyzer, 'parse_floor_plan_json', staticmethod(parse_floor_plan_json))
|
||||
setattr(FloorPlanAnalyzer, 'point_in_polygon', staticmethod(point_in_polygon))
|
||||
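A minimal usage sketch of the parser and point-in-polygon test above. The file name, region values, and the 'carpet' material string are invented for illustration and assume MaterialType accepts that value:

# office_plan.json (hypothetical):
# {"regions": [{"name": "Lobby", "type": "room", "material": "carpet",
#               "polygon": [[0, 0], [10, 0], [10, 6], [0, 6]], "priority": 3}]}
regions = parse_floor_plan_json("office_plan.json")
lobby = regions[0]
print(lobby.boundary.x_max, lobby.boundary.y_max)   # 10.0 6.0, bounding box derived from the polygon
print(point_in_polygon(5.0, 3.0, lobby.polygon))    # True, receiver point inside the lobby
print(point_in_polygon(12.0, 3.0, lobby.polygon))   # False, point outside the polygon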
4204
src/main_four_ap.py
Normal file
File diff suppressed because it is too large
65
src/models/wifi_classifier.py
Normal file
@@ -0,0 +1,65 @@
"""
WiFiSignalPredictor: Unified ML/Physics/Hybrid WiFi Signal Prediction
- Supports advanced models, feature engineering, augmentation, uncertainty, transfer learning
"""
import numpy as np
from .wifi_models import WiFiModelFactory, fine_tune_model, HybridPhysicsMLModel
from src.preprocessing.data_augmentation import add_thermal_noise, add_interference, add_fading, simulate_environmental_variability
from src.preprocessing.feature_engineering import build_feature_matrix

class WiFiSignalPredictor:
    """
    Unified WiFi signal predictor supporting advanced ML, hybrid, and uncertainty-aware models.
    Usage:
        predictor = WiFiSignalPredictor(model_type='xgboost')
        predictor.fit(aps, rxs, obstacles, wall_segments, y)
        y_pred = predictor.predict(aps, rxs, obstacles, wall_segments)
    """
    def __init__(self, model_type='random_forest', model_kwargs=None, physics_model=None):
        self.model_type = model_type
        self.model_kwargs = model_kwargs or {}
        self.physics_model = physics_model
        self.model = WiFiModelFactory.create(model_type, **self.model_kwargs)
        self.is_fitted = False
    def fit(self, aps, rxs, obstacles, wall_segments, y, augment=True, fade_type='rayleigh'):
        """
        Fit the model. Optionally augment data with noise, interference, fading, and environmental variability.
        """
        X = build_feature_matrix(aps, rxs, obstacles, wall_segments)
        y_aug = y.copy()
        if augment:
            y_aug = add_thermal_noise(y_aug)
            y_aug = add_interference(y_aug)
            y_aug = add_fading(y_aug, fading_type=fade_type)
        # Optionally augment features
        # X = simulate_environmental_variability(X)  # Uncomment if using structured arrays
        self.model.fit(X, y_aug)
        self.is_fitted = True
        return self
    def predict(self, aps, rxs, obstacles, wall_segments, return_uncertainty=False):
        """
        Predict RSSI. If model supports uncertainty, return (mean, variance).
        """
        X = build_feature_matrix(aps, rxs, obstacles, wall_segments)
        if hasattr(self.model, 'predict'):
            if return_uncertainty and 'return_std' in self.model.predict.__code__.co_varnames:
                mean, var = self.model.predict(X, return_std=True)
                return mean, var
            else:
                return self.model.predict(X)
        else:
            raise ValueError("Model does not support prediction")
    def fine_tune(self, aps, rxs, obstacles, wall_segments, y_new):
        """
        Fine-tune the model on new data (transfer learning).
        """
        X_new = build_feature_matrix(aps, rxs, obstacles, wall_segments)
        self.model = fine_tune_model(self.model, X_new, y_new)
        return self
    def set_physics_model(self, physics_model):
        """
        Set or update the physics model for hybrid use.
        """
        self.physics_model = physics_model
        if isinstance(self.model, HybridPhysicsMLModel):
            self.model.physics_model = physics_model
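A hedged end-to-end sketch of the interface documented above; aps, rxs, obstacles, wall_segments, and y are placeholders whose exact structure is defined by the preprocessing modules imported at the top of this file and is not shown in this commit:

# Hypothetical usage; the geometry and label inputs are placeholders, not real data.
predictor = WiFiSignalPredictor(model_type='gpr')
predictor.fit(aps, rxs, obstacles, wall_segments, y, augment=True)
mean, variance = predictor.predict(aps, rxs, obstacles, wall_segments, return_uncertainty=True)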
111
src/models/wifi_models.py
Normal file
@@ -0,0 +1,111 @@
"""
WiFi ML Models: Advanced Ensemble, Uncertainty, Hybrid, and Transfer Learning

- XGBoost/LightGBM support
- GPR with uncertainty quantification
- Hybrid physics-ML model
- Transfer learning utility
- Unified interface
"""
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, ConstantKernel as C
from sklearn.base import BaseEstimator, RegressorMixin
import logging

try:
    import xgboost as xgb
    XGBOOST_AVAILABLE = True
except ImportError:
    XGBOOST_AVAILABLE = False
try:
    import lightgbm as lgb
    LIGHTGBM_AVAILABLE = True
except ImportError:
    LIGHTGBM_AVAILABLE = False

class XGBoostRegressor(BaseEstimator, RegressorMixin):
    def __init__(self, **kwargs):
        if not XGBOOST_AVAILABLE:
            raise ImportError("xgboost is not installed")
        self.model = xgb.XGBRegressor(**kwargs)
    def fit(self, X, y):
        return self.model.fit(X, y)
    def predict(self, X):
        return self.model.predict(X)

class LightGBMRegressor(BaseEstimator, RegressorMixin):
    def __init__(self, **kwargs):
        if not LIGHTGBM_AVAILABLE:
            raise ImportError("lightgbm is not installed")
        self.model = lgb.LGBMRegressor(**kwargs)
    def fit(self, X, y):
        return self.model.fit(X, y)
    def predict(self, X):
        return self.model.predict(X)

class GPRWithUncertainty(BaseEstimator, RegressorMixin):
    def __init__(self, **kwargs):
        kernel = kwargs.pop('kernel', C(1.0) * RBF(1.0))
        self.model = GaussianProcessRegressor(kernel=kernel, **kwargs)
    def fit(self, X, y):
        return self.model.fit(X, y)
    def predict(self, X, return_std=False):
        mean, std = self.model.predict(X, return_std=True)
        if return_std:
            return mean, std**2  # Return variance
        return mean

class HybridPhysicsMLModel(BaseEstimator, RegressorMixin):
    """
    Hybrid model: physics for baseline, ML for correction.
    physics_model: callable (X) -> baseline_rssi
    ml_model: scikit-learn regressor (fit/predict)
    """
    def __init__(self, physics_model, ml_model=None):
        self.physics_model = physics_model
        self.ml_model = ml_model or RandomForestRegressor(n_estimators=50)
        self.is_fitted = False
    def fit(self, X, y):
        baseline = self.physics_model(X)
        residual = y - baseline
        self.ml_model.fit(X, residual)
        self.is_fitted = True
        return self
    def predict(self, X):
        baseline = self.physics_model(X)
        correction = self.ml_model.predict(X)
        return baseline + correction

# Unified model factory
class WiFiModelFactory:
    @staticmethod
    def create(model_type, **kwargs):
        if model_type == 'random_forest':
            return RandomForestRegressor(**kwargs)
        elif model_type == 'xgboost':
            return XGBoostRegressor(**kwargs)
        elif model_type == 'lightgbm':
            return LightGBMRegressor(**kwargs)
        elif model_type == 'gpr':
            return GPRWithUncertainty(**kwargs)
        elif model_type == 'hybrid':
            return HybridPhysicsMLModel(**kwargs)
        else:
            raise ValueError(f"Unknown model_type: {model_type}")

# Transfer learning utility
def fine_tune_model(pretrained_model, X_new, y_new, n_epochs=5):
    """Fine-tune a pre-trained model on new data (for tree-based models, refit; for GPR, re-fit)."""
    if hasattr(pretrained_model, 'fit'):
        # For tree-based models, concatenate and refit
        if hasattr(pretrained_model, 'estimators_') or hasattr(pretrained_model, 'booster_'):
            # Assume we have access to old data (not always possible)
            # If not, just fit on new data
            pretrained_model.fit(X_new, y_new)
        else:
            pretrained_model.fit(X_new, y_new)
    else:
        raise ValueError("Model does not support fine-tuning")
    return pretrained_model
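A self-contained sketch of the hybrid factory path above, using an invented one-feature dataset (Tx-Rx distance in metres) and a toy 2.4 GHz free-space baseline; free_space_baseline and all numbers below are assumptions, not values from the repository:

import numpy as np

def free_space_baseline(X):
    # X[:, 0] = Tx-Rx distance in metres; 20 dBm transmit power minus free-space path loss at ~2.4 GHz.
    d = np.clip(X[:, 0], 0.1, None)
    return 20.0 - 20.0 * np.log10(4 * np.pi * d / 0.125)

rng = np.random.default_rng(0)
X = rng.uniform(1.0, 30.0, size=(200, 1))
y = free_space_baseline(X) - 3.0 + rng.normal(0.0, 2.0, size=200)  # baseline + fixed offset + noise

hybrid = WiFiModelFactory.create('hybrid', physics_model=free_space_baseline)
hybrid.fit(X, y)              # the random forest learns the residual y - baseline
print(hybrid.predict(X[:3]))  # baseline + learned correction for the first three samples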
1
src/physics/__init__.py
Normal file
@@ -0,0 +1 @@
"""Physics calculations package."""
691
src/physics/adaptive_voxel_system.py
Normal file
@@ -0,0 +1,691 @@
|
||||
"""
|
||||
Adaptive Voxel System for Advanced WiFi Propagation Modeling
|
||||
|
||||
This module implements:
|
||||
- Adaptive voxel resolution based on signal variability and obstacle density
|
||||
- Optimized 3D voxel traversal with unified 2D/3D handling
|
||||
- Numerical stability and edge case handling
|
||||
- Comprehensive error handling and logging
|
||||
- Performance optimization with caching and vectorization
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
import logging
|
||||
from typing import List, Tuple, Optional, Union, Dict, Set
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
import warnings
|
||||
from scipy.spatial import cKDTree
|
||||
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
|
||||
import multiprocessing as mp
|
||||
from functools import lru_cache
|
||||
import time
|
||||
import traceback
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class VoxelType(Enum):
|
||||
"""Types of voxels for adaptive resolution."""
|
||||
HIGH_RESOLUTION = "high_resolution" # Near APs, obstacles, high variability
|
||||
MEDIUM_RESOLUTION = "medium_resolution" # Normal areas
|
||||
LOW_RESOLUTION = "low_resolution" # Open spaces, far from APs
|
||||
|
||||
@dataclass
|
||||
class VoxelConfig:
|
||||
"""Configuration for adaptive voxel system."""
|
||||
base_resolution: float = 0.2 # Base resolution in meters
|
||||
high_res_multiplier: float = 4.0 # High resolution = base_res / multiplier
|
||||
medium_res_multiplier: float = 2.0
|
||||
low_res_multiplier: float = 0.5 # Low resolution = base_res * multiplier
|
||||
|
||||
# Adaptive resolution parameters
|
||||
ap_influence_radius: float = 5.0 # Meters around APs for high resolution
|
||||
obstacle_influence_radius: float = 2.0 # Meters around obstacles
|
||||
variability_threshold: float = 0.1 # Signal variability threshold for high resolution
|
||||
|
||||
# Performance parameters
|
||||
max_voxels_per_dimension: int = 1000 # Maximum voxels per dimension
|
||||
cache_size: int = 10000 # LRU cache size for path calculations
|
||||
parallel_threshold: int = 100 # Minimum points for parallel processing
|
||||
|
||||
class AdaptiveVoxelSystem:
|
||||
"""
|
||||
Advanced voxel system with adaptive resolution and optimized traversal.
|
||||
"""
|
||||
|
||||
def __init__(self, config: VoxelConfig = None):
|
||||
"""Initialize the adaptive voxel system."""
|
||||
self.config = config or VoxelConfig()
|
||||
self.materials_grid = None
|
||||
self.voxel_types = None
|
||||
self.resolution_map = None
|
||||
self.ap_locations = []
|
||||
self.obstacle_locations = []
|
||||
|
||||
# Performance tracking
|
||||
self.calculation_times = []
|
||||
self.cache_hits = 0
|
||||
self.cache_misses = 0
|
||||
|
||||
# Initialize caches
|
||||
self._path_cache = {}
|
||||
self._material_cache = {}
|
||||
|
||||
logger.info("Adaptive Voxel System initialized")
|
||||
|
||||
def set_materials_grid(self, materials_grid: np.ndarray):
|
||||
"""Set the 3D materials grid."""
|
||||
try:
|
||||
self.materials_grid = materials_grid
|
||||
logger.info(f"Materials grid set with shape: {materials_grid.shape}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error setting materials grid: {e}")
|
||||
raise
|
||||
|
||||
def set_ap_locations(self, ap_locations: List[Tuple[float, float, float]]):
|
||||
"""Set AP locations for adaptive resolution."""
|
||||
self.ap_locations = ap_locations
|
||||
logger.info(f"AP locations set: {len(ap_locations)} APs")
|
||||
|
||||
def set_obstacle_locations(self, obstacle_locations: List[Tuple[float, float, float]]):
|
||||
"""Set obstacle locations for adaptive resolution."""
|
||||
self.obstacle_locations = obstacle_locations
|
||||
logger.info(f"Obstacle locations set: {len(obstacle_locations)} obstacles")
|
||||
|
||||
def calculate_adaptive_resolution(self, building_dimensions: Tuple[float, float, float]):
|
||||
"""
|
||||
Calculate adaptive voxel resolution based on APs, obstacles, and signal variability.
|
||||
|
||||
Args:
|
||||
building_dimensions: (width, length, height) in meters
|
||||
|
||||
Returns:
|
||||
resolution_map: 3D array of resolution values
|
||||
"""
|
||||
try:
|
||||
width, length, height = building_dimensions
|
||||
|
||||
# Initialize resolution map with base resolution
|
||||
nx = int(width / self.config.base_resolution)
|
||||
ny = int(length / self.config.base_resolution)
|
||||
nz = int(height / self.config.base_resolution)
|
||||
|
||||
# Limit maximum voxels per dimension
|
||||
nx = min(nx, self.config.max_voxels_per_dimension)
|
||||
ny = min(ny, self.config.max_voxels_per_dimension)
|
||||
nz = min(nz, self.config.max_voxels_per_dimension)
|
||||
|
||||
self.resolution_map = np.full((nz, ny, nx), self.config.base_resolution)
|
||||
|
||||
# Apply adaptive resolution based on AP locations
|
||||
self._apply_ap_based_resolution(width, length, height)
|
||||
|
||||
# Apply adaptive resolution based on obstacles
|
||||
self._apply_obstacle_based_resolution(width, length, height)
|
||||
|
||||
# Apply adaptive resolution based on signal variability
|
||||
self._apply_variability_based_resolution(width, length, height)
|
||||
|
||||
logger.info(f"Adaptive resolution calculated: {nx}x{ny}x{nz} voxels")
|
||||
return self.resolution_map
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating adaptive resolution: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
raise
|
||||
|
||||
def _apply_ap_based_resolution(self, width: float, length: float, height: float):
|
||||
"""Apply high resolution around AP locations."""
|
||||
if not self.ap_locations:
|
||||
return
|
||||
|
||||
nx, ny, nz = self.resolution_map.shape
|
||||
|
||||
for ap_x, ap_y, ap_z in self.ap_locations:
|
||||
# Convert AP coordinates to grid indices
|
||||
gx = int(ap_x / width * nx)
|
||||
gy = int(ap_y / length * ny)
|
||||
gz = int(ap_z / height * nz)
|
||||
|
||||
# Calculate influence radius in grid units
|
||||
influence_radius = int(self.config.ap_influence_radius / self.config.base_resolution)
|
||||
|
||||
# Apply high resolution in influence area
|
||||
for dz in range(-influence_radius, influence_radius + 1):
|
||||
for dy in range(-influence_radius, influence_radius + 1):
|
||||
for dx in range(-influence_radius, influence_radius + 1):
|
||||
nx_idx = gx + dx
|
||||
ny_idx = gy + dy
|
||||
nz_idx = gz + dz
|
||||
|
||||
if (0 <= nx_idx < nx and 0 <= ny_idx < ny and 0 <= nz_idx < nz):
|
||||
distance = np.sqrt(dx**2 + dy**2 + dz**2)
|
||||
if distance <= influence_radius:
|
||||
# High resolution near APs
|
||||
self.resolution_map[nz_idx, ny_idx, nx_idx] = (
|
||||
self.config.base_resolution / self.config.high_res_multiplier
|
||||
)
|
||||
|
||||
def _apply_obstacle_based_resolution(self, width: float, length: float, height: float):
|
||||
"""Apply high resolution around obstacles."""
|
||||
if not self.obstacle_locations:
|
||||
return
|
||||
|
||||
nz, ny, nx = self.resolution_map.shape  # resolution_map is indexed (z, y, x)
|
||||
|
||||
for obs_x, obs_y, obs_z in self.obstacle_locations:
|
||||
# Convert obstacle coordinates to grid indices
|
||||
gx = int(obs_x / width * nx)
|
||||
gy = int(obs_y / length * ny)
|
||||
gz = int(obs_z / height * nz)
|
||||
|
||||
# Calculate influence radius in grid units
|
||||
influence_radius = int(self.config.obstacle_influence_radius / self.config.base_resolution)
|
||||
|
||||
# Apply high resolution in influence area
|
||||
for dz in range(-influence_radius, influence_radius + 1):
|
||||
for dy in range(-influence_radius, influence_radius + 1):
|
||||
for dx in range(-influence_radius, influence_radius + 1):
|
||||
nx_idx = gx + dx
|
||||
ny_idx = gy + dy
|
||||
nz_idx = gz + dz
|
||||
|
||||
if (0 <= nx_idx < nx and 0 <= ny_idx < ny and 0 <= nz_idx < nz):
|
||||
distance = np.sqrt(dx**2 + dy**2 + dz**2)
|
||||
if distance <= influence_radius:
|
||||
# High resolution near obstacles
|
||||
current_res = self.resolution_map[nz_idx, ny_idx, nx_idx]
|
||||
high_res = self.config.base_resolution / self.config.high_res_multiplier
|
||||
self.resolution_map[nz_idx, ny_idx, nx_idx] = min(current_res, high_res)
|
||||
|
||||
def _apply_variability_based_resolution(self, width: float, length: float, height: float):
|
||||
"""Apply resolution based on signal variability (simplified model)."""
|
||||
# This is a simplified implementation
|
||||
# In a full implementation, this would analyze signal variability patterns
|
||||
pass
|
||||
|
||||
@lru_cache(maxsize=10000)
|
||||
def get_optimized_path_points(self, start: Tuple[float, float, float],
|
||||
end: Tuple[float, float, float]) -> List[Tuple[float, float, float]]:
|
||||
"""
|
||||
Get optimized path points using adaptive resolution and unified 3D/2D handling.
|
||||
|
||||
Args:
|
||||
start: Starting point (x, y, z)
|
||||
end: Ending point (x, y, z)
|
||||
|
||||
Returns:
|
||||
List of path points with appropriate resolution
|
||||
"""
|
||||
try:
|
||||
# Check if points are very close
|
||||
distance = np.sqrt(sum((end[i] - start[i])**2 for i in range(3)))
|
||||
if distance < 1e-6:
|
||||
return [start]
|
||||
|
||||
# Determine if we need 2D or 3D traversal
|
||||
if abs(start[2] - end[2]) < 1e-3:
|
||||
# 2D traversal (same z-level)
|
||||
return self._get_2d_path_points(start, end)
|
||||
else:
|
||||
# 3D traversal
|
||||
return self._get_3d_path_points(start, end)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in get_optimized_path_points: {e}")
|
||||
# Fallback to simple linear interpolation
|
||||
return self._get_fallback_path_points(start, end)
|
||||
|
||||
def _get_3d_path_points(self, start: Tuple[float, float, float],
|
||||
end: Tuple[float, float, float]) -> List[Tuple[float, float, float]]:
|
||||
"""Get 3D path points using optimized Bresenham algorithm."""
|
||||
try:
|
||||
x1, y1, z1 = start
|
||||
x2, y2, z2 = end
|
||||
|
||||
# Use adaptive resolution for coordinate conversion
|
||||
if self.resolution_map is not None:
|
||||
# Get resolution at start point
|
||||
start_res = self._get_resolution_at_point(x1, y1, z1)
|
||||
end_res = self._get_resolution_at_point(x2, y2, z2)
|
||||
resolution = min(start_res, end_res)
|
||||
else:
|
||||
resolution = self.config.base_resolution
|
||||
|
||||
# Convert to grid coordinates
|
||||
gx1, gy1, gz1 = int(x1 / resolution), int(y1 / resolution), int(z1 / resolution)
|
||||
gx2, gy2, gz2 = int(x2 / resolution), int(y2 / resolution), int(z2 / resolution)
|
||||
|
||||
# Optimized 3D Bresenham algorithm
|
||||
points = []
|
||||
dx = abs(gx2 - gx1)
|
||||
dy = abs(gy2 - gy1)
|
||||
dz = abs(gz2 - gz1)
|
||||
|
||||
xs = 1 if gx2 > gx1 else -1
|
||||
ys = 1 if gy2 > gy1 else -1
|
||||
zs = 1 if gz2 > gz1 else -1
|
||||
|
||||
# Driving axis is X
|
||||
if dx >= dy and dx >= dz:
|
||||
p1 = 2 * dy - dx
|
||||
p2 = 2 * dz - dx
|
||||
while gx1 != gx2:
|
||||
points.append((gx1 * resolution, gy1 * resolution, gz1 * resolution))
|
||||
if p1 >= 0:
|
||||
gy1 += ys
|
||||
p1 -= 2 * dx
|
||||
if p2 >= 0:
|
||||
gz1 += zs
|
||||
p2 -= 2 * dx
|
||||
p1 += 2 * dy
|
||||
p2 += 2 * dz
|
||||
gx1 += xs
|
||||
# Driving axis is Y
|
||||
elif dy >= dx and dy >= dz:
|
||||
p1 = 2 * dx - dy
|
||||
p2 = 2 * dz - dy
|
||||
while gy1 != gy2:
|
||||
points.append((gx1 * resolution, gy1 * resolution, gz1 * resolution))
|
||||
if p1 >= 0:
|
||||
gx1 += xs
|
||||
p1 -= 2 * dy
|
||||
if p2 >= 0:
|
||||
gz1 += zs
|
||||
p2 -= 2 * dy
|
||||
p1 += 2 * dx
|
||||
p2 += 2 * dz
|
||||
gy1 += ys
|
||||
# Driving axis is Z
|
||||
else:
|
||||
p1 = 2 * dy - dz
|
||||
p2 = 2 * dx - dz
|
||||
while gz1 != gz2:
|
||||
points.append((gx1 * resolution, gy1 * resolution, gz1 * resolution))
|
||||
if p1 >= 0:
|
||||
gy1 += ys
|
||||
p1 -= 2 * dz
|
||||
if p2 >= 0:
|
||||
gx1 += xs
|
||||
p2 -= 2 * dz
|
||||
p1 += 2 * dy
|
||||
p2 += 2 * dx
|
||||
gz1 += zs
|
||||
|
||||
points.append((gx2 * resolution, gy2 * resolution, gz2 * resolution))
|
||||
return points
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in 3D path calculation: {e}")
|
||||
return self._get_fallback_path_points(start, end)
|
||||
|
||||
def _get_2d_path_points(self, start: Tuple[float, float, float],
|
||||
end: Tuple[float, float, float]) -> List[Tuple[float, float, float]]:
|
||||
"""Get 2D path points (same z-level) using optimized algorithm."""
|
||||
try:
|
||||
x1, y1, z1 = start
|
||||
x2, y2, z2 = end
|
||||
|
||||
# Use adaptive resolution
|
||||
if self.resolution_map is not None:
|
||||
resolution = min(
|
||||
self._get_resolution_at_point(x1, y1, z1),
|
||||
self._get_resolution_at_point(x2, y2, z2)
|
||||
)
|
||||
else:
|
||||
resolution = self.config.base_resolution
|
||||
|
||||
# Convert to grid coordinates
|
||||
gx1, gy1 = int(x1 / resolution), int(y1 / resolution)
|
||||
gx2, gy2 = int(x2 / resolution), int(y2 / resolution)
|
||||
|
||||
# Optimized 2D Bresenham algorithm
|
||||
points = []
|
||||
dx = abs(gx2 - gx1)
|
||||
dy = abs(gy2 - gy1)
|
||||
|
||||
sx = 1 if gx2 > gx1 else -1
|
||||
sy = 1 if gy2 > gy1 else -1
|
||||
|
||||
err = dx - dy
|
||||
|
||||
while True:
|
||||
points.append((gx1 * resolution, gy1 * resolution, z1))
|
||||
|
||||
if gx1 == gx2 and gy1 == gy2:
|
||||
break
|
||||
|
||||
e2 = 2 * err
|
||||
if e2 > -dy:
|
||||
err -= dy
|
||||
gx1 += sx
|
||||
if e2 < dx:
|
||||
err += dx
|
||||
gy1 += sy
|
||||
|
||||
return points
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in 2D path calculation: {e}")
|
||||
return self._get_fallback_path_points(start, end)
|
||||
|
||||
def _get_fallback_path_points(self, start: Tuple[float, float, float],
|
||||
end: Tuple[float, float, float]) -> List[Tuple[float, float, float]]:
|
||||
"""Fallback path calculation using linear interpolation."""
|
||||
try:
|
||||
distance = np.sqrt(sum((end[i] - start[i])**2 for i in range(3)))
|
||||
if distance < 1e-6:
|
||||
return [start]
|
||||
|
||||
# Simple linear interpolation
|
||||
num_points = max(2, int(distance / self.config.base_resolution))
|
||||
points = []
|
||||
|
||||
for i in range(num_points):
|
||||
t = i / (num_points - 1)
|
||||
point = tuple(start[j] + t * (end[j] - start[j]) for j in range(3))
|
||||
points.append(point)
|
||||
|
||||
return points
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in fallback path calculation: {e}")
|
||||
return [start, end]
|
||||
|
||||
def _get_resolution_at_point(self, x: float, y: float, z: float) -> float:
|
||||
"""Get resolution at a specific point."""
|
||||
try:
|
||||
if self.resolution_map is None:
|
||||
return self.config.base_resolution
|
||||
|
||||
# Convert to grid indices
|
||||
nz, ny, nx = self.resolution_map.shape  # resolution_map is indexed (z, y, x)
|
||||
|
||||
# Get building dimensions (assume 1:1 mapping for now)
|
||||
gx = int(x / self.config.base_resolution)
|
||||
gy = int(y / self.config.base_resolution)
|
||||
gz = int(z / self.config.base_resolution)
|
||||
|
||||
# Clamp to grid bounds
|
||||
gx = max(0, min(gx, nx - 1))
|
||||
gy = max(0, min(gy, ny - 1))
|
||||
gz = max(0, min(gz, nz - 1))
|
||||
|
||||
return self.resolution_map[gz, gy, gx]
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error getting resolution at point: {e}")
|
||||
return self.config.base_resolution
|
||||
|
||||
def calculate_material_attenuation_optimized(self, start: Tuple[float, float, float],
|
||||
end: Tuple[float, float, float],
|
||||
materials_grid) -> float:
|
||||
"""
|
||||
Calculate material attenuation along path with optimized performance.
|
||||
|
||||
Args:
|
||||
start: Starting point
|
||||
end: Ending point
|
||||
materials_grid: 3D materials grid
|
||||
|
||||
Returns:
|
||||
Total attenuation in dB
|
||||
"""
|
||||
try:
|
||||
start_time = time.time()
|
||||
|
||||
# Get optimized path points
|
||||
path_points = self.get_optimized_path_points(start, end)
|
||||
|
||||
total_attenuation = 0.0
|
||||
seen_materials = set()
|
||||
|
||||
for i, point in enumerate(path_points):
|
||||
# Get material at this point
|
||||
material = self._get_material_at_point_optimized(point, materials_grid)
|
||||
|
||||
if material is None or material.name == 'Air':
|
||||
continue
|
||||
|
||||
# Calculate segment length
|
||||
if i < len(path_points) - 1:
|
||||
next_point = path_points[i + 1]
|
||||
segment_length = np.sqrt(sum((next_point[j] - point[j])**2 for j in range(3)))
|
||||
else:
|
||||
segment_length = 0.1 # Default segment length
|
||||
|
||||
# Calculate attenuation for this material segment
|
||||
if hasattr(material, 'calculate_attenuation'):
|
||||
segment_atten = material.calculate_attenuation(2.4e9, segment_length)
|
||||
else:
|
||||
segment_atten = 0.0
|
||||
|
||||
# Avoid double-counting same material
|
||||
material_key = (material.name, point[0], point[1], point[2])
|
||||
if material_key not in seen_materials:
|
||||
total_attenuation += segment_atten
|
||||
seen_materials.add(material_key)
|
||||
|
||||
# Track performance
|
||||
calculation_time = time.time() - start_time
|
||||
self.calculation_times.append(calculation_time)
|
||||
|
||||
return total_attenuation
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in material attenuation calculation: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
return 0.0
|
||||
|
||||
def _get_material_at_point_optimized(self, point: Tuple[float, float, float],
|
||||
materials_grid) -> Optional:
|
||||
"""Get material at point with optimized lookup."""
|
||||
try:
|
||||
if materials_grid is None:
|
||||
return None
|
||||
|
||||
x, y, z = point
|
||||
|
||||
# Use adaptive resolution for grid lookup
|
||||
resolution = self._get_resolution_at_point(x, y, z)
|
||||
|
||||
# Convert to grid coordinates
|
||||
gx = int(x / resolution)
|
||||
gy = int(y / resolution)
|
||||
gz = int(z / resolution)
|
||||
|
||||
# Check bounds
|
||||
if (0 <= gz < len(materials_grid) and
|
||||
0 <= gy < len(materials_grid[0]) and
|
||||
0 <= gx < len(materials_grid[0][0])):
|
||||
return materials_grid[gz][gy][gx]
|
||||
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error getting material at point: {e}")
|
||||
return None
|
||||
|
||||
def calculate_rssi_batch_parallel(self, ap_locations: List[Tuple[float, float, float]],
|
||||
points: List[Tuple[float, float, float]],
|
||||
materials_grid,
|
||||
tx_power: float = 20.0,
|
||||
max_workers: int = None) -> np.ndarray:
|
||||
"""
|
||||
Calculate RSSI for multiple APs and points in parallel.
|
||||
|
||||
Args:
|
||||
ap_locations: List of AP coordinates
|
||||
points: List of receiver points
|
||||
materials_grid: 3D materials grid
|
||||
tx_power: Transmit power in dBm
|
||||
max_workers: Maximum number of parallel workers
|
||||
|
||||
Returns:
|
||||
RSSI matrix: shape (num_aps, num_points)
|
||||
"""
|
||||
try:
|
||||
if max_workers is None:
|
||||
max_workers = min(mp.cpu_count(), len(ap_locations))
|
||||
|
||||
num_aps = len(ap_locations)
|
||||
num_points = len(points)
|
||||
|
||||
# Initialize RSSI matrix
|
||||
rssi_matrix = np.full((num_aps, num_points), -100.0)
|
||||
|
||||
# Use parallel processing for large batches
|
||||
if num_aps * num_points > self.config.parallel_threshold:
|
||||
logger.info(f"Using parallel processing with {max_workers} workers")
|
||||
|
||||
with ProcessPoolExecutor(max_workers=max_workers) as executor:
|
||||
# Submit tasks for each AP
|
||||
futures = []
|
||||
for ap_idx, ap_location in enumerate(ap_locations):
|
||||
future = executor.submit(
|
||||
self._calculate_rssi_for_ap,
|
||||
ap_location, points, materials_grid, tx_power
|
||||
)
|
||||
futures.append((ap_idx, future))
|
||||
|
||||
# Collect results
|
||||
for ap_idx, future in futures:
|
||||
try:
|
||||
rssi_values = future.result()
|
||||
rssi_matrix[ap_idx, :] = rssi_values
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating RSSI for AP {ap_idx}: {e}")
|
||||
rssi_matrix[ap_idx, :] = -100.0
|
||||
else:
|
||||
# Sequential processing for small batches
|
||||
for ap_idx, ap_location in enumerate(ap_locations):
|
||||
rssi_values = self._calculate_rssi_for_ap(
|
||||
ap_location, points, materials_grid, tx_power
|
||||
)
|
||||
rssi_matrix[ap_idx, :] = rssi_values
|
||||
|
||||
return rssi_matrix
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in batch RSSI calculation: {e}")
|
||||
logger.error(traceback.format_exc())
|
||||
return np.full((len(ap_locations), len(points)), -100.0)
|
||||
|
||||
def _calculate_rssi_for_ap(self, ap_location: Tuple[float, float, float],
|
||||
points: List[Tuple[float, float, float]],
|
||||
materials_grid,
|
||||
tx_power: float) -> np.ndarray:
|
||||
"""Calculate RSSI for one AP at multiple points."""
|
||||
try:
|
||||
rssi_values = []
|
||||
|
||||
for point in points:
|
||||
# Calculate distance
|
||||
distance = np.sqrt(sum((ap_location[i] - point[i])**2 for i in range(3)))
|
||||
|
||||
if distance < 1e-6:
|
||||
rssi_values.append(tx_power)
|
||||
continue
|
||||
|
||||
# Free space path loss
|
||||
wavelength = 3e8 / 2.4e9
|
||||
free_space_loss = 20 * np.log10(4 * np.pi * distance / wavelength)
|
||||
|
||||
# Material attenuation
|
||||
material_attenuation = self.calculate_material_attenuation_optimized(
|
||||
ap_location, point, materials_grid
|
||||
)
|
||||
|
||||
# Total RSSI
|
||||
rssi = tx_power - free_space_loss - material_attenuation
|
||||
rssi_values.append(rssi)
|
||||
|
||||
return np.array(rssi_values)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating RSSI for AP: {e}")
|
||||
return np.full(len(points), -100.0)
|
||||
|
||||
def get_performance_stats(self) -> Dict:
|
||||
"""Get performance statistics."""
|
||||
if not self.calculation_times:
|
||||
return {
|
||||
'avg_calculation_time': 0.0,
|
||||
'total_calculations': 0,
|
||||
'cache_hit_rate': 0.0,
|
||||
'total_cache_hits': self.cache_hits,
|
||||
'total_cache_misses': self.cache_misses
|
||||
}
|
||||
|
||||
avg_time = np.mean(self.calculation_times)
|
||||
total_calcs = len(self.calculation_times)
|
||||
|
||||
cache_hit_rate = 0.0
|
||||
if self.cache_hits + self.cache_misses > 0:
|
||||
cache_hit_rate = self.cache_hits / (self.cache_hits + self.cache_misses)
|
||||
|
||||
return {
|
||||
'avg_calculation_time': avg_time,
|
||||
'total_calculations': total_calcs,
|
||||
'cache_hit_rate': cache_hit_rate,
|
||||
'total_cache_hits': self.cache_hits,
|
||||
'total_cache_misses': self.cache_misses
|
||||
}
|
||||
|
||||
def clear_caches(self):
|
||||
"""Clear all caches."""
|
||||
self._path_cache.clear()
|
||||
self._material_cache.clear()
|
||||
self.get_optimized_path_points.cache_clear()
|
||||
logger.info("All caches cleared")
|
||||
|
||||
def test_adaptive_voxel_system():
|
||||
"""Test the adaptive voxel system."""
|
||||
print("Testing Adaptive Voxel System...")
|
||||
|
||||
# Create test configuration
|
||||
config = VoxelConfig(
|
||||
base_resolution=0.2,
|
||||
high_res_multiplier=4.0,
|
||||
medium_res_multiplier=2.0,
|
||||
low_res_multiplier=0.5,
|
||||
ap_influence_radius=5.0,
|
||||
obstacle_influence_radius=2.0
|
||||
)
|
||||
|
||||
# Initialize system
|
||||
voxel_system = AdaptiveVoxelSystem(config)
|
||||
|
||||
# Set test data
|
||||
ap_locations = [(10.0, 10.0, 2.7), (30.0, 30.0, 2.7)]
|
||||
obstacle_locations = [(20.0, 20.0, 1.5)]
|
||||
|
||||
voxel_system.set_ap_locations(ap_locations)
|
||||
voxel_system.set_obstacle_locations(obstacle_locations)
|
||||
|
||||
# Calculate adaptive resolution
|
||||
building_dimensions = (40.0, 40.0, 3.0)
|
||||
resolution_map = voxel_system.calculate_adaptive_resolution(building_dimensions)
|
||||
|
||||
print(f"Resolution map shape: {resolution_map.shape}")
|
||||
print(f"Min resolution: {np.min(resolution_map):.3f} m")
|
||||
print(f"Max resolution: {np.max(resolution_map):.3f} m")
|
||||
print(f"Mean resolution: {np.mean(resolution_map):.3f} m")
|
||||
|
||||
# Test path calculation
|
||||
start_point = (5.0, 5.0, 1.5)
|
||||
end_point = (35.0, 35.0, 1.5)
|
||||
|
||||
path_points = voxel_system.get_optimized_path_points(start_point, end_point)
|
||||
print(f"Path points calculated: {len(path_points)}")
|
||||
|
||||
# Test performance
|
||||
stats = voxel_system.get_performance_stats()
|
||||
print(f"Performance stats: {stats}")
|
||||
|
||||
print("Adaptive Voxel System test completed successfully!")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_adaptive_voxel_system()
|
||||
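A hedged free-space-only usage sketch of the batch API above; passing materials_grid=None means no material is found along any path, so only the free-space term contributes. The AP and receiver coordinates are invented:

system = AdaptiveVoxelSystem(VoxelConfig(base_resolution=0.5))
aps = [(2.0, 2.0, 2.7), (18.0, 8.0, 2.7)]
rx_points = [(5.0, 5.0, 1.5), (10.0, 5.0, 1.5), (15.0, 5.0, 1.5), (19.0, 9.0, 1.5)]
rssi = system.calculate_rssi_batch_parallel(aps, rx_points, materials_grid=None, tx_power=20.0)
print(rssi.shape)  # (2, 4) matrix of dBm values, one row per AP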
573
src/physics/materials.py
Normal file
@@ -0,0 +1,573 @@
|
||||
"""Module for handling material properties and signal attenuation in WiFi environments with absolute precision."""
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, List, Tuple, Optional, Union, Callable
|
||||
import numpy as np
|
||||
import math
|
||||
from scipy import constants
|
||||
from scipy.optimize import minimize_scalar
|
||||
import warnings
|
||||
|
||||
# Physical constants for precise calculations
|
||||
EPSILON_0 = constants.epsilon_0 # Vacuum permittivity (F/m)
|
||||
MU_0 = constants.mu_0 # Vacuum permeability (H/m)
|
||||
C = constants.c # Speed of light (m/s)
|
||||
ETA_0 = np.sqrt(MU_0 / EPSILON_0) # Intrinsic impedance of free space (Ω)
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class FrequencyDependentProperty:
|
||||
"""Represents frequency-dependent material properties with interpolation."""
|
||||
frequencies: List[float] # Frequencies in Hz
|
||||
values: List[float] # Property values at each frequency
|
||||
|
||||
def get_value(self, frequency: float) -> float:
|
||||
"""Get interpolated value at given frequency."""
|
||||
if len(self.frequencies) == 1:
|
||||
return self.values[0]
|
||||
|
||||
# Find nearest frequency or interpolate
|
||||
if frequency <= self.frequencies[0]:
|
||||
return self.values[0]
|
||||
elif frequency >= self.frequencies[-1]:
|
||||
return self.values[-1]
|
||||
else:
|
||||
# Linear interpolation
|
||||
for i in range(len(self.frequencies) - 1):
|
||||
if self.frequencies[i] <= frequency <= self.frequencies[i + 1]:
|
||||
f1, f2 = self.frequencies[i], self.frequencies[i + 1]
|
||||
v1, v2 = self.values[i], self.values[i + 1]
|
||||
return v1 + (v2 - v1) * (frequency - f1) / (f2 - f1)
|
||||
|
||||
return self.values[-1] # Fallback
|
||||
|
||||
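def _demo_frequency_interpolation():
    # Hedged sketch, not part of the original commit: shows the linear interpolation
    # above using the concrete permittivity samples defined later in this file.
    eps = FrequencyDependentProperty(
        frequencies=[1e9, 2.4e9, 5e9, 10e9],
        values=[5.0, 4.5, 4.2, 4.0],
    )
    assert abs(eps.get_value(2.4e9) - 4.5) < 1e-9    # exact sample point
    assert abs(eps.get_value(3.7e9) - 4.35) < 1e-9   # midway between the 2.4 GHz and 5 GHz samples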
@dataclass(frozen=True)
|
||||
class AdvancedMaterial:
|
||||
"""Advanced material class with frequency-dependent properties and precise physics."""
|
||||
name: str
|
||||
# Frequency-dependent properties (can be single values or frequency-dependent)
|
||||
relative_permittivity: Union[float, FrequencyDependentProperty] # εᵣ
|
||||
conductivity: Union[float, FrequencyDependentProperty] # σ (S/m)
|
||||
relative_permeability: Union[float, FrequencyDependentProperty] = 1.0 # μᵣ
|
||||
loss_tangent: Optional[Union[float, FrequencyDependentProperty]] = None # tan(δ)
|
||||
|
||||
# Physical properties
|
||||
density: float = 1000.0 # kg/m³
|
||||
temperature: float = 293.15 # K (20°C)
|
||||
|
||||
# Surface properties for reflection/transmission
|
||||
surface_roughness: float = 0.0 # RMS roughness in meters
|
||||
surface_conductivity: Optional[float] = None # Surface conductivity for metals
|
||||
|
||||
# Composite material properties
|
||||
is_composite: bool = False
|
||||
composite_layers: List['AdvancedMaterial'] = field(default_factory=list)
|
||||
layer_thicknesses: List[float] = field(default_factory=list)
|
||||
|
||||
def get_relative_permittivity(self, frequency: float) -> complex:
|
||||
"""Get complex relative permittivity at given frequency."""
|
||||
if isinstance(self.relative_permittivity, FrequencyDependentProperty):
|
||||
eps_r_real = self.relative_permittivity.get_value(frequency)
|
||||
else:
|
||||
eps_r_real = self.relative_permittivity
|
||||
|
||||
# Get conductivity
|
||||
if isinstance(self.conductivity, FrequencyDependentProperty):
|
||||
sigma = self.conductivity.get_value(frequency)
|
||||
else:
|
||||
sigma = self.conductivity
|
||||
|
||||
# Get loss tangent if available
|
||||
if self.loss_tangent is not None:
|
||||
if isinstance(self.loss_tangent, FrequencyDependentProperty):
|
||||
tan_delta = self.loss_tangent.get_value(frequency)
|
||||
else:
|
||||
tan_delta = self.loss_tangent
|
||||
eps_r_imag = eps_r_real * tan_delta
|
||||
else:
|
||||
# Calculate from conductivity
|
||||
omega = 2 * np.pi * frequency
|
||||
eps_r_imag = sigma / (omega * EPSILON_0)
|
||||
|
||||
return eps_r_real - 1j * eps_r_imag
|
||||
|
||||
def get_relative_permeability(self, frequency: float) -> complex:
|
||||
"""Get complex relative permeability at given frequency."""
|
||||
if isinstance(self.relative_permeability, FrequencyDependentProperty):
|
||||
mu_r = self.relative_permeability.get_value(frequency)
|
||||
else:
|
||||
mu_r = self.relative_permeability
|
||||
|
||||
# For most materials, μᵣ ≈ 1 (non-magnetic)
|
||||
return mu_r - 1j * 0.0
|
||||
|
||||
def get_propagation_constant(self, frequency: float) -> complex:
|
||||
"""Calculate complex propagation constant γ = α + jβ."""
|
||||
omega = 2 * np.pi * frequency
|
||||
eps_r = self.get_relative_permittivity(frequency)
|
||||
mu_r = self.get_relative_permeability(frequency)
|
||||
|
||||
# Complex propagation constant
|
||||
gamma = 1j * omega * np.sqrt(MU_0 * EPSILON_0 * eps_r * mu_r)
|
||||
return gamma
|
||||
|
||||
def get_attenuation_constant(self, frequency: float) -> float:
|
||||
"""Get power attenuation constant α (Np/m)."""
|
||||
gamma = self.get_propagation_constant(frequency)
|
||||
return np.real(gamma)
|
||||
|
||||
def get_phase_constant(self, frequency: float) -> float:
|
||||
"""Get phase constant β (rad/m)."""
|
||||
gamma = self.get_propagation_constant(frequency)
|
||||
return np.imag(gamma)
|
||||
|
||||
def get_intrinsic_impedance(self, frequency: float) -> complex:
|
||||
"""Get intrinsic impedance of the material."""
|
||||
eps_r = self.get_relative_permittivity(frequency)
|
||||
mu_r = self.get_relative_permeability(frequency)
|
||||
return ETA_0 * np.sqrt(mu_r / eps_r)
|
||||
|
||||
def calculate_attenuation(self, frequency: float = 2.4e9, thickness: float = None,
|
||||
angle_of_incidence: float = 0.0) -> float:
|
||||
"""
|
||||
Calculate precise signal attenuation through the material.
|
||||
|
||||
Args:
|
||||
frequency: Signal frequency in Hz
|
||||
thickness: Material thickness in meters (if None, uses default)
|
||||
angle_of_incidence: Angle of incidence in radians (0 = normal incidence)
|
||||
|
||||
Returns:
|
||||
Attenuation in dB
|
||||
"""
|
||||
if self.is_composite and self.composite_layers:
|
||||
return self._calculate_composite_attenuation(frequency, thickness, angle_of_incidence)
|
||||
|
||||
# Get attenuation constant
|
||||
alpha = self.get_attenuation_constant(frequency)
|
||||
|
||||
# Apply thickness (exponential attenuation)
|
||||
if thickness is None:
|
||||
thickness = 0.1 # Default thickness
|
||||
|
||||
# Basic exponential attenuation
|
||||
attenuation_np = alpha * thickness / np.cos(angle_of_incidence) if angle_of_incidence != 0 else alpha * thickness
|
||||
|
||||
# Convert to dB (8.686 = 20/ln(10))
|
||||
attenuation_db = 8.686 * attenuation_np
|
||||
|
||||
return attenuation_db
|
||||
|
||||
def _calculate_composite_attenuation(self, frequency: float, total_thickness: float,
|
||||
angle_of_incidence: float) -> float:
|
||||
"""Calculate attenuation for composite materials using transfer matrix method."""
|
||||
if not self.composite_layers or not self.layer_thicknesses:
|
||||
return self.calculate_attenuation(frequency, total_thickness, angle_of_incidence)
|
||||
|
||||
# Transfer matrix method for multilayer materials
|
||||
total_attenuation = 0.0
|
||||
|
||||
for layer, layer_thickness in zip(self.composite_layers, self.layer_thicknesses):
|
||||
layer_atten = layer.calculate_attenuation(frequency, layer_thickness, angle_of_incidence)
|
||||
total_attenuation += layer_atten
|
||||
|
||||
return total_attenuation
|
||||
|
||||
def calculate_reflection_coefficient(self, frequency: float, angle_of_incidence: float,
|
||||
polarization: str = 'TE') -> complex:
|
||||
"""
|
||||
Calculate reflection coefficient using Fresnel equations.
|
||||
|
||||
Args:
|
||||
frequency: Signal frequency in Hz
|
||||
angle_of_incidence: Angle of incidence in radians
|
||||
polarization: 'TE' (transverse electric) or 'TM' (transverse magnetic)
|
||||
|
||||
Returns:
|
||||
Complex reflection coefficient
|
||||
"""
|
||||
# Assume incident medium is air (εᵣ = 1, μᵣ = 1)
|
||||
eta_1 = ETA_0 # Air impedance
|
||||
eta_2 = self.get_intrinsic_impedance(frequency)
|
||||
|
||||
if polarization.upper() == 'TE':
|
||||
# TE polarization (E-field perpendicular to plane of incidence)
|
||||
reflection_coeff = (eta_2 * np.cos(angle_of_incidence) - eta_1 * np.cos(self._get_transmission_angle(frequency, angle_of_incidence))) / \
|
||||
(eta_2 * np.cos(angle_of_incidence) + eta_1 * np.cos(self._get_transmission_angle(frequency, angle_of_incidence)))
|
||||
else:
|
||||
# TM polarization (E-field parallel to plane of incidence)
|
||||
reflection_coeff = (eta_1 * np.cos(angle_of_incidence) - eta_2 * np.cos(self._get_transmission_angle(frequency, angle_of_incidence))) / \
|
||||
(eta_1 * np.cos(angle_of_incidence) + eta_2 * np.cos(self._get_transmission_angle(frequency, angle_of_incidence)))
|
||||
|
||||
return reflection_coeff
|
||||
|
||||
def calculate_transmission_coefficient(self, frequency: float, angle_of_incidence: float,
|
||||
polarization: str = 'TE') -> complex:
|
||||
"""Calculate transmission coefficient using Fresnel equations."""
|
||||
reflection_coeff = self.calculate_reflection_coefficient(frequency, angle_of_incidence, polarization)
|
||||
return 1.0 + reflection_coeff # T = 1 + R
|
||||
|
||||
def _get_transmission_angle(self, frequency: float, angle_of_incidence: float) -> float:
|
||||
"""Calculate transmission angle using Snell's Law."""
|
||||
# Assume incident medium is air (n₁ = 1)
|
||||
n1 = 1.0
|
||||
eps_r = self.get_relative_permittivity(frequency)
|
||||
n2 = np.sqrt(np.real(eps_r)) # Refractive index
|
||||
|
||||
# Snell's Law: n₁ sin(θ₁) = n₂ sin(θ₂)
|
||||
sin_theta_2 = n1 * np.sin(angle_of_incidence) / n2
|
||||
|
||||
# Handle total internal reflection
|
||||
if abs(sin_theta_2) > 1.0:
|
||||
return np.pi / 2 # Critical angle
|
||||
|
||||
return np.arcsin(sin_theta_2)
|
||||
|
||||
def calculate_total_attenuation_with_reflection(self, frequency: float, thickness: float,
|
||||
angle_of_incidence: float = 0.0,
|
||||
polarization: str = 'TE') -> float:
|
||||
"""
|
||||
Calculate total attenuation including reflection losses.
|
||||
|
||||
Args:
|
||||
frequency: Signal frequency in Hz
|
||||
thickness: Material thickness in meters
|
||||
angle_of_incidence: Angle of incidence in radians
|
||||
polarization: 'TE' or 'TM'
|
||||
|
||||
Returns:
|
||||
Total attenuation in dB
|
||||
"""
|
||||
# Transmission coefficient (power)
|
||||
T = self.calculate_transmission_coefficient(frequency, angle_of_incidence, polarization)
|
||||
transmission_loss_db = -10 * np.log10(np.abs(T)**2)
|
||||
|
||||
# Material attenuation
|
||||
material_attenuation_db = self.calculate_attenuation(frequency, thickness, angle_of_incidence)
|
||||
|
||||
# Total attenuation
|
||||
total_attenuation_db = transmission_loss_db + material_attenuation_db
|
||||
|
||||
return total_attenuation_db
|
||||
|
||||
# Frequency-dependent material properties database
|
||||
FREQUENCY_DEPENDENT_PROPERTIES = {
|
||||
'concrete': {
|
||||
'relative_permittivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[5.0, 4.5, 4.2, 4.0]
|
||||
),
|
||||
'conductivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[0.02, 0.014, 0.012, 0.010]
|
||||
),
|
||||
'loss_tangent': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[0.15, 0.12, 0.10, 0.08]
|
||||
)
|
||||
},
|
||||
'glass': {
|
||||
'relative_permittivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[6.5, 6.0, 5.8, 5.6]
|
||||
),
|
||||
'conductivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[0.005, 0.004, 0.003, 0.002]
|
||||
)
|
||||
},
|
||||
'drywall': {
|
||||
'relative_permittivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[2.2, 2.0, 1.9, 1.8]
|
||||
),
|
||||
'conductivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[0.002, 0.001, 0.0008, 0.0006]
|
||||
)
|
||||
},
|
||||
'metal': {
|
||||
'relative_permittivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[1.0, 1.0, 1.0, 1.0]
|
||||
),
|
||||
'conductivity': FrequencyDependentProperty(
|
||||
frequencies=[1e9, 2.4e9, 5e9, 10e9],
|
||||
values=[1e7, 1e7, 1e7, 1e7]
|
||||
),
|
||||
'surface_conductivity': 1e7
|
||||
}
|
||||
}
|
||||
|
||||
# Advanced materials with frequency-dependent properties
|
||||
ADVANCED_MATERIALS = {
|
||||
'concrete': AdvancedMaterial(
|
||||
name='Concrete',
|
||||
relative_permittivity=FREQUENCY_DEPENDENT_PROPERTIES['concrete']['relative_permittivity'],
|
||||
conductivity=FREQUENCY_DEPENDENT_PROPERTIES['concrete']['conductivity'],
|
||||
loss_tangent=FREQUENCY_DEPENDENT_PROPERTIES['concrete']['loss_tangent'],
|
||||
density=2400.0,
|
||||
surface_roughness=0.001
|
||||
),
|
||||
'glass': AdvancedMaterial(
|
||||
name='Glass',
|
||||
relative_permittivity=FREQUENCY_DEPENDENT_PROPERTIES['glass']['relative_permittivity'],
|
||||
conductivity=FREQUENCY_DEPENDENT_PROPERTIES['glass']['conductivity'],
|
||||
density=2500.0,
|
||||
surface_roughness=0.0001
|
||||
),
|
||||
'drywall': AdvancedMaterial(
|
||||
name='Drywall',
|
||||
relative_permittivity=FREQUENCY_DEPENDENT_PROPERTIES['drywall']['relative_permittivity'],
|
||||
conductivity=FREQUENCY_DEPENDENT_PROPERTIES['drywall']['conductivity'],
|
||||
density=800.0,
|
||||
surface_roughness=0.0005
|
||||
),
|
||||
'metal': AdvancedMaterial(
|
||||
name='Metal',
|
||||
relative_permittivity=FREQUENCY_DEPENDENT_PROPERTIES['metal']['relative_permittivity'],
|
||||
conductivity=FREQUENCY_DEPENDENT_PROPERTIES['metal']['conductivity'],
|
||||
surface_conductivity=FREQUENCY_DEPENDENT_PROPERTIES['metal']['surface_conductivity'],
|
||||
density=7850.0,
|
||||
surface_roughness=0.00001
|
||||
),
|
||||
'wood': AdvancedMaterial(
|
||||
name='Wood',
|
||||
relative_permittivity=2.1,
|
||||
conductivity=0.002,
|
||||
density=600.0,
|
||||
surface_roughness=0.002
|
||||
),
|
||||
'brick': AdvancedMaterial(
|
||||
name='Brick',
|
||||
relative_permittivity=4.0,
|
||||
conductivity=0.01,
|
||||
density=1800.0,
|
||||
surface_roughness=0.003
|
||||
),
|
||||
'tile': AdvancedMaterial(
|
||||
name='Tile',
|
||||
relative_permittivity=5.0,
|
||||
conductivity=0.003,
|
||||
density=2300.0,
|
||||
surface_roughness=0.0002
|
||||
),
|
||||
'carpet': AdvancedMaterial(
|
||||
name='Carpet',
|
||||
relative_permittivity=2.5,
|
||||
conductivity=0.001,
|
||||
density=1200.0,
|
||||
surface_roughness=0.005
|
||||
),
|
||||
'air': AdvancedMaterial(
|
||||
name='Air',
|
||||
relative_permittivity=1.0,
|
||||
conductivity=0.0,
|
||||
density=1.225,
|
||||
surface_roughness=0.0
|
||||
)
|
||||
}
|
||||
|
||||
# Composite materials (e.g., reinforced concrete, insulated walls)
|
||||
def create_reinforced_concrete() -> AdvancedMaterial:
|
||||
"""Create reinforced concrete as a composite material."""
|
||||
concrete = ADVANCED_MATERIALS['concrete']
|
||||
steel = AdvancedMaterial(
|
||||
name='Steel',
|
||||
relative_permittivity=1.0,
|
||||
conductivity=1e7,
|
||||
density=7850.0
|
||||
)
|
||||
|
||||
# Reinforced concrete: 95% concrete, 5% steel reinforcement
|
||||
composite = AdvancedMaterial(
|
||||
name='Reinforced Concrete',
|
||||
relative_permittivity=4.5, # Effective permittivity
|
||||
conductivity=0.02, # Effective conductivity
|
||||
is_composite=True,
|
||||
composite_layers=[concrete, steel],
|
||||
layer_thicknesses=[0.19, 0.01], # 19cm concrete, 1cm steel
|
||||
density=2500.0
|
||||
)
|
||||
|
||||
return composite
|
||||
|
||||
def create_insulated_wall() -> AdvancedMaterial:
|
||||
"""Create insulated wall as a composite material."""
|
||||
drywall = ADVANCED_MATERIALS['drywall']
|
||||
insulation = AdvancedMaterial(
|
||||
name='Insulation',
|
||||
relative_permittivity=1.8,
|
||||
conductivity=0.0005,
|
||||
density=50.0
|
||||
)
|
||||
|
||||
# Insulated wall: drywall-insulation-drywall
|
||||
composite = AdvancedMaterial(
|
||||
name='Insulated Wall',
|
||||
relative_permittivity=2.0, # Effective permittivity
|
||||
conductivity=0.001, # Effective conductivity
|
||||
is_composite=True,
|
||||
composite_layers=[drywall, insulation, drywall],
|
||||
layer_thicknesses=[0.016, 0.1, 0.016], # 16mm drywall, 10cm insulation, 16mm drywall
|
||||
density=400.0
|
||||
)
|
||||
|
||||
return composite
|
||||
|
||||
# Add composite materials to the database
|
||||
ADVANCED_MATERIALS['reinforced_concrete'] = create_reinforced_concrete()
|
||||
ADVANCED_MATERIALS['insulated_wall'] = create_insulated_wall()
|
||||
|
||||
# Backward compatibility: Keep original Material class
|
||||
@dataclass(frozen=True)
|
||||
class Material:
|
||||
"""Legacy Material class for backward compatibility."""
|
||||
name: str
|
||||
relative_permittivity: float
|
||||
conductivity: float
|
||||
thickness: float
|
||||
color: tuple[float, float, float] = (0.5, 0.5, 0.5)
|
||||
|
||||
def calculate_attenuation(self, frequency: float = 2.4e9) -> float:
|
||||
"""Legacy attenuation calculation."""
|
||||
# Convert to AdvancedMaterial for calculation
|
||||
adv_material = AdvancedMaterial(
|
||||
name=self.name,
|
||||
relative_permittivity=self.relative_permittivity,
|
||||
conductivity=self.conductivity
|
||||
)
|
||||
return adv_material.calculate_attenuation(frequency, self.thickness)
|
||||
|
||||
# Legacy MATERIALS dictionary for backward compatibility
|
||||
MATERIALS = {
|
||||
'concrete': Material('Concrete', 4.5, 0.014, 0.2),
|
||||
'glass': Material('Glass', 6.0, 0.004, 0.006),
|
||||
'wood': Material('Wood', 2.1, 0.002, 0.04),
|
||||
'drywall': Material('Drywall', 2.0, 0.001, 0.016),
|
||||
'metal': Material('Metal', 1.0, 1e7, 0.002),
|
||||
'brick': Material('Brick', 4.0, 0.01, 0.1),
|
||||
'plaster': Material('Plaster', 3.0, 0.005, 0.02),
|
||||
'tile': Material('Tile', 5.0, 0.003, 0.01),
|
||||
'asphalt': Material('Asphalt', 3.5, 0.006, 0.05),
|
||||
'carpet': Material('Carpet', 2.5, 0.001, 0.01),
|
||||
'plastic': Material('Plastic', 2.3, 0.0001, 0.005),
|
||||
'insulation': Material('Insulation', 1.8, 0.0005, 0.05),
|
||||
'fiber_cement': Material('Fiber Cement', 3.2, 0.002, 0.015),
|
||||
'steel': Material('Steel', 1.0, 1e7, 0.005),
|
||||
'copper': Material('Copper', 1.0, 5.8e7, 0.001),
|
||||
'aluminum': Material('Aluminum', 1.0, 3.5e7, 0.002),
|
||||
'foam': Material('Foam', 1.5, 0.0002, 0.03),
|
||||
'rubber': Material('Rubber', 2.0, 0.0001, 0.01),
|
||||
'ceramic': Material('Ceramic', 6.5, 0.002, 0.01),
|
||||
'vinyl': Material('Vinyl', 2.2, 0.0005, 0.002),
|
||||
'air': Material('Air', 1.0, 0.0, 0.0)
|
||||
}
|
||||
|
||||
class MaterialLayer:
|
||||
"""Represents a layer of material in the signal path."""
|
||||
def __init__(self, material: Union[Material, AdvancedMaterial], thickness_multiplier: float = 1.0):
|
||||
"""Initialize a material layer."""
|
||||
self.material = material
|
||||
        self.thickness = material.thickness * thickness_multiplier if hasattr(material, 'thickness') else 0.1 * thickness_multiplier
|
||||
|
||||
def get_attenuation(self, frequency: float = 2.4e9, angle_of_incidence: float = 0.0) -> float:
|
||||
"""Get the total attenuation through this layer."""
|
||||
if isinstance(self.material, AdvancedMaterial):
|
||||
return self.material.calculate_attenuation(frequency, self.thickness, angle_of_incidence)
|
||||
else:
|
||||
return self.material.calculate_attenuation(frequency)
|
||||
|
||||
class SignalPath:
|
||||
"""Represents the path of a signal through various materials with advanced physics."""
|
||||
def __init__(self):
|
||||
"""Initialize an empty signal path."""
|
||||
self.layers: List[MaterialLayer] = []
|
||||
|
||||
def add_layer(self, material: Union[Material, AdvancedMaterial], thickness_multiplier: float = 1.0):
|
||||
"""Add a material layer to the path."""
|
||||
self.layers.append(MaterialLayer(material, thickness_multiplier))
|
||||
|
||||
def calculate_total_attenuation(self, frequency: float = 2.4e9, angle_of_incidence: float = 0.0) -> float:
|
||||
"""Calculate total attenuation along the path with advanced physics."""
|
||||
total_attenuation = 0.0
|
||||
|
||||
for layer in self.layers:
|
||||
layer_atten = layer.get_attenuation(frequency, angle_of_incidence)
|
||||
total_attenuation += layer_atten
|
||||
|
||||
return total_attenuation
|
||||
|
||||
def calculate_reflection_losses(self, frequency: float = 2.4e9, angle_of_incidence: float = 0.0) -> float:
|
||||
"""Calculate reflection losses at material interfaces."""
|
||||
if len(self.layers) < 2:
|
||||
return 0.0
|
||||
|
||||
total_reflection_loss = 0.0
|
||||
|
||||
for i in range(len(self.layers) - 1):
|
||||
layer1 = self.layers[i].material
|
||||
layer2 = self.layers[i + 1].material
|
||||
|
||||
if isinstance(layer1, AdvancedMaterial) and isinstance(layer2, AdvancedMaterial):
|
||||
# Calculate reflection coefficient at interface
|
||||
R = layer1.calculate_reflection_coefficient(frequency, angle_of_incidence)
|
||||
reflection_loss_db = -10 * np.log10(1 - np.abs(R)**2)
|
||||
total_reflection_loss += reflection_loss_db
|
||||
|
||||
return total_reflection_loss
|
||||
|
||||
def test_advanced_material_properties():
|
||||
"""Test advanced material properties and calculations."""
|
||||
print("=== Testing Advanced Material Properties ===")
|
||||
|
||||
# Test frequency-dependent properties
|
||||
concrete = ADVANCED_MATERIALS['concrete']
|
||||
frequencies = [1e9, 2.4e9, 5e9, 10e9]
|
||||
|
||||
print(f"\nConcrete Properties vs Frequency:")
|
||||
print(f"{'Frequency (GHz)':<15} {'εᵣ':<10} {'σ (S/m)':<12} {'α (Np/m)':<12} {'Atten (dB/cm)':<15}")
|
||||
print("-" * 70)
|
||||
|
||||
for freq in frequencies:
|
||||
eps_r = concrete.get_relative_permittivity(freq)
|
||||
sigma = concrete.conductivity.get_value(freq) if isinstance(concrete.conductivity, FrequencyDependentProperty) else concrete.conductivity
|
||||
alpha = concrete.get_attenuation_constant(freq)
|
||||
atten_db_cm = concrete.calculate_attenuation(freq, 0.01) # 1cm thickness
|
||||
|
||||
print(f"{freq/1e9:<15.1f} {np.real(eps_r):<10.2f} {sigma:<12.4f} {alpha:<12.4f} {atten_db_cm:<15.2f}")
|
||||
|
||||
# Test angle-dependent attenuation
|
||||
print(f"\nAngle-Dependent Attenuation (Glass, 2.4 GHz, 1cm):")
|
||||
print(f"{'Angle (deg)':<12} {'Atten (dB)':<12} {'Reflection Loss (dB)':<20} {'Total (dB)':<12}")
|
||||
print("-" * 60)
|
||||
|
||||
glass = ADVANCED_MATERIALS['glass']
|
||||
angles_deg = [0, 15, 30, 45, 60, 75, 85]
|
||||
|
||||
for angle_deg in angles_deg:
|
||||
angle_rad = np.radians(angle_deg)
|
||||
atten = glass.calculate_attenuation(2.4e9, 0.01, angle_rad)
|
||||
refl_loss = glass.calculate_reflection_coefficient(2.4e9, angle_rad)
|
||||
refl_loss_db = -10 * np.log10(1 - np.abs(refl_loss)**2)
|
||||
total = atten + refl_loss_db
|
||||
|
||||
print(f"{angle_deg:<12} {atten:<12.2f} {refl_loss_db:<20.2f} {total:<12.2f}")
|
||||
|
||||
# Test composite materials
|
||||
print(f"\nComposite Material Comparison:")
|
||||
print(f"{'Material':<20} {'Thickness':<12} {'Atten (dB)':<12}")
|
||||
print("-" * 50)
|
||||
|
||||
materials_to_test = [
|
||||
('Concrete', ADVANCED_MATERIALS['concrete'], 0.2),
|
||||
('Reinforced Concrete', ADVANCED_MATERIALS['reinforced_concrete'], 0.2),
|
||||
('Insulated Wall', ADVANCED_MATERIALS['insulated_wall'], 0.132),
|
||||
('Glass', ADVANCED_MATERIALS['glass'], 0.006)
|
||||
]
|
||||
|
||||
for name, material, thickness in materials_to_test:
|
||||
atten = material.calculate_attenuation(2.4e9, thickness)
|
||||
print(f"{name:<20} {thickness:<12.3f} {atten:<12.2f}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_advanced_material_properties()
|
||||
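The `MaterialLayer`/`SignalPath` classes above, together with the legacy `MATERIALS` table, are the simplest entry point into the attenuation model. A minimal usage sketch, assuming the file is importable as `src.physics.materials` (the import path the propagation engines below use):

```python
from src.physics.materials import MATERIALS, SignalPath

# Model a link that crosses one drywall partition and one concrete wall
path = SignalPath()
path.add_layer(MATERIALS['drywall'])   # 16 mm drywall
path.add_layer(MATERIALS['concrete'])  # 20 cm concrete

loss_db = path.calculate_total_attenuation(frequency=2.4e9)
print(f"Wall stack attenuation at 2.4 GHz: {loss_db:.1f} dB")
```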
39
src/preprocessing/data_augmentation.py
Normal file
39
src/preprocessing/data_augmentation.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""
|
||||
Data Augmentation Utilities for WiFi ML
|
||||
- Add realistic noise, interference, and fading
|
||||
- Simulate environmental variability (materials, AP heights, obstacles)
|
||||
"""
|
||||
import numpy as np
|
||||
|
||||
def add_thermal_noise(rssi, noise_floor_dbm=-95, std_db=2.0):
    """Add Gaussian thermal noise to RSSI values, floored at the receiver noise floor."""
    noise = np.random.normal(0, std_db, size=np.shape(rssi))
    # Clamp so augmented samples never fall below the noise floor (interpretation of noise_floor_dbm)
    return np.maximum(rssi + noise, noise_floor_dbm)
|
||||
|
||||
def add_interference(rssi, interference_dbm=-80, prob=0.1):
|
||||
"""Randomly add interference spikes to RSSI values."""
|
||||
mask = np.random.rand(*np.shape(rssi)) < prob
|
||||
interference = np.zeros_like(rssi)
|
||||
interference[mask] = np.random.uniform(-10, 0, size=np.sum(mask))
|
||||
return rssi + interference
|
||||
|
||||
def add_fading(rssi, fading_type='rayleigh', K=5):
|
||||
"""Add small-scale fading (Rayleigh or Rician) to RSSI values."""
|
||||
if fading_type == 'rayleigh':
|
||||
fading = np.random.rayleigh(scale=2, size=np.shape(rssi))
|
||||
elif fading_type == 'rician':
|
||||
fading = np.random.rayleigh(scale=2, size=np.shape(rssi)) + K
|
||||
else:
|
||||
fading = np.zeros_like(rssi)
|
||||
return rssi - fading # Fading reduces RSSI
|
||||
|
||||
def simulate_environmental_variability(X, config=None):
|
||||
"""Augment features to simulate different environments (materials, AP heights, obstacles)."""
|
||||
X_aug = X.copy()
|
||||
if 'ap_height' in X.dtype.names:
|
||||
X_aug['ap_height'] += np.random.uniform(-0.5, 0.5, size=X.shape[0])
|
||||
if 'material_id' in X.dtype.names:
|
||||
X_aug['material_id'] = np.random.choice([0,1,2,3], size=X.shape[0])
|
||||
if 'num_obstacles' in X.dtype.names:
|
||||
X_aug['num_obstacles'] += np.random.randint(-1, 2, size=X.shape[0])
|
||||
return X_aug
|
||||
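The augmentation helpers above are intended to be chained on simulated RSSI arrays before model training. A short illustrative sketch (array size and parameter values are arbitrary):

```python
import numpy as np
from src.preprocessing.data_augmentation import add_thermal_noise, add_interference, add_fading

rssi_clean = np.full(1000, -55.0)           # Idealized predictions, in dBm
rssi_noisy = add_thermal_noise(rssi_clean)  # Gaussian measurement noise
rssi_noisy = add_interference(rssi_noisy)   # Occasional interference dips
rssi_noisy = add_fading(rssi_noisy, fading_type='rayleigh')

print(f"Mean augmented RSSI: {rssi_noisy.mean():.1f} dBm (std {rssi_noisy.std():.1f} dB)")
```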
37
src/preprocessing/feature_engineering.py
Normal file
37
src/preprocessing/feature_engineering.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""
|
||||
Feature Engineering for WiFi ML
|
||||
- Compute advanced features: distance to nearest obstacle, number of walls crossed, angle of incidence, etc.
|
||||
"""
|
||||
import numpy as np
|
||||
|
||||
def distance_to_nearest_obstacle(rx, obstacles):
|
||||
"""Compute distance from receiver to nearest obstacle."""
|
||||
rx = np.array(rx)
|
||||
obstacles = np.array(obstacles)
|
||||
dists = np.linalg.norm(obstacles - rx, axis=1)
|
||||
return np.min(dists) if len(dists) > 0 else np.nan
|
||||
|
||||
def number_of_walls_crossed(ap, rx, wall_segments):
    """Estimate the number of walls crossed between AP and receiver."""
    # wall_segments: list of ((x1, y1), (x2, y2))
    def ccw(a, b, c):
        # Signed cross product; the sign gives the turn direction of a -> b -> c
        return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0])

    def crosses(p1, p2, wall):
        # Proper 2D segment intersection test (shared endpoints do not count as a crossing)
        q1, q2 = wall
        d1, d2 = ccw(q1, q2, p1), ccw(q1, q2, p2)
        d3, d4 = ccw(p1, p2, q1), ccw(p1, p2, q2)
        return d1 * d2 < 0 and d3 * d4 < 0

    return sum(crosses(ap, rx, wall) for wall in wall_segments)
|
||||
|
||||
def angle_of_incidence(ap, rx, wall):
    """Compute angle of incidence at wall in degrees (0 = normal incidence)."""
    # wall: ((x1, y1), (x2, y2))
    (x1, y1), (x2, y2) = wall
    normal = np.array([-(y2 - y1), x2 - x1], dtype=float)  # Wall normal (2D)
    ray = np.array([rx[0] - ap[0], rx[1] - ap[1]], dtype=float)  # AP -> receiver direction
    denom = np.linalg.norm(normal) * np.linalg.norm(ray) or 1.0  # Avoid divide-by-zero for degenerate input
    cos_theta = np.clip(abs(np.dot(ray, normal)) / denom, -1.0, 1.0)
    return float(np.degrees(np.arccos(cos_theta)))
|
||||
|
||||
def build_feature_matrix(aps, rxs, obstacles, wall_segments):
|
||||
"""Build feature matrix for ML model."""
|
||||
features = []
|
||||
for ap, rx in zip(aps, rxs):
|
||||
d_nearest = distance_to_nearest_obstacle(rx, obstacles)
|
||||
n_walls = number_of_walls_crossed(ap, rx, wall_segments)
|
||||
angle = 0.0 # Could loop over walls for real angle
|
||||
features.append([d_nearest, n_walls, angle])
|
||||
return np.array(features)
|
||||
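A toy call showing the input shapes `build_feature_matrix` expects; the coordinates and wall segment below are made up for illustration:

```python
import numpy as np
from src.preprocessing.feature_engineering import build_feature_matrix

aps = [(0.0, 0.0), (10.0, 0.0)]                 # AP positions, paired element-wise with rxs
rxs = [(5.0, 5.0), (12.0, 3.0)]                 # Receiver positions
obstacles = np.array([[4.0, 4.0], [9.0, 1.0]])  # Point obstacles
walls = [((5.0, -1.0), (5.0, 8.0))]             # Wall segments ((x1, y1), (x2, y2))

X = build_feature_matrix(aps, rxs, obstacles, walls)
print(X)  # One row per (AP, receiver) pair: [distance_to_obstacle, walls_crossed, angle]
```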
77
src/preprocessing/preprocessor.py
Normal file
77
src/preprocessing/preprocessor.py
Normal file
@@ -0,0 +1,77 @@
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from sklearn.preprocessing import StandardScaler, LabelEncoder
|
||||
|
||||
class WiFiDataPreprocessor:
|
||||
def __init__(self):
|
||||
"""Initialize the WiFi data preprocessor."""
|
||||
self.label_encoders = {}
|
||||
self.scaler = StandardScaler()
|
||||
|
||||
def preprocess(self, data):
|
||||
"""Preprocess WiFi data for model training.
|
||||
|
||||
Args:
|
||||
data (pd.DataFrame): Raw WiFi data
|
||||
|
||||
Returns:
|
||||
pd.DataFrame: Preprocessed data
|
||||
"""
|
||||
# Create a copy to avoid modifying original data
|
||||
df = data.copy()
|
||||
|
||||
# Convert timestamp to datetime if needed
|
||||
if 'timestamp' in df.columns and not pd.api.types.is_datetime64_any_dtype(df['timestamp']):
|
||||
df['timestamp'] = pd.to_datetime(df['timestamp'], unit='s')
|
||||
|
||||
# Extract time-based features
|
||||
df['hour'] = df['timestamp'].dt.hour
|
||||
df['minute'] = df['timestamp'].dt.minute
|
||||
df['day_of_week'] = df['timestamp'].dt.dayofweek
|
||||
|
||||
# Encode categorical variables
|
||||
categorical_columns = ['ssid', 'bssid', 'security']
|
||||
for col in categorical_columns:
|
||||
if col in df.columns:
|
||||
if col not in self.label_encoders:
|
||||
self.label_encoders[col] = LabelEncoder()
|
||||
df[col + '_encoded'] = self.label_encoders[col].fit_transform(df[col])
|
||||
|
||||
# Create signal quality metric
|
||||
df['signal_quality'] = (df['rssi'] + 100) / 70.0 # Normalize to 0-1 range
|
||||
|
||||
# Calculate rolling statistics
|
||||
df['rssi_rolling_mean'] = df.groupby('ssid')['rssi'].transform(
|
||||
lambda x: x.rolling(window=5, min_periods=1).mean()
|
||||
)
|
||||
df['rssi_rolling_std'] = df.groupby('ssid')['rssi'].transform(
|
||||
lambda x: x.rolling(window=5, min_periods=1).std()
|
||||
)
|
||||
|
||||
# Create channel interference feature
|
||||
df['channel_group'] = df['channel'] // 4 # Group nearby channels
|
||||
df['ap_count_per_channel'] = df.groupby('channel_group')['ssid'].transform('count')
|
||||
|
||||
# Select and order features for model training
|
||||
feature_columns = [
|
||||
'rssi', 'signal_quality', 'channel',
|
||||
'hour', 'minute', 'day_of_week',
|
||||
'rssi_rolling_mean', 'rssi_rolling_std',
|
||||
'ap_count_per_channel'
|
||||
]
|
||||
|
||||
# Add encoded categorical columns
|
||||
        feature_columns.extend([col + '_encoded' for col in categorical_columns if col + '_encoded' in df.columns])
|
||||
|
||||
# Fill missing values
|
||||
df[feature_columns] = df[feature_columns].ffill().bfill()
|
||||
|
||||
# Scale numerical features
|
||||
df[feature_columns] = self.scaler.fit_transform(df[feature_columns])
|
||||
|
||||
# Add location information if available
|
||||
if 'x' in df.columns and 'y' in df.columns:
|
||||
df['distance_to_center'] = np.sqrt((df['x'] - 0.5)**2 + (df['y'] - 0.5)**2)
|
||||
feature_columns.extend(['x', 'y', 'distance_to_center'])
|
||||
|
||||
return df[feature_columns]
|
||||
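For reference, a minimal frame that satisfies the columns `preprocess` expects (`timestamp`, `ssid`, `bssid`, `security`, `rssi`, `channel`); all values below are synthetic:

```python
import pandas as pd
from src.preprocessing.preprocessor import WiFiDataPreprocessor

raw = pd.DataFrame({
    'timestamp': [1700000000 + i for i in range(10)],
    'ssid': ['Office-AP1'] * 10,
    'bssid': ['aa:bb:cc:dd:ee:01'] * 10,
    'security': ['WPA2'] * 10,
    'rssi': [-52, -55, -53, -60, -58, -57, -54, -56, -59, -61],
    'channel': [6] * 10,
})

features = WiFiDataPreprocessor().preprocess(raw)
print(features.columns.tolist())
```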
50
src/preprocessing/utils/display_config.py
Normal file
50
src/preprocessing/utils/display_config.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""
|
||||
Configuration module for display settings and coordinate transformations.
|
||||
Centralizes all dimension, scaling, and DPI settings to ensure consistency across visualizations.
|
||||
"""
|
||||
|
||||
class DisplayConfig:
|
||||
# Output image settings
|
||||
DPI = 300
|
||||
OUTPUT_WIDTH = 3210 # Width at 300 DPI
|
||||
OUTPUT_HEIGHT = 1948 # Height at 300 DPI
|
||||
|
||||
# Internal coordinate system (used by floor plan generator)
|
||||
INTERNAL_WIDTH = 1200
|
||||
INTERNAL_HEIGHT = 800
|
||||
|
||||
# Scaling factors
|
||||
X_SCALE = OUTPUT_WIDTH / INTERNAL_WIDTH
|
||||
Y_SCALE = OUTPUT_HEIGHT / INTERNAL_HEIGHT
|
||||
|
||||
# Standard figure sizes
|
||||
FIGURE_WIDTH = 12 # inches
|
||||
FIGURE_HEIGHT = 8 # inches
|
||||
|
||||
# AP positioning constants (in output coordinates)
|
||||
AP_MARGIN_X = 600 # pixels from edge
|
||||
AP_MARGIN_Y = 365 # pixels from top/bottom
|
||||
|
||||
@classmethod
|
||||
def to_output_coordinates(cls, x, y):
|
||||
"""Convert internal coordinates to output coordinates."""
|
||||
return (x * cls.X_SCALE, y * cls.Y_SCALE)
|
||||
|
||||
@classmethod
|
||||
def to_internal_coordinates(cls, x, y):
|
||||
"""Convert output coordinates to internal coordinates."""
|
||||
return (x / cls.X_SCALE, y / cls.Y_SCALE)
|
||||
|
||||
@classmethod
|
||||
def get_ap_positions(cls):
|
||||
"""Get standard AP positions in output coordinates."""
|
||||
return [
|
||||
# Upper left
|
||||
(cls.AP_MARGIN_X, cls.AP_MARGIN_Y, "AP_UpperLeft"),
|
||||
# Upper right
|
||||
(cls.OUTPUT_WIDTH - cls.AP_MARGIN_X, cls.AP_MARGIN_Y, "AP_UpperRight"),
|
||||
# Lower left
|
||||
(cls.AP_MARGIN_X, cls.OUTPUT_HEIGHT - cls.AP_MARGIN_Y, "AP_LowerLeft"),
|
||||
# Lower right
|
||||
(cls.OUTPUT_WIDTH - cls.AP_MARGIN_X, cls.OUTPUT_HEIGHT - cls.AP_MARGIN_Y, "AP_LowerRight")
|
||||
]
|
||||
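The conversion between the generator's internal 1200x800 grid and the 300 DPI output raster is a plain linear rescale; a quick sketch of how the helpers are meant to be used:

```python
from src.preprocessing.utils.display_config import DisplayConfig

# Internal (floor-plan) coordinates -> output (image) pixels, and back
out_x, out_y = DisplayConfig.to_output_coordinates(600, 400)       # Centre of the internal grid
int_x, int_y = DisplayConfig.to_internal_coordinates(out_x, out_y)

print(out_x, out_y)  # 1605.0 974.0 with the default 3210x1948 output
print(int_x, int_y)  # 600.0 400.0 (round-trip)

for x, y, name in DisplayConfig.get_ap_positions():
    print(f"{name}: ({x:.0f}, {y:.0f}) px")
```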
112
src/preprocessing/utils/floor_plan_generator.py
Normal file
112
src/preprocessing/utils/floor_plan_generator.py
Normal file
@@ -0,0 +1,112 @@
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib.patches import Rectangle
|
||||
import os
|
||||
from .display_config import DisplayConfig
|
||||
import numpy as np
|
||||
from skimage.draw import polygon as sk_polygon
|
||||
from skimage.measure import find_contours
|
||||
|
||||
class FloorPlanGenerator:
|
||||
def __init__(self, width=DisplayConfig.INTERNAL_WIDTH, height=DisplayConfig.INTERNAL_HEIGHT, resolution=1.0):
|
||||
self.width = width
|
||||
self.height = height
|
||||
self.resolution = resolution
|
||||
self.rooms = []
|
||||
self._mask = None
|
||||
self._polygon = None
|
||||
|
||||
def add_room(self, x, y, width, height, room_type="office"):
|
||||
"""Add a room to the floor plan."""
|
||||
room = {
|
||||
'x': x,
|
||||
'y': self.height - y - height, # Flip y-coordinate
|
||||
'width': width,
|
||||
'height': height,
|
||||
'type': room_type
|
||||
}
|
||||
self.rooms.append(room)
|
||||
|
||||
def get_building_mask(self):
|
||||
"""Return a boolean mask (True=inside building) for the floor plan."""
|
||||
grid_w = int(np.ceil(self.width / self.resolution))
|
||||
grid_h = int(np.ceil(self.height / self.resolution))
|
||||
mask = np.zeros((grid_h, grid_w), dtype=bool)
|
||||
for room in self.rooms:
|
||||
x0 = int(room['x'] / self.resolution)
|
||||
y0 = int(room['y'] / self.resolution)
|
||||
x1 = int((room['x'] + room['width']) / self.resolution)
|
||||
y1 = int((room['y'] + room['height']) / self.resolution)
|
||||
mask[y0:y1, x0:x1] = True
|
||||
self._mask = mask
|
||||
return mask
|
||||
|
||||
def get_building_perimeter_polygon(self):
|
||||
"""Return the outer perimeter polygon as a list of (x, y) tuples in real coordinates."""
|
||||
if self._mask is None:
|
||||
self.get_building_mask()
|
||||
if self._mask is None:
|
||||
return None
|
||||
contours = find_contours(self._mask.astype(float), 0.5)
|
||||
if not contours:
|
||||
return None
|
||||
largest = max(contours, key=len)
|
||||
# Convert from grid to real coordinates
|
||||
polygon = [(x * self.resolution, (self._mask.shape[0] - y) * self.resolution) for y, x in largest]
|
||||
self._polygon = polygon
|
||||
return polygon
|
||||
|
||||
def draw_floor_plan(self, output_path, show_grid=False):
|
||||
"""Draw and save the floor plan."""
|
||||
fig, ax = plt.subplots(figsize=(DisplayConfig.FIGURE_WIDTH, DisplayConfig.FIGURE_HEIGHT))
|
||||
ax.set_xlim(0, self.width)
|
||||
ax.set_ylim(0, self.height)
|
||||
|
||||
# Draw rooms
|
||||
for room in self.rooms:
|
||||
# Draw room outline
|
||||
rect = Rectangle((room['x'], room['y']),
|
||||
room['width'], room['height'],
|
||||
facecolor='white',
|
||||
edgecolor='black',
|
||||
linewidth=2)
|
||||
ax.add_patch(rect)
|
||||
|
||||
# Add room label
|
||||
ax.text(room['x'] + room['width']/2,
|
||||
room['y'] + room['height']/2,
|
||||
room['type'],
|
||||
horizontalalignment='center',
|
||||
verticalalignment='center')
|
||||
|
||||
# Remove grid if not needed
|
||||
if not show_grid:
|
||||
ax.grid(False)
|
||||
|
||||
# Remove axis labels
|
||||
ax.set_xticks([])
|
||||
ax.set_yticks([])
|
||||
|
||||
# Save the floor plan
|
||||
        out_dir = os.path.dirname(output_path)
        if out_dir:  # A bare filename has no directory component; os.makedirs('') would raise
            os.makedirs(out_dir, exist_ok=True)
|
||||
plt.savefig(output_path, bbox_inches='tight', dpi=DisplayConfig.DPI)
|
||||
plt.close()
|
||||
|
||||
return output_path
|
||||
|
||||
def create_example_floor_plan():
|
||||
"""Create an example floor plan with typical office layout."""
|
||||
generator = FloorPlanGenerator(width=1000, height=800)
|
||||
|
||||
    # Add a fixed example office layout
|
||||
generator.add_room(100, 100, 200, 200, 'office')
|
||||
generator.add_room(400, 100, 200, 200, 'meeting')
|
||||
generator.add_room(100, 400, 200, 200, 'open_space')
|
||||
|
||||
# Save the floor plan
|
||||
output_path = generator.draw_floor_plan("example_floor_plan.png")
|
||||
return output_path
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Generate example floor plan
|
||||
output_path = create_example_floor_plan()
|
||||
print(f"Example floor plan generated: {output_path}")
|
||||
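Beyond drawing images, the generator's occupancy mask and perimeter polygon are what the propagation and optimization code consume. A short sketch of that flow (the room layout is arbitrary):

```python
from src.preprocessing.utils.floor_plan_generator import FloorPlanGenerator

gen = FloorPlanGenerator(width=1000, height=800, resolution=1.0)
gen.add_room(100, 100, 200, 200, 'office')
gen.add_room(400, 100, 200, 200, 'meeting')

mask = gen.get_building_mask()                  # Boolean grid, True inside rooms
polygon = gen.get_building_perimeter_polygon()  # Outer contour in real coordinates

print(mask.shape, int(mask.sum()), "cells inside the building")
print("Perimeter vertices:", len(polygon) if polygon else 0)
```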
232
src/preprocessing/utils/results_manager.py
Normal file
232
src/preprocessing/utils/results_manager.py
Normal file
@@ -0,0 +1,232 @@
|
||||
import os
|
||||
import json
|
||||
from datetime import datetime
|
||||
import shutil
|
||||
import pandas as pd
|
||||
|
||||
class ResultsManager:
|
||||
def __init__(self, base_dir="results"):
|
||||
"""Initialize the results manager.
|
||||
|
||||
Args:
|
||||
base_dir (str): Base directory for storing results
|
||||
"""
|
||||
self.base_dir = base_dir
|
||||
self.current_run = None
|
||||
|
||||
def start_new_run(self, description=None):
|
||||
"""Start a new test run.
|
||||
|
||||
Args:
|
||||
description (str): Optional description of the run
|
||||
|
||||
Returns:
|
||||
str: Path to the run directory
|
||||
"""
|
||||
# Create timestamp-based run ID
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
run_id = f"run_{timestamp}"
|
||||
|
||||
# Create run directory structure
|
||||
run_dir = os.path.join(self.base_dir, run_id)
|
||||
subdirs = ['data', 'visualizations', 'models', 'floor_plans']
|
||||
|
||||
os.makedirs(run_dir, exist_ok=True)
|
||||
for subdir in subdirs:
|
||||
os.makedirs(os.path.join(run_dir, subdir), exist_ok=True)
|
||||
|
||||
# Store run information
|
||||
self.current_run = {
|
||||
'id': run_id,
|
||||
'timestamp': timestamp,
|
||||
'description': description,
|
||||
'path': run_dir,
|
||||
'metrics': {},
|
||||
'files': {subdir: [] for subdir in subdirs}
|
||||
}
|
||||
|
||||
# Save initial run info
|
||||
self._save_run_info()
|
||||
|
||||
return run_dir
|
||||
|
||||
def save_data(self, data, filename, category='data'):
|
||||
"""Save data file to the current run.
|
||||
|
||||
Args:
|
||||
data (pd.DataFrame): Data to save
|
||||
filename (str): Name of the file
|
||||
category (str): Category of data (data, visualizations, models)
|
||||
"""
|
||||
if self.current_run is None:
|
||||
raise ValueError("No active run. Call start_new_run() first.")
|
||||
|
||||
filepath = os.path.join(self.current_run['path'], category, filename)
|
||||
|
||||
# Save based on file type
|
||||
if isinstance(data, pd.DataFrame):
|
||||
data.to_csv(filepath, index=False)
|
||||
else:
|
||||
# Assume it's a file to be copied
|
||||
shutil.copy2(data, filepath)
|
||||
|
||||
self.current_run['files'][category].append(filename)
|
||||
self._save_run_info()
|
||||
|
||||
def save_metrics(self, metrics, model_name):
|
||||
"""Save model metrics for the current run.
|
||||
|
||||
Args:
|
||||
metrics (dict): Dictionary of metrics
|
||||
model_name (str): Name of the model
|
||||
"""
|
||||
if self.current_run is None:
|
||||
raise ValueError("No active run. Call start_new_run() first.")
|
||||
|
||||
self.current_run['metrics'][model_name] = metrics
|
||||
self._save_run_info()
|
||||
|
||||
def save_visualization(self, figure_path, description=None):
|
||||
"""Save a visualization to the current run.
|
||||
|
||||
Args:
|
||||
figure_path (str): Path to the visualization file
|
||||
description (str): Optional description of the visualization
|
||||
"""
|
||||
if self.current_run is None:
|
||||
raise ValueError("No active run. Call start_new_run() first.")
|
||||
|
||||
filename = os.path.basename(figure_path)
|
||||
dest_path = os.path.join(self.current_run['path'], 'visualizations', filename)
|
||||
|
||||
shutil.copy2(figure_path, dest_path)
|
||||
|
||||
self.current_run['files']['visualizations'].append({
|
||||
'filename': filename,
|
||||
'description': description
|
||||
})
|
||||
self._save_run_info()
|
||||
|
||||
def save_floor_plan(self, floor_plan_path, floor_number=None, description=None):
|
||||
"""Save a floor plan to the current run.
|
||||
|
||||
Args:
|
||||
floor_plan_path (str): Path to the floor plan image
|
||||
floor_number (int): Optional floor number
|
||||
description (str): Optional description
|
||||
"""
|
||||
if self.current_run is None:
|
||||
raise ValueError("No active run. Call start_new_run() first.")
|
||||
|
||||
filename = os.path.basename(floor_plan_path)
|
||||
dest_path = os.path.join(self.current_run['path'], 'floor_plans', filename)
|
||||
|
||||
shutil.copy2(floor_plan_path, dest_path)
|
||||
|
||||
self.current_run['files']['floor_plans'].append({
|
||||
'filename': filename,
|
||||
'floor_number': floor_number,
|
||||
'description': description
|
||||
})
|
||||
self._save_run_info()
|
||||
|
||||
def _save_run_info(self):
|
||||
"""Save run information to JSON file."""
|
||||
info_path = os.path.join(self.current_run['path'], 'run_info.json')
|
||||
with open(info_path, 'w') as f:
|
||||
json.dump(self.current_run, f, indent=2)
|
||||
|
||||
def get_run_info(self, run_id=None):
|
||||
"""Get information about a specific run.
|
||||
|
||||
Args:
|
||||
run_id (str): ID of the run to get info for. If None, returns current run.
|
||||
|
||||
Returns:
|
||||
dict: Run information
|
||||
"""
|
||||
if run_id is None:
|
||||
if self.current_run is None:
|
||||
raise ValueError("No active run.")
|
||||
return self.current_run
|
||||
|
||||
info_path = os.path.join(self.base_dir, run_id, 'run_info.json')
|
||||
if not os.path.exists(info_path):
|
||||
raise ValueError(f"Run {run_id} not found.")
|
||||
|
||||
with open(info_path, 'r') as f:
|
||||
return json.load(f)
|
||||
|
||||
def list_runs(self):
|
||||
"""List all available runs.
|
||||
|
||||
Returns:
|
||||
list: List of run information dictionaries
|
||||
"""
|
||||
runs = []
|
||||
if os.path.exists(self.base_dir):
|
||||
for run_id in os.listdir(self.base_dir):
|
||||
try:
|
||||
runs.append(self.get_run_info(run_id))
|
||||
                except Exception:  # Skip entries that are not valid run directories
|
||||
continue
|
||||
return sorted(runs, key=lambda x: x['timestamp'], reverse=True)
|
||||
|
||||
def generate_report(self, run_id=None, output_path=None):
|
||||
"""Generate a markdown report for a run.
|
||||
|
||||
Args:
|
||||
run_id (str): ID of the run to report on. If None, uses current run.
|
||||
output_path (str): Path to save the report. If None, saves in run directory.
|
||||
"""
|
||||
run_info = self.get_run_info(run_id)
|
||||
|
||||
# Generate report content
|
||||
report = [
|
||||
f"# Test Run Report: {run_info['id']}",
|
||||
f"\nRun Date: {run_info['timestamp']}",
|
||||
]
|
||||
|
||||
if run_info['description']:
|
||||
report.append(f"\nDescription: {run_info['description']}")
|
||||
|
||||
# Add metrics section
|
||||
if run_info['metrics']:
|
||||
report.append("\n## Model Performance")
|
||||
for model_name, metrics in run_info['metrics'].items():
|
||||
report.append(f"\n### {model_name}")
|
||||
for metric, value in metrics.items():
|
||||
report.append(f"- {metric}: {value}")
|
||||
|
||||
# Add visualizations section
|
||||
if run_info['files']['visualizations']:
|
||||
report.append("\n## Visualizations")
|
||||
for viz in run_info['files']['visualizations']:
|
||||
desc = viz['description'] if isinstance(viz, dict) else 'No description'
|
||||
filename = viz['filename'] if isinstance(viz, dict) else viz
|
||||
report.append(f"\n### {filename}")
|
||||
report.append(f"Description: {desc}")
|
||||
report.append(f"")
|
||||
|
||||
# Add floor plans section
|
||||
if run_info['files']['floor_plans']:
|
||||
report.append("\n## Floor Plans")
|
||||
for plan in run_info['files']['floor_plans']:
|
||||
if isinstance(plan, dict):
|
||||
floor_num = f"Floor {plan.get('floor_number', '')}" if plan.get('floor_number') else ''
|
||||
desc = plan.get('description', 'No description')
|
||||
filename = plan['filename']
|
||||
report.append(f"\n### {floor_num}")
|
||||
report.append(f"Description: {desc}")
|
||||
report.append(f"")
|
||||
else:
|
||||
report.append(f"\n")
|
||||
|
||||
# Save report
|
||||
if output_path is None:
|
||||
output_path = os.path.join(run_info['path'], 'report.md')
|
||||
|
||||
with open(output_path, 'w') as f:
|
||||
f.write('\n'.join(report))
|
||||
|
||||
return output_path
|
||||
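A typical run lifecycle with `ResultsManager`, condensed into a few lines; the metric names and values are placeholders:

```python
import pandas as pd
from src.preprocessing.utils.results_manager import ResultsManager

rm = ResultsManager(base_dir="results")
run_dir = rm.start_new_run(description="Baseline 4-AP layout")

rm.save_data(pd.DataFrame({'x': [1.0], 'y': [2.0], 'rssi': [-58.0]}), "samples.csv")
rm.save_metrics({'rmse_db': 3.2, 'r2': 0.91}, model_name="random_forest")

report_path = rm.generate_report()
print(f"Run stored in {run_dir}, report at {report_path}")
```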
588
src/propagation/engines.py
Normal file
588
src/propagation/engines.py
Normal file
@@ -0,0 +1,588 @@
|
||||
"""Advanced propagation engines with precise physics models."""
|
||||
|
||||
import numpy as np
|
||||
from typing import List, Tuple, Optional, Union, Dict
|
||||
from abc import ABC, abstractmethod
|
||||
import logging
|
||||
from scipy import constants
|
||||
from scipy.optimize import minimize_scalar
|
||||
import warnings
|
||||
|
||||
# Import advanced materials
|
||||
from src.physics.materials import (
|
||||
AdvancedMaterial, Material, ADVANCED_MATERIALS,
|
||||
FrequencyDependentProperty, EPSILON_0, MU_0, C, ETA_0
|
||||
)
|
||||
|
||||
class PropagationEngine(ABC):
|
||||
"""Abstract base class for propagation engines."""
|
||||
|
||||
@abstractmethod
|
||||
def calculate_rssi(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid, **kwargs) -> float:
|
||||
"""Calculate RSSI at a point from an AP."""
|
||||
pass
|
||||
|
||||
class AdvancedPhysicsEngine(PropagationEngine):
|
||||
"""
|
||||
Advanced Physics Engine with precise electromagnetic modeling.
|
||||
|
||||
Features:
|
||||
- Frequency-dependent material properties
|
||||
- Angle-dependent attenuation using Snell's Law and Fresnel equations
|
||||
- Thickness-dependent exponential attenuation
|
||||
- Composite material handling
|
||||
- Surface roughness effects
|
||||
- Temperature-dependent properties
|
||||
- Multi-path interference modeling
|
||||
"""
|
||||
|
||||
def __init__(self, frequency: float = 2.4e9, temperature: float = 293.15):
|
||||
"""Initialize the advanced physics engine.
|
||||
|
||||
Args:
|
||||
frequency: Operating frequency in Hz
|
||||
temperature: Temperature in Kelvin
|
||||
"""
|
||||
self.frequency = frequency
|
||||
self.temperature = temperature
|
||||
self.wavelength = C / frequency
|
||||
self.k0 = 2 * np.pi / self.wavelength # Free space wavenumber
|
||||
|
||||
# Physical constants
|
||||
self.epsilon_0 = EPSILON_0
|
||||
self.mu_0 = MU_0
|
||||
self.eta_0 = ETA_0
|
||||
|
||||
# Engine configuration
|
||||
self.max_reflections = 3
|
||||
self.max_diffractions = 2
|
||||
self.include_surface_roughness = True
|
||||
self.include_temperature_effects = True
|
||||
self.use_composite_materials = True
|
||||
|
||||
logging.info(f"Advanced Physics Engine initialized at {frequency/1e9:.1f} GHz")
|
||||
|
||||
def calculate_rssi(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid, **kwargs) -> float:
|
||||
"""
|
||||
Calculate precise RSSI using advanced electromagnetic physics.
|
||||
|
||||
Args:
|
||||
ap: AP coordinates (x, y, z)
|
||||
point: Receiver coordinates (x, y, z)
|
||||
materials_grid: 3D grid of materials
|
||||
**kwargs: Additional parameters (tx_power, polarization, etc.)
|
||||
|
||||
Returns:
|
||||
RSSI in dBm
|
||||
"""
|
||||
tx_power = kwargs.get('tx_power', 20.0)
|
||||
polarization = kwargs.get('polarization', 'TE')
|
||||
|
||||
# Calculate direct path
|
||||
direct_rssi = self._calculate_direct_path(ap, point, materials_grid, tx_power, polarization)
|
||||
|
||||
# Calculate reflected paths
|
||||
reflected_rssi = self._calculate_reflected_paths(ap, point, materials_grid, tx_power, polarization)
|
||||
|
||||
# Calculate diffracted paths
|
||||
diffracted_rssi = self._calculate_diffracted_paths(ap, point, materials_grid, tx_power)
|
||||
|
||||
# Combine all paths using power addition
|
||||
total_rssi = self._combine_multipath_signals([direct_rssi, reflected_rssi, diffracted_rssi])
|
||||
|
||||
return total_rssi
|
||||
|
||||
def _calculate_direct_path(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid, tx_power: float,
|
||||
polarization: str) -> float:
|
||||
"""Calculate direct path RSSI with precise material modeling."""
|
||||
# Calculate distance
|
||||
distance = np.sqrt(sum((ap[i] - point[i])**2 for i in range(3)))
|
||||
|
||||
if distance < 1e-6:
|
||||
return tx_power # Very close to AP
|
||||
|
||||
# Free space path loss
|
||||
free_space_loss = 20 * np.log10(4 * np.pi * distance / self.wavelength)
|
||||
|
||||
# Material attenuation along the path
|
||||
material_attenuation = self._calculate_material_attenuation_3d(
|
||||
ap, point, materials_grid, polarization
|
||||
)
|
||||
|
||||
# Total RSSI
|
||||
rssi = tx_power - free_space_loss - material_attenuation
|
||||
|
||||
return rssi
|
||||
|
||||
def _calculate_material_attenuation_3d(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid, polarization: str) -> float:
|
||||
"""
|
||||
Calculate precise material attenuation along 3D path with angle dependence.
|
||||
"""
|
||||
if materials_grid is None:
|
||||
return 0.0
|
||||
|
||||
# Use 3D Bresenham algorithm to traverse the path
|
||||
path_points = self._get_3d_path_points(ap, point)
|
||||
|
||||
total_attenuation = 0.0
|
||||
seen_materials = set()
|
||||
|
||||
for i, (x, y, z) in enumerate(path_points):
|
||||
# Get material at this point
|
||||
material = self._get_material_at_point(x, y, z, materials_grid)
|
||||
|
||||
if material is None or material.name == 'Air':
|
||||
continue
|
||||
|
||||
# Calculate angle of incidence for this segment
|
||||
if i < len(path_points) - 1:
|
||||
next_point = path_points[i + 1]
|
||||
angle_of_incidence = self._calculate_angle_of_incidence(
|
||||
path_points[i], next_point, materials_grid
|
||||
)
|
||||
else:
|
||||
angle_of_incidence = 0.0
|
||||
|
||||
# Calculate segment length
|
||||
if i < len(path_points) - 1:
|
||||
segment_length = np.sqrt(sum((path_points[i+1][j] - path_points[i][j])**2 for j in range(3)))
|
||||
else:
|
||||
segment_length = 0.1 # Default segment length
|
||||
|
||||
# Calculate attenuation for this material segment
|
||||
if isinstance(material, AdvancedMaterial):
|
||||
segment_atten = material.calculate_total_attenuation_with_reflection(
|
||||
self.frequency, segment_length, angle_of_incidence, polarization
|
||||
)
|
||||
else:
|
||||
# Legacy material
|
||||
segment_atten = material.calculate_attenuation(self.frequency)
|
||||
# Apply angle correction
|
||||
if angle_of_incidence > 0:
|
||||
segment_atten /= np.cos(angle_of_incidence)
|
||||
|
||||
            # Avoid double-counting the same material cell (keyed by material name and grid position)
|
||||
material_key = (material.name, x, y, z)
|
||||
if material_key not in seen_materials:
|
||||
total_attenuation += segment_atten
|
||||
seen_materials.add(material_key)
|
||||
|
||||
return total_attenuation
|
||||
|
||||
def _get_3d_path_points(self, start: Tuple[float, float, float],
|
||||
end: Tuple[float, float, float]) -> List[Tuple[float, float, float]]:
|
||||
"""Get 3D path points using Bresenham algorithm."""
|
||||
x1, y1, z1 = start
|
||||
x2, y2, z2 = end
|
||||
|
||||
# Convert to grid coordinates (assuming 0.2m resolution)
|
||||
resolution = 0.2
|
||||
gx1, gy1, gz1 = int(x1 / resolution), int(y1 / resolution), int(z1 / resolution)
|
||||
gx2, gy2, gz2 = int(x2 / resolution), int(y2 / resolution), int(z2 / resolution)
|
||||
|
||||
# 3D Bresenham algorithm
|
||||
points = []
|
||||
dx = abs(gx2 - gx1)
|
||||
dy = abs(gy2 - gy1)
|
||||
dz = abs(gz2 - gz1)
|
||||
xs = 1 if gx2 > gx1 else -1
|
||||
ys = 1 if gy2 > gy1 else -1
|
||||
zs = 1 if gz2 > gz1 else -1
|
||||
# Driving axis is X
|
||||
if dx >= dy and dx >= dz:
|
||||
p1 = 2 * dy - dx
|
||||
p2 = 2 * dz - dx
|
||||
while gx1 != gx2:
|
||||
points.append((gx1 * resolution, gy1 * resolution, gz1 * resolution))
|
||||
if p1 >= 0:
|
||||
gy1 += ys
|
||||
p1 -= 2 * dx
|
||||
if p2 >= 0:
|
||||
gz1 += zs
|
||||
p2 -= 2 * dx
|
||||
p1 += 2 * dy
|
||||
p2 += 2 * dz
|
||||
gx1 += xs
|
||||
# Driving axis is Y
|
||||
elif dy >= dx and dy >= dz:
|
||||
p1 = 2 * dx - dy
|
||||
p2 = 2 * dz - dy
|
||||
while gy1 != gy2:
|
||||
points.append((gx1 * resolution, gy1 * resolution, gz1 * resolution))
|
||||
if p1 >= 0:
|
||||
gx1 += xs
|
||||
p1 -= 2 * dy
|
||||
if p2 >= 0:
|
||||
gz1 += zs
|
||||
p2 -= 2 * dy
|
||||
p1 += 2 * dx
|
||||
p2 += 2 * dz
|
||||
gy1 += ys
|
||||
# Driving axis is Z
|
||||
else:
|
||||
p1 = 2 * dy - dz
|
||||
p2 = 2 * dx - dz
|
||||
while gz1 != gz2:
|
||||
points.append((gx1 * resolution, gy1 * resolution, gz1 * resolution))
|
||||
if p1 >= 0:
|
||||
gy1 += ys
|
||||
p1 -= 2 * dz
|
||||
if p2 >= 0:
|
||||
gx1 += xs
|
||||
p2 -= 2 * dz
|
||||
p1 += 2 * dy
|
||||
p2 += 2 * dx
|
||||
gz1 += zs
|
||||
points.append((gx2 * resolution, gy2 * resolution, gz2 * resolution))
|
||||
return points
|
||||
|
||||
def _get_material_at_point(self, x: float, y: float, z: float,
|
||||
materials_grid) -> Optional[Union[Material, AdvancedMaterial]]:
|
||||
"""Get material at a specific 3D point."""
|
||||
if materials_grid is None:
|
||||
return None
|
||||
|
||||
# Convert to grid coordinates
|
||||
resolution = 0.2
|
||||
gx = int(x / resolution)
|
||||
gy = int(y / resolution)
|
||||
gz = int(z / resolution)
|
||||
|
||||
# Check bounds
|
||||
if (0 <= gz < len(materials_grid) and
|
||||
0 <= gy < len(materials_grid[0]) and
|
||||
0 <= gx < len(materials_grid[0][0])):
|
||||
return materials_grid[gz][gy][gx]
|
||||
|
||||
return None
|
||||
|
||||
def _calculate_angle_of_incidence(self, point1: Tuple[float, float, float],
|
||||
point2: Tuple[float, float, float],
|
||||
materials_grid) -> float:
|
||||
"""Calculate angle of incidence with respect to material surface."""
|
||||
# For simplicity, assume normal incidence
|
||||
# In a more advanced implementation, this would calculate the actual angle
|
||||
# based on surface normal vectors
|
||||
return 0.0
|
||||
|
||||
def _calculate_reflected_paths(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid, tx_power: float,
|
||||
polarization: str) -> float:
|
||||
"""Calculate reflected path contributions."""
|
||||
if self.max_reflections == 0:
|
||||
return -100.0 # No reflections
|
||||
|
||||
# Find major reflecting surfaces (walls, floor, ceiling)
|
||||
reflecting_surfaces = self._find_reflecting_surfaces(ap, point, materials_grid)
|
||||
|
||||
reflected_signals = []
|
||||
|
||||
for surface in reflecting_surfaces[:self.max_reflections]:
|
||||
# Calculate reflection point
|
||||
reflection_point = self._calculate_reflection_point(ap, point, surface)
|
||||
|
||||
if reflection_point is None:
|
||||
continue
|
||||
|
||||
# Calculate reflected path
|
||||
reflected_rssi = self._calculate_reflected_path(
|
||||
ap, reflection_point, point, surface, tx_power, polarization
|
||||
)
|
||||
|
||||
if reflected_rssi > -100:
|
||||
reflected_signals.append(reflected_rssi)
|
||||
|
||||
# Combine reflected signals
|
||||
if reflected_signals:
|
||||
return self._combine_multipath_signals(reflected_signals)
|
||||
else:
|
||||
return -100.0
|
||||
|
||||
def _find_reflecting_surfaces(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid) -> List[Dict]:
|
||||
"""Find major reflecting surfaces in the environment."""
|
||||
surfaces = []
|
||||
|
||||
# Add floor and ceiling as reflecting surfaces
|
||||
surfaces.append({
|
||||
'type': 'floor',
|
||||
'z': 0.0,
|
||||
'material': ADVANCED_MATERIALS.get('concrete', None)
|
||||
})
|
||||
|
||||
surfaces.append({
|
||||
'type': 'ceiling',
|
||||
'z': 3.0, # Assume 3m ceiling height
|
||||
'material': ADVANCED_MATERIALS.get('concrete', None)
|
||||
})
|
||||
|
||||
# Add major walls (simplified)
|
||||
# In a full implementation, this would analyze the materials_grid
|
||||
# to find wall surfaces
|
||||
|
||||
return surfaces
|
||||
|
||||
def _calculate_reflection_point(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
surface: Dict) -> Optional[Tuple[float, float, float]]:
|
||||
"""Calculate reflection point on a surface."""
|
||||
if surface['type'] == 'floor':
|
||||
# Reflect AP across the floor
|
||||
return (ap[0], ap[1], -ap[2])
|
||||
elif surface['type'] == 'ceiling':
|
||||
# Reflect AP across the ceiling
|
||||
ceiling_z = surface['z']
|
||||
return (ap[0], ap[1], 2 * ceiling_z - ap[2])
|
||||
|
||||
return None
|
||||
|
||||
def _calculate_reflected_path(self, ap: Tuple[float, float, float],
|
||||
reflection_point: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
surface: Dict, tx_power: float,
|
||||
polarization: str) -> float:
|
||||
"""Calculate RSSI for a reflected path."""
|
||||
# Distance from AP to reflection point to receiver
|
||||
d1 = np.sqrt(sum((ap[i] - reflection_point[i])**2 for i in range(3)))
|
||||
d2 = np.sqrt(sum((reflection_point[i] - point[i])**2 for i in range(3)))
|
||||
total_distance = d1 + d2
|
||||
|
||||
# Free space path loss
|
||||
free_space_loss = 20 * np.log10(4 * np.pi * total_distance / self.wavelength)
|
||||
|
||||
# Reflection loss
|
||||
if surface['material'] is not None:
|
||||
reflection_coeff = surface['material'].calculate_reflection_coefficient(
|
||||
self.frequency, 0.0, polarization # Normal incidence
|
||||
)
|
||||
reflection_loss = -10 * np.log10(np.abs(reflection_coeff)**2)
|
||||
else:
|
||||
reflection_loss = 6.0 # Default reflection loss
|
||||
|
||||
# Material attenuation (simplified)
|
||||
material_attenuation = 0.0 # Could be calculated along the path
|
||||
|
||||
# Total RSSI
|
||||
rssi = tx_power - free_space_loss - reflection_loss - material_attenuation
|
||||
|
||||
return rssi
|
||||
|
||||
def _calculate_diffracted_paths(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid, tx_power: float) -> float:
|
||||
"""Calculate diffracted path contributions."""
|
||||
if self.max_diffractions == 0:
|
||||
return -100.0 # No diffractions
|
||||
|
||||
# Simplified diffraction model
|
||||
# Count obstacles along the direct path
|
||||
obstacles = self._count_obstacles_along_path(ap, point, materials_grid)
|
||||
|
||||
if obstacles == 0:
|
||||
return -100.0 # No obstacles, no diffraction
|
||||
|
||||
# Diffraction loss (simplified)
|
||||
diffraction_loss = obstacles * 3.0 # 3dB per obstacle
|
||||
|
||||
# Calculate diffracted path RSSI
|
||||
distance = np.sqrt(sum((ap[i] - point[i])**2 for i in range(3)))
|
||||
free_space_loss = 20 * np.log10(4 * np.pi * distance / self.wavelength)
|
||||
|
||||
rssi = tx_power - free_space_loss - diffraction_loss
|
||||
|
||||
return rssi
|
||||
|
||||
def _count_obstacles_along_path(self, ap: Tuple[float, float, float],
|
||||
point: Tuple[float, float, float],
|
||||
materials_grid) -> int:
|
||||
"""Count obstacles along the direct path."""
|
||||
if materials_grid is None:
|
||||
return 0
|
||||
|
||||
path_points = self._get_3d_path_points(ap, point)
|
||||
obstacles = 0
|
||||
|
||||
for x, y, z in path_points:
|
||||
material = self._get_material_at_point(x, y, z, materials_grid)
|
||||
if material is not None and material.name != 'Air':
|
||||
obstacles += 1
|
||||
|
||||
return obstacles
|
||||
|
||||
def _combine_multipath_signals(self, signals: List[float]) -> float:
|
||||
"""Combine multiple signals using power addition."""
|
||||
if not signals:
|
||||
return -100.0
|
||||
|
||||
# Convert dBm to mW
|
||||
powers_mw = [10**(signal/10) for signal in signals if signal > -100]
|
||||
|
||||
if not powers_mw:
|
||||
return -100.0
|
||||
|
||||
# Sum powers
|
||||
total_power_mw = sum(powers_mw)
|
||||
|
||||
# Convert back to dBm
|
||||
total_rssi = 10 * np.log10(total_power_mw)
|
||||
|
||||
return total_rssi
|
||||
|
||||
def calculate_rssi_grid(self, ap: Tuple[float, float, float],
|
||||
points: List[Tuple[float, float, float]],
|
||||
materials_grid, **kwargs) -> np.ndarray:
|
||||
"""Calculate RSSI for a grid of points efficiently."""
|
||||
rssi_values = []
|
||||
|
||||
for point in points:
|
||||
rssi = self.calculate_rssi(ap, point, materials_grid, **kwargs)
|
||||
rssi_values.append(rssi)
|
||||
|
||||
return np.array(rssi_values)
|
||||
|
||||
class FastRayTracingEngine(PropagationEngine):
|
||||
"""
|
||||
Fast Ray Tracing Engine: Optimized version with advanced physics.
|
||||
"""
|
||||
def calculate_rssi(self, ap, point, materials_grid, **kwargs):
|
||||
# Use the advanced physics engine for calculations
|
||||
advanced_engine = AdvancedPhysicsEngine(
|
||||
frequency=kwargs.get('frequency', 2.4e9),
|
||||
temperature=kwargs.get('temperature', 293.15)
|
||||
)
|
||||
|
||||
return advanced_engine.calculate_rssi(ap, point, materials_grid, **kwargs)
|
||||
|
||||
class Cost231Engine(PropagationEngine):
|
||||
"""
|
||||
COST-231 Hata Model Engine with material corrections.
|
||||
"""
|
||||
def calculate_rssi(self, ap, point, materials_grid, **kwargs):
|
||||
# Extract coordinates
|
||||
ap_x, ap_y, ap_z = ap if len(ap) == 3 else (ap[0], ap[1], 0)
|
||||
x, y, z = point if len(point) == 3 else (point[0], point[1], 0)
|
||||
|
||||
# Calculate distance
|
||||
distance = np.sqrt((x - ap_x)**2 + (y - ap_y)**2 + (z - ap_z)**2)
|
||||
|
||||
if distance < 1e-3:
|
||||
return kwargs.get('tx_power', 20.0)
|
||||
|
||||
# COST-231 Hata model parameters
|
||||
frequency = kwargs.get('frequency', 2400) # MHz
|
||||
tx_power = kwargs.get('tx_power', 20.0)
|
||||
        ap_height = max(ap_z, 1.0)  # Guard against log10(0) when the AP sits at floor level
|
||||
rx_height = z
|
||||
|
||||
# COST-231 Hata path loss
|
||||
if frequency < 1500:
|
||||
# COST-231 Hata model for 900-1500 MHz
|
||||
            path_loss = 46.3 + 33.9 * np.log10(frequency) - 13.82 * np.log10(ap_height) - \
                        (1.1 * np.log10(frequency) - 0.7) * rx_height + \
                        (1.56 * np.log10(frequency) - 0.8) + \
                        (44.9 - 6.55 * np.log10(ap_height)) * np.log10(distance / 1000)
|
||||
else:
|
||||
# COST-231 Hata model for 1500-2000 MHz
|
||||
            path_loss = 46.3 + 33.9 * np.log10(frequency) - 13.82 * np.log10(ap_height) - \
                        (1.1 * np.log10(frequency) - 0.7) * rx_height + \
                        3.0 + \
                        (44.9 - 6.55 * np.log10(ap_height)) * np.log10(distance / 1000)
|
||||
|
||||
# Add material attenuation
|
||||
material_attenuation = self._calculate_material_attenuation(ap, point, materials_grid)
|
||||
|
||||
# Calculate RSSI
|
||||
rssi = tx_power - path_loss - material_attenuation
|
||||
|
||||
return rssi
|
||||
|
||||
def _calculate_material_attenuation(self, ap, point, materials_grid):
|
||||
"""Calculate material attenuation for COST-231 model."""
|
||||
if materials_grid is None:
|
||||
return 0.0
|
||||
|
||||
# Simplified material attenuation calculation
|
||||
# In a full implementation, this would traverse the path
|
||||
return 0.0
|
||||
|
||||
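As a sanity check on the corrected distance term, the path loss the 1500-2000 MHz branch above produces for a 2 m AP, a 1.5 m receiver, and a 100 m link at 1800 MHz can be reproduced with plain arithmetic:

```python
import numpy as np

f, hb, hm, d_km = 1800.0, 2.0, 1.5, 0.1  # MHz, m, m, km
path_loss = (46.3 + 33.9 * np.log10(f) - 13.82 * np.log10(hb)
             - (1.1 * np.log10(f) - 0.7) * hm + 3.0
             + (44.9 - 6.55 * np.log10(hb)) * np.log10(d_km))
print(f"COST-231 path loss: {path_loss:.1f} dB")  # ~108 dB for these inputs
```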
class VPLEEngine(PropagationEngine):
|
||||
"""
|
||||
Variable Path Loss Exponent Engine with machine learning enhancements.
|
||||
"""
|
||||
def __init__(self, ml_model=None):
|
||||
self.ml_model = ml_model
|
||||
self.base_path_loss_exponent = 2.0
|
||||
|
||||
def calculate_rssi(self, ap, point, materials_grid, **kwargs):
|
||||
# Extract coordinates
|
||||
ap_x, ap_y, ap_z = ap if len(ap) == 3 else (ap[0], ap[1], 0)
|
||||
x, y, z = point if len(point) == 3 else (point[0], point[1], 0)
|
||||
|
||||
# Calculate distance
|
||||
distance = np.sqrt((x - ap_x)**2 + (y - ap_y)**2 + (z - ap_z)**2)
|
||||
|
||||
if distance < 1e-3:
|
||||
return kwargs.get('tx_power', 20.0)
|
||||
|
||||
# Calculate path loss exponent based on environment
|
||||
path_loss_exponent = self._calculate_path_loss_exponent(ap, point, materials_grid)
|
||||
|
||||
# Variable path loss model
|
||||
frequency = kwargs.get('frequency', 2400) # MHz
|
||||
tx_power = kwargs.get('tx_power', 20.0)
|
||||
|
||||
# Reference distance and path loss
|
||||
d0 = 1.0 # Reference distance in meters
|
||||
PL0 = 20 * np.log10(4 * np.pi * d0 * frequency * 1e6 / 3e8)
|
||||
|
||||
# Path loss
|
||||
path_loss = PL0 + 10 * path_loss_exponent * np.log10(distance / d0)
|
||||
|
||||
# Add material attenuation
|
||||
material_attenuation = self._calculate_material_attenuation(ap, point, materials_grid)
|
||||
|
||||
# Calculate RSSI
|
||||
rssi = tx_power - path_loss - material_attenuation
|
||||
|
||||
return rssi
|
||||
|
||||
def _calculate_path_loss_exponent(self, ap, point, materials_grid):
|
||||
"""Calculate path loss exponent based on environment complexity."""
|
||||
if materials_grid is None:
|
||||
return self.base_path_loss_exponent
|
||||
|
||||
# Count obstacles along the path
|
||||
obstacles = self._count_obstacles(ap, point, materials_grid)
|
||||
|
||||
# Adjust path loss exponent based on obstacles
|
||||
if obstacles == 0:
|
||||
return 2.0 # Free space
|
||||
elif obstacles < 5:
|
||||
return 2.5 # Light obstacles
|
||||
elif obstacles < 10:
|
||||
return 3.0 # Medium obstacles
|
||||
else:
|
||||
return 3.5 # Heavy obstacles
|
||||
|
||||
def _count_obstacles(self, ap, point, materials_grid):
|
||||
"""Count obstacles along the path."""
|
||||
# Simplified obstacle counting
|
||||
return 0
|
||||
|
||||
def _calculate_material_attenuation(self, ap, point, materials_grid):
|
||||
"""Calculate material attenuation for VPLE model."""
|
||||
if materials_grid is None:
|
||||
return 0.0
|
||||
|
||||
# Simplified material attenuation calculation
|
||||
return 0.0
|
||||
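All engines report RSSI in dBm, and `AdvancedPhysicsEngine` merges the direct, reflected, and diffracted contributions by summing power in the linear domain before converting back to dB. A standalone sketch of that combination step plus a free-space engine call (passing `materials_grid=None` skips material attenuation; the coordinates and transmit power are arbitrary, and the package is assumed importable from the repository root):

```python
import numpy as np
from src.propagation.engines import AdvancedPhysicsEngine

# Power-domain combination: two -60 dBm paths add up to roughly -57 dBm
paths_dbm = [-60.0, -60.0]
combined = 10 * np.log10(sum(10 ** (p / 10) for p in paths_dbm))
print(f"Combined multipath RSSI: {combined:.1f} dBm")

engine = AdvancedPhysicsEngine(frequency=2.4e9)
rssi = engine.calculate_rssi(ap=(1.0, 1.0, 2.7), point=(8.0, 5.0, 1.5),
                             materials_grid=None, tx_power=20.0)
print(f"Predicted RSSI without obstructions: {rssi:.1f} dBm")
```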
585
src/utils/error_handling.py
Normal file
585
src/utils/error_handling.py
Normal file
@@ -0,0 +1,585 @@
|
||||
"""
|
||||
Comprehensive Error Handling and Logging System
|
||||
|
||||
This module provides:
|
||||
- Robust exception handling for all critical operations
|
||||
- Comprehensive input validation
|
||||
- Detailed logging at multiple levels
|
||||
- Performance monitoring and profiling
|
||||
- Graceful degradation and fallback mechanisms
|
||||
"""
|
||||
|
||||
import logging
|
||||
import traceback
|
||||
import sys
|
||||
import time
|
||||
import functools
|
||||
import warnings
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
import numpy as np
|
||||
from pathlib import Path
|
||||
import json
|
||||
import inspect

# Module-level logger; the handler classes below log through this
logger = logging.getLogger(__name__)
|
||||
|
||||
class LogLevel(Enum):
|
||||
"""Log levels for different types of information."""
|
||||
DEBUG = "DEBUG"
|
||||
INFO = "INFO"
|
||||
WARNING = "WARNING"
|
||||
ERROR = "ERROR"
|
||||
CRITICAL = "CRITICAL"
|
||||
|
||||
class ErrorSeverity(Enum):
|
||||
"""Error severity levels."""
|
||||
LOW = "low"
|
||||
MEDIUM = "medium"
|
||||
HIGH = "high"
|
||||
CRITICAL = "critical"
|
||||
|
||||
@dataclass
|
||||
class ValidationError:
|
||||
"""Structured validation error information."""
|
||||
field_name: str
|
||||
value: Any
|
||||
expected_type: str
|
||||
constraint: str
|
||||
severity: ErrorSeverity
|
||||
message: str
|
||||
|
||||
@dataclass
|
||||
class PerformanceMetric:
|
||||
"""Performance metric tracking."""
|
||||
operation_name: str
|
||||
execution_time: float
|
||||
memory_usage: Optional[float] = None
|
||||
cpu_usage: Optional[float] = None
|
||||
timestamp: float = field(default_factory=time.time)
|
||||
|
||||
class ErrorHandler:
|
||||
"""
|
||||
Comprehensive error handling and logging system.
|
||||
"""
|
||||
|
||||
def __init__(self, log_file: Optional[str] = None, log_level: LogLevel = LogLevel.INFO):
|
||||
"""Initialize the error handler."""
|
||||
self.log_file = log_file
|
||||
self.log_level = log_level
|
||||
self.validation_errors: List[ValidationError] = []
|
||||
self.performance_metrics: List[PerformanceMetric] = []
|
||||
self.error_count = 0
|
||||
self.warning_count = 0
|
||||
|
||||
# Setup logging
|
||||
self._setup_logging()
|
||||
|
||||
# Performance tracking
|
||||
self.operation_timers = {}
|
||||
|
||||
logger.info("Error Handler initialized")
|
||||
|
||||
def _setup_logging(self):
|
||||
"""Setup comprehensive logging configuration."""
|
||||
# Create formatter
|
||||
formatter = logging.Formatter(
|
||||
'%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s'
|
||||
)
|
||||
|
||||
# Setup root logger
|
||||
root_logger = logging.getLogger()
|
||||
root_logger.setLevel(getattr(logging, self.log_level.value))
|
||||
|
||||
# Console handler
|
||||
console_handler = logging.StreamHandler(sys.stdout)
|
||||
console_handler.setLevel(getattr(logging, self.log_level.value))
|
||||
console_handler.setFormatter(formatter)
|
||||
root_logger.addHandler(console_handler)
|
||||
|
||||
# File handler (if specified)
|
||||
if self.log_file:
|
||||
file_handler = logging.FileHandler(self.log_file)
|
||||
file_handler.setLevel(logging.DEBUG) # Always log everything to file
|
||||
file_handler.setFormatter(formatter)
|
||||
root_logger.addHandler(file_handler)
|
||||
|
||||
# Suppress warnings from specific libraries
|
||||
warnings.filterwarnings("ignore", category=UserWarning, module="matplotlib")
|
||||
warnings.filterwarnings("ignore", category=DeprecationWarning, module="numpy")
|
||||
|
||||
logger.info(f"Logging configured - Level: {self.log_level.value}, File: {self.log_file}")
|
||||
|
||||
def validate_input(self, value: Any, expected_type: Union[type, Tuple[type, ...]],
|
||||
field_name: str = "", constraints: Dict[str, Any] = None) -> bool:
|
||||
"""
|
||||
Validate input with comprehensive error reporting.
|
||||
|
||||
Args:
|
||||
value: Value to validate
|
||||
expected_type: Expected type(s)
|
||||
field_name: Name of the field being validated
|
||||
constraints: Additional constraints (min, max, pattern, etc.)
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
try:
|
||||
# Type validation
|
||||
if not isinstance(value, expected_type):
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=value,
|
||||
expected_type=str(expected_type),
|
||||
constraint="type",
|
||||
severity=ErrorSeverity.HIGH,
|
||||
message=f"Expected {expected_type}, got {type(value)}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.error(f"Validation error: {error.message}")
|
||||
return False
|
||||
|
||||
# Additional constraints
|
||||
if constraints:
|
||||
if not self._check_constraints(value, constraints, field_name):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during validation of {field_name}: {e}")
|
||||
return False
|
||||
|
||||
def _check_constraints(self, value: Any, constraints: Dict[str, Any], field_name: str) -> bool:
|
||||
"""Check additional constraints on a value."""
|
||||
try:
|
||||
# Numeric constraints
|
||||
if isinstance(value, (int, float, np.number)):
|
||||
if 'min' in constraints and value < constraints['min']:
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=value,
|
||||
expected_type="numeric",
|
||||
constraint=f"min={constraints['min']}",
|
||||
severity=ErrorSeverity.MEDIUM,
|
||||
message=f"Value {value} is below minimum {constraints['min']}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.warning(f"Constraint violation: {error.message}")
|
||||
return False
|
||||
|
||||
if 'max' in constraints and value > constraints['max']:
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=value,
|
||||
expected_type="numeric",
|
||||
constraint=f"max={constraints['max']}",
|
||||
severity=ErrorSeverity.MEDIUM,
|
||||
message=f"Value {value} is above maximum {constraints['max']}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.warning(f"Constraint violation: {error.message}")
|
||||
return False
|
||||
|
||||
# String constraints
|
||||
if isinstance(value, str):
|
||||
if 'min_length' in constraints and len(value) < constraints['min_length']:
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=value,
|
||||
expected_type="string",
|
||||
constraint=f"min_length={constraints['min_length']}",
|
||||
severity=ErrorSeverity.MEDIUM,
|
||||
message=f"String length {len(value)} is below minimum {constraints['min_length']}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.warning(f"Constraint violation: {error.message}")
|
||||
return False
|
||||
|
||||
if 'max_length' in constraints and len(value) > constraints['max_length']:
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=value,
|
||||
expected_type="string",
|
||||
constraint=f"max_length={constraints['max_length']}",
|
||||
severity=ErrorSeverity.MEDIUM,
|
||||
message=f"String length {len(value)} is above maximum {constraints['max_length']}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.warning(f"Constraint violation: {error.message}")
|
||||
return False
|
||||
|
||||
# Array/list constraints
|
||||
if isinstance(value, (list, np.ndarray)):
|
||||
if 'min_length' in constraints and len(value) < constraints['min_length']:
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=value,
|
||||
expected_type="array",
|
||||
constraint=f"min_length={constraints['min_length']}",
|
||||
severity=ErrorSeverity.MEDIUM,
|
||||
message=f"Array length {len(value)} is below minimum {constraints['min_length']}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.warning(f"Constraint violation: {error.message}")
|
||||
return False
|
||||
|
||||
if 'max_length' in constraints and len(value) > constraints['max_length']:
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=value,
|
||||
expected_type="array",
|
||||
constraint=f"max_length={constraints['max_length']}",
|
||||
severity=ErrorSeverity.MEDIUM,
|
||||
message=f"Array length {len(value)} is above maximum {constraints['max_length']}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.warning(f"Constraint violation: {error.message}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking constraints for {field_name}: {e}")
|
||||
return False
|
||||
|
||||
def safe_operation(self, operation: Callable, *args, fallback_value: Any = None,
|
||||
operation_name: str = "", **kwargs) -> Any:
|
||||
"""
|
||||
Execute an operation with comprehensive error handling.
|
||||
|
||||
Args:
|
||||
operation: Function to execute
|
||||
*args: Arguments for the operation
|
||||
fallback_value: Value to return if operation fails
|
||||
operation_name: Name of the operation for logging
|
||||
**kwargs: Keyword arguments for the operation
|
||||
|
||||
Returns:
|
||||
Result of operation or fallback value
|
||||
"""
|
||||
if not operation_name:
|
||||
operation_name = operation.__name__
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
logger.debug(f"Starting operation: {operation_name}")
|
||||
|
||||
# Execute operation
|
||||
result = operation(*args, **kwargs)
|
||||
|
||||
# Record performance
|
||||
execution_time = time.time() - start_time
|
||||
metric = PerformanceMetric(
|
||||
operation_name=operation_name,
|
||||
execution_time=execution_time
|
||||
)
|
||||
self.performance_metrics.append(metric)
|
||||
|
||||
logger.debug(f"Operation {operation_name} completed in {execution_time:.4f}s")
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
execution_time = time.time() - start_time
|
||||
self.error_count += 1
|
||||
|
||||
# Log detailed error information
|
||||
logger.error(f"Operation {operation_name} failed after {execution_time:.4f}s")
|
||||
logger.error(f"Error type: {type(e).__name__}")
|
||||
logger.error(f"Error message: {str(e)}")
|
||||
logger.error(f"Traceback: {traceback.format_exc()}")
|
||||
|
||||
# Record failed operation
|
||||
metric = PerformanceMetric(
|
||||
operation_name=f"{operation_name}_FAILED",
|
||||
execution_time=execution_time
|
||||
)
|
||||
self.performance_metrics.append(metric)
|
||||
|
||||
if fallback_value is not None:
|
||||
logger.info(f"Using fallback value for {operation_name}")
|
||||
return fallback_value
|
||||
else:
|
||||
raise
|
||||
|
||||
def performance_monitor(self, operation_name: str = ""):
|
||||
"""
|
||||
Decorator for performance monitoring.
|
||||
|
||||
Usage:
|
||||
@error_handler.performance_monitor("my_operation")
|
||||
def my_function():
|
||||
pass
|
||||
"""
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
name = operation_name or func.__name__
|
||||
return self.safe_operation(func, *args, operation_name=name, **kwargs)
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
def validate_config(self, config: Dict[str, Any]) -> bool:
|
||||
"""
|
||||
Validate configuration dictionary with comprehensive checks.
|
||||
|
||||
Args:
|
||||
config: Configuration dictionary to validate
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
logger.info("Validating configuration...")
|
||||
|
||||
# Required fields
|
||||
required_fields = {
|
||||
'building_width': (float, {'min': 0.1, 'max': 1000.0}),
|
||||
'building_length': (float, {'min': 0.1, 'max': 1000.0}),
|
||||
'building_height': (float, {'min': 0.1, 'max': 100.0}),
|
||||
'target_coverage': (float, {'min': 0.0, 'max': 1.0}),
|
||||
}
|
||||
|
||||
for field_name, (expected_type, constraints) in required_fields.items():
|
||||
if field_name not in config:
|
||||
error = ValidationError(
|
||||
field_name=field_name,
|
||||
value=None,
|
||||
expected_type=str(expected_type),
|
||||
constraint="required",
|
||||
severity=ErrorSeverity.CRITICAL,
|
||||
message=f"Required field '{field_name}' is missing"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.error(f"Missing required field: {field_name}")
|
||||
return False
|
||||
|
||||
if not self.validate_input(config[field_name], expected_type, field_name, constraints):
|
||||
return False
|
||||
|
||||
# Optional fields with validation
|
||||
optional_fields = {
|
||||
'tx_power': (float, {'min': -10.0, 'max': 30.0}),
|
||||
'frequency': (float, {'min': 1e9, 'max': 10e9}),
|
||||
'noise_floor': (float, {'min': -120.0, 'max': -50.0}),
|
||||
}
|
||||
|
||||
for field_name, (expected_type, constraints) in optional_fields.items():
|
||||
if field_name in config:
|
||||
if not self.validate_input(config[field_name], expected_type, field_name, constraints):
|
||||
logger.warning(f"Optional field {field_name} has invalid value")
|
||||
|
||||
logger.info("Configuration validation completed")
|
||||
return len([e for e in self.validation_errors if e.severity == ErrorSeverity.CRITICAL]) == 0
|
||||
|
||||
def validate_materials_grid(self, materials_grid: np.ndarray,
|
||||
expected_shape: Tuple[int, int, int]) -> bool:
|
||||
"""
|
||||
Validate materials grid with comprehensive checks.
|
||||
|
||||
Args:
|
||||
materials_grid: 3D materials grid to validate
|
||||
expected_shape: Expected shape (z, y, x)
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
logger.info("Validating materials grid...")
|
||||
|
||||
# Type validation
|
||||
if not self.validate_input(materials_grid, np.ndarray, "materials_grid"):
|
||||
return False
|
||||
|
||||
# Shape validation
|
||||
if materials_grid.shape != expected_shape:
|
||||
error = ValidationError(
|
||||
field_name="materials_grid",
|
||||
value=materials_grid.shape,
|
||||
expected_type=f"shape {expected_shape}",
|
||||
constraint="shape",
|
||||
severity=ErrorSeverity.CRITICAL,
|
||||
message=f"Expected shape {expected_shape}, got {materials_grid.shape}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.error(f"Materials grid shape mismatch: {error.message}")
|
||||
return False
|
||||
|
||||
# Check for NaN or infinite values
|
||||
if np.any(np.isnan(materials_grid)):
|
||||
logger.warning("Materials grid contains NaN values")
|
||||
|
||||
if np.any(np.isinf(materials_grid)):
|
||||
logger.warning("Materials grid contains infinite values")
|
||||
|
||||
# Check for negative material IDs
|
||||
if np.any(materials_grid < 0):
|
||||
logger.warning("Materials grid contains negative material IDs")
|
||||
|
||||
logger.info("Materials grid validation completed")
|
||||
return True
|
||||
|
||||
def validate_ap_locations(self, ap_locations: List[Tuple[float, float, float]],
|
||||
building_dimensions: Tuple[float, float, float]) -> bool:
|
||||
"""
|
||||
Validate AP locations with boundary checks.
|
||||
|
||||
Args:
|
||||
ap_locations: List of AP coordinates
|
||||
building_dimensions: Building dimensions (width, length, height)
|
||||
|
||||
Returns:
|
||||
True if valid, False otherwise
|
||||
"""
|
||||
logger.info("Validating AP locations...")
|
||||
|
||||
if not self.validate_input(ap_locations, list, "ap_locations"):
|
||||
return False
|
||||
|
||||
width, length, height = building_dimensions
|
||||
|
||||
for i, ap_location in enumerate(ap_locations):
|
||||
if not self.validate_input(ap_location, tuple, f"ap_location_{i}"):
|
||||
return False
|
||||
|
||||
if len(ap_location) != 3:
|
||||
error = ValidationError(
|
||||
field_name=f"ap_location_{i}",
|
||||
value=ap_location,
|
||||
expected_type="tuple of length 3",
|
||||
constraint="length",
|
||||
severity=ErrorSeverity.HIGH,
|
||||
message=f"AP location must have 3 coordinates, got {len(ap_location)}"
|
||||
)
|
||||
self.validation_errors.append(error)
|
||||
logger.error(f"AP location validation error: {error.message}")
|
||||
return False
|
||||
|
||||
x, y, z = ap_location
|
||||
|
||||
# Check bounds
|
||||
if not (0 <= x <= width):
|
||||
logger.warning(f"AP {i} x-coordinate {x} is outside building width [0, {width}]")
|
||||
|
||||
if not (0 <= y <= length):
|
||||
logger.warning(f"AP {i} y-coordinate {y} is outside building length [0, {length}]")
|
||||
|
||||
if not (0 <= z <= height):
|
||||
logger.warning(f"AP {i} z-coordinate {z} is outside building height [0, {height}]")
|
||||
|
||||
logger.info("AP locations validation completed")
|
||||
return True
|
||||
|
||||
def get_validation_report(self) -> Dict[str, Any]:
|
||||
"""Get comprehensive validation report."""
|
||||
critical_errors = [e for e in self.validation_errors if e.severity == ErrorSeverity.CRITICAL]
|
||||
high_errors = [e for e in self.validation_errors if e.severity == ErrorSeverity.HIGH]
|
||||
medium_errors = [e for e in self.validation_errors if e.severity == ErrorSeverity.MEDIUM]
|
||||
low_errors = [e for e in self.validation_errors if e.severity == ErrorSeverity.LOW]
|
||||
|
||||
return {
|
||||
'total_errors': len(self.validation_errors),
|
||||
'critical_errors': len(critical_errors),
|
||||
'high_errors': len(high_errors),
|
||||
'medium_errors': len(medium_errors),
|
||||
'low_errors': len(low_errors),
|
||||
'error_count': self.error_count,
|
||||
'warning_count': self.warning_count,
|
||||
'validation_passed': len(critical_errors) == 0,
|
||||
'performance_metrics': {
|
||||
'total_operations': len(self.performance_metrics),
|
||||
'avg_execution_time': np.mean([m.execution_time for m in self.performance_metrics]) if self.performance_metrics else 0.0,
|
||||
'max_execution_time': max([m.execution_time for m in self.performance_metrics]) if self.performance_metrics else 0.0,
|
||||
'min_execution_time': min([m.execution_time for m in self.performance_metrics]) if self.performance_metrics else 0.0,
|
||||
},
|
||||
'detailed_errors': [
|
||||
{
|
||||
'field_name': e.field_name,
|
||||
'severity': e.severity.value,
|
||||
'message': e.message
|
||||
}
|
||||
for e in self.validation_errors
|
||||
]
|
||||
}
|
||||
|
||||
def save_error_report(self, filepath: str):
|
||||
"""Save error report to file."""
|
||||
try:
|
||||
report = self.get_validation_report()
|
||||
|
||||
with open(filepath, 'w') as f:
|
||||
json.dump(report, f, indent=2, default=str)
|
||||
|
||||
logger.info(f"Error report saved to {filepath}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving error report: {e}")
|
||||
|
||||
def clear_errors(self):
|
||||
"""Clear all error tracking."""
|
||||
self.validation_errors.clear()
|
||||
self.performance_metrics.clear()
|
||||
self.error_count = 0
|
||||
self.warning_count = 0
|
||||
logger.info("Error tracking cleared")
|
||||
|
||||
# Global error handler instance
|
||||
error_handler = ErrorHandler()
|
||||
|
||||
def test_error_handling():
|
||||
"""Test the error handling system."""
|
||||
print("Testing Error Handling System...")
|
||||
|
||||
# Test input validation
|
||||
assert error_handler.validate_input(5, int, "test_int", {'min': 0, 'max': 10})
|
||||
assert not error_handler.validate_input(-1, int, "test_int", {'min': 0, 'max': 10})
|
||||
|
||||
# Test safe operation
|
||||
def test_function(x, y):
|
||||
return x + y
|
||||
|
||||
result = error_handler.safe_operation(test_function, 2, 3, operation_name="test_add")
|
||||
assert result == 5
|
||||
|
||||
# Test operation with error
|
||||
def failing_function():
|
||||
raise ValueError("Test error")
|
||||
|
||||
result = error_handler.safe_operation(failing_function, fallback_value=42)
|
||||
assert result == 42
|
||||
|
||||
# Test performance monitoring decorator
|
||||
@error_handler.performance_monitor("decorated_function")
|
||||
def slow_function():
|
||||
time.sleep(0.1)
|
||||
return "done"
|
||||
|
||||
result = slow_function()
|
||||
assert result == "done"
|
||||
|
||||
# Test configuration validation
|
||||
config = {
|
||||
'building_width': 50.0,
|
||||
'building_length': 30.0,
|
||||
'building_height': 3.0,
|
||||
'target_coverage': 0.9,
|
||||
'tx_power': 20.0
|
||||
}
|
||||
|
||||
assert error_handler.validate_config(config)
|
||||
|
||||
# Test materials grid validation
|
||||
materials_grid = np.random.randint(0, 5, (10, 20, 30))
|
||||
assert error_handler.validate_materials_grid(materials_grid, (10, 20, 30))
|
||||
|
||||
# Test AP locations validation
|
||||
ap_locations = [(10.0, 15.0, 2.7), (25.0, 10.0, 2.7)]
|
||||
building_dimensions = (50.0, 30.0, 3.0)
|
||||
assert error_handler.validate_ap_locations(ap_locations, building_dimensions)
|
||||
|
||||
# Get validation report
|
||||
report = error_handler.get_validation_report()
|
||||
print(f"Validation report: {report}")
|
||||
|
||||
print("Error Handling System test completed successfully!")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_error_handling()
|
||||
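For reference, a minimal usage sketch of the handler outside its self-test, assuming this module lives at src/utils/error_handler.py alongside performance_optimizer.py; the run_simulation step and the report path are hypothetical placeholders, not part of this commit:

from src.utils.error_handler import error_handler

config = {
    'building_width': 40.0,     # metres
    'building_length': 25.0,    # metres
    'building_height': 3.0,     # metres
    'target_coverage': 0.9,     # fraction of points that must meet the RSSI threshold
}

# Reject the run early if any CRITICAL validation error was recorded.
if not error_handler.validate_config(config):
    error_handler.save_error_report('results/validation_report.json')  # hypothetical path
    raise SystemExit('Configuration rejected; see validation_report.json')

def run_simulation(cfg):
    # Hypothetical stand-in for an expensive pipeline step.
    return {'coverage': 0.93}

# A failure inside run_simulation is logged and replaced by the fallback dict.
result = error_handler.safe_operation(run_simulation, config,
                                      fallback_value={}, operation_name='run_simulation')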
572
src/utils/performance_optimizer.py
Normal file
572
src/utils/performance_optimizer.py
Normal file
@@ -0,0 +1,572 @@
|
||||
"""
|
||||
Performance Optimization and Profiling System
|
||||
|
||||
This module provides:
|
||||
- Advanced profiling and performance monitoring
|
||||
- Vectorized operations using NumPy
|
||||
- Parallel processing for independent calculations
|
||||
- Intelligent caching strategies
|
||||
- Memory optimization
|
||||
- Performance bottleneck identification
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
import time
|
||||
import cProfile
|
||||
import pstats
|
||||
import io
|
||||
import psutil
|
||||
import multiprocessing as mp
|
||||
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
|
||||
from functools import lru_cache, wraps
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
import threading
|
||||
import gc
|
||||
import weakref
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class ProfilerMode(Enum):
|
||||
"""Profiling modes."""
|
||||
DISABLED = "disabled"
|
||||
BASIC = "basic"
|
||||
DETAILED = "detailed"
|
||||
MEMORY = "memory"
|
||||
|
||||
@dataclass
|
||||
class PerformanceProfile:
|
||||
"""Performance profile data."""
|
||||
function_name: str
|
||||
total_time: float
|
||||
call_count: int
|
||||
avg_time: float
|
||||
min_time: float
|
||||
max_time: float
|
||||
memory_usage: Optional[float] = None
|
||||
cpu_usage: Optional[float] = None
|
||||
|
||||
@dataclass
|
||||
class CacheStats:
|
||||
"""Cache statistics."""
|
||||
cache_name: str
|
||||
hits: int
|
||||
misses: int
|
||||
size: int
|
||||
max_size: int
|
||||
hit_rate: float
|
||||
|
||||
class PerformanceOptimizer:
|
||||
"""
|
||||
Advanced performance optimization and profiling system.
|
||||
"""
|
||||
|
||||
def __init__(self, profiler_mode: ProfilerMode = ProfilerMode.BASIC):
|
||||
"""Initialize the performance optimizer."""
|
||||
self.profiler_mode = profiler_mode
|
||||
self.profiles: Dict[str, PerformanceProfile] = {}
|
||||
self.cache_stats: Dict[str, CacheStats] = {}
|
||||
self.memory_tracker = MemoryTracker()
|
||||
self.profiler = None
|
||||
self.stats = None
|
||||
|
||||
# Performance tracking
|
||||
self.start_time = time.time()
|
||||
self.operation_times = {}
|
||||
|
||||
logger.info(f"Performance Optimizer initialized with mode: {profiler_mode.value}")
|
||||
|
||||
def start_profiling(self):
|
||||
"""Start profiling if enabled."""
|
||||
if self.profiler_mode != ProfilerMode.DISABLED:
|
||||
self.profiler = cProfile.Profile()
|
||||
self.profiler.enable()
|
||||
logger.info("Profiling started")
|
||||
|
||||
def stop_profiling(self) -> Optional[str]:
|
||||
"""Stop profiling and return statistics."""
|
||||
if self.profiler is not None:
|
||||
self.profiler.disable()
|
||||
s = io.StringIO()
|
||||
self.stats = pstats.Stats(self.profiler, stream=s).sort_stats('cumulative')
|
||||
self.stats.print_stats(20) # Top 20 functions
|
||||
logger.info("Profiling stopped")
|
||||
return s.getvalue()
|
||||
return None
|
||||
|
||||
def profile_function(self, func: Callable, *args, **kwargs) -> Tuple[Any, PerformanceProfile]:
|
||||
"""Profile a single function execution."""
|
||||
start_time = time.time()
|
||||
start_memory = self.memory_tracker.get_memory_usage()
|
||||
|
||||
try:
|
||||
result = func(*args, **kwargs)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in profiled function {func.__name__}: {e}")
|
||||
raise
|
||||
|
||||
end_time = time.time()
|
||||
end_memory = self.memory_tracker.get_memory_usage()
|
||||
|
||||
execution_time = end_time - start_time
|
||||
memory_usage = end_memory - start_memory if end_memory and start_memory else None
|
||||
|
||||
# Update profile
|
||||
if func.__name__ not in self.profiles:
|
||||
self.profiles[func.__name__] = PerformanceProfile(
|
||||
function_name=func.__name__,
|
||||
total_time=execution_time,
|
||||
call_count=1,
|
||||
avg_time=execution_time,
|
||||
min_time=execution_time,
|
||||
max_time=execution_time,
|
||||
memory_usage=memory_usage
|
||||
)
|
||||
else:
|
||||
profile = self.profiles[func.__name__]
|
||||
profile.total_time += execution_time
|
||||
profile.call_count += 1
|
||||
profile.avg_time = profile.total_time / profile.call_count
|
||||
profile.min_time = min(profile.min_time, execution_time)
|
||||
profile.max_time = max(profile.max_time, execution_time)
|
||||
if memory_usage is not None:
|
||||
profile.memory_usage = memory_usage
|
||||
|
||||
return result, self.profiles[func.__name__]
|
||||
|
||||
def profile_decorator(self, func: Callable) -> Callable:
|
||||
"""Decorator for profiling functions."""
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
return self.profile_function(func, *args, **kwargs)[0]
|
||||
return wrapper
|
||||
|
||||
def vectorized_rssi_calculation(self, ap_locations: np.ndarray,
|
||||
points: np.ndarray,
|
||||
tx_power: float = 20.0,
|
||||
frequency: float = 2.4e9) -> np.ndarray:
|
||||
"""
|
||||
Vectorized RSSI calculation for multiple APs and points.
|
||||
|
||||
Args:
|
||||
ap_locations: Array of AP coordinates (n_aps, 3)
|
||||
points: Array of receiver points (n_points, 3)
|
||||
tx_power: Transmit power in dBm
|
||||
frequency: Frequency in Hz
|
||||
|
||||
Returns:
|
||||
RSSI matrix (n_aps, n_points)
|
||||
"""
|
||||
try:
|
||||
n_aps = ap_locations.shape[0]
|
||||
n_points = points.shape[0]
|
||||
|
||||
# Reshape for broadcasting
|
||||
ap_locations_expanded = ap_locations[:, np.newaxis, :] # (n_aps, 1, 3)
|
||||
points_expanded = points[np.newaxis, :, :] # (1, n_points, 3)
|
||||
|
||||
# Calculate distances vectorized
|
||||
distances = np.sqrt(np.sum((ap_locations_expanded - points_expanded) ** 2, axis=2)) # (n_aps, n_points)
|
||||
|
||||
# Avoid division by zero
|
||||
distances = np.maximum(distances, 1e-6)
|
||||
|
||||
# Calculate free space path loss vectorized
|
||||
wavelength = 3e8 / frequency
|
||||
free_space_loss = 20 * np.log10(4 * np.pi * distances / wavelength)
|
||||
|
||||
# Calculate RSSI vectorized
|
||||
rssi = tx_power - free_space_loss
|
||||
|
||||
# Clip to reasonable range
|
||||
rssi = np.clip(rssi, -100.0, 0.0)
|
||||
|
||||
return rssi
|
||||
|
||||
except Exception as e:
logger.error(f"Error in vectorized RSSI calculation: {e}")
# Derive the fallback shape from the inputs so this works even if the
# failure happened before n_aps / n_points were assigned above.
return np.full((ap_locations.shape[0], points.shape[0]), -100.0)
|
||||
|
||||
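# Worked example of the free-space model implemented above: at 2.4 GHz the
# wavelength is 3e8 / 2.4e9 = 0.125 m, so at a 10 m separation the free-space
# path loss is 20 * log10(4 * pi * 10 / 0.125) ≈ 60 dB, and with tx_power =
# 20 dBm the predicted RSSI is roughly -40 dBm, well inside the [-100, 0] dBm
# range the result is clipped to.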
def vectorized_material_attenuation(self, start_points: np.ndarray,
|
||||
end_points: np.ndarray,
|
||||
materials_grid: np.ndarray,
|
||||
resolution: float = 0.2) -> np.ndarray:
|
||||
"""
|
||||
Vectorized material attenuation calculation.
|
||||
|
||||
Args:
|
||||
start_points: Array of start points (n_paths, 3)
|
||||
end_points: Array of end points (n_paths, 3)
|
||||
materials_grid: 3D materials grid
|
||||
resolution: Grid resolution
|
||||
|
||||
Returns:
|
||||
Attenuation array (n_paths,)
|
||||
"""
|
||||
try:
|
||||
n_paths = start_points.shape[0]
|
||||
attenuations = np.zeros(n_paths)
|
||||
|
||||
# Calculate path vectors
|
||||
path_vectors = end_points - start_points
|
||||
path_lengths = np.sqrt(np.sum(path_vectors ** 2, axis=1))
|
||||
|
||||
# Normalize path vectors
|
||||
path_directions = path_vectors / (path_lengths[:, np.newaxis] + 1e-6)
|
||||
|
||||
# Calculate number of steps for each path
|
||||
max_steps = int(np.max(path_lengths) / resolution) + 1
|
||||
|
||||
# Vectorized path traversal
|
||||
for step in range(max_steps):
|
||||
# Calculate current positions
|
||||
t = step / max_steps
|
||||
current_positions = start_points + t * path_vectors
|
||||
|
||||
# Convert to grid coordinates
|
||||
grid_coords = (current_positions / resolution).astype(int)
|
||||
|
||||
# Clamp to grid bounds (grid_coords columns are (x, y, z); the grid is indexed (z, y, x))
grid_coords = np.clip(grid_coords, 0, np.array(materials_grid.shape)[::-1] - 1)
|
||||
|
||||
# Get materials at current positions
|
||||
materials = materials_grid[grid_coords[:, 2], grid_coords[:, 1], grid_coords[:, 0]]
|
||||
|
||||
# Calculate attenuation for this step
|
||||
step_lengths = path_lengths / max_steps
|
||||
step_attenuations = np.array([
|
||||
self._get_material_attenuation(material, step_lengths[i], 2.4e9)
|
||||
for i, material in enumerate(materials)
|
||||
])
|
||||
|
||||
attenuations += step_attenuations
|
||||
|
||||
return attenuations
|
||||
|
||||
except Exception as e:
logger.error(f"Error in vectorized material attenuation: {e}")
return np.zeros(start_points.shape[0])
|
||||
|
||||
def _get_material_attenuation(self, material, distance: float, frequency: float) -> float:
|
||||
"""Get attenuation for a material."""
|
||||
try:
|
||||
if hasattr(material, 'calculate_attenuation'):
|
||||
return material.calculate_attenuation(frequency, distance)
|
||||
else:
|
||||
return 0.0
|
||||
except Exception:
|
||||
return 0.0
|
||||
|
||||
def parallel_rssi_calculation(self, ap_locations: List[Tuple[float, float, float]],
|
||||
points: List[Tuple[float, float, float]],
|
||||
materials_grid: np.ndarray,
|
||||
tx_power: float = 20.0,
|
||||
max_workers: int = None) -> np.ndarray:
|
||||
"""
|
||||
Parallel RSSI calculation using multiprocessing.
|
||||
|
||||
Args:
|
||||
ap_locations: List of AP coordinates
|
||||
points: List of receiver points
|
||||
materials_grid: 3D materials grid
|
||||
tx_power: Transmit power in dBm
|
||||
max_workers: Maximum number of workers
|
||||
|
||||
Returns:
|
||||
RSSI matrix (n_aps, n_points)
|
||||
"""
|
||||
try:
|
||||
if max_workers is None:
|
||||
max_workers = min(mp.cpu_count(), len(ap_locations))
|
||||
|
||||
n_aps = len(ap_locations)
|
||||
n_points = len(points)
|
||||
|
||||
# Initialize result matrix
|
||||
rssi_matrix = np.full((n_aps, n_points), -100.0)
|
||||
|
||||
# Use parallel processing for large calculations
|
||||
if n_aps * n_points > 1000: # Threshold for parallel processing
|
||||
logger.info(f"Using parallel processing with {max_workers} workers")
|
||||
|
||||
with ProcessPoolExecutor(max_workers=max_workers) as executor:
|
||||
# Submit tasks for each AP
|
||||
futures = []
|
||||
for ap_idx, ap_location in enumerate(ap_locations):
|
||||
future = executor.submit(
|
||||
self._calculate_rssi_for_ap_parallel,
|
||||
ap_location, points, materials_grid, tx_power
|
||||
)
|
||||
futures.append((ap_idx, future))
|
||||
|
||||
# Collect results
|
||||
for ap_idx, future in futures:
|
||||
try:
|
||||
rssi_values = future.result()
|
||||
rssi_matrix[ap_idx, :] = rssi_values
|
||||
except Exception as e:
|
||||
logger.error(f"Error in parallel RSSI calculation for AP {ap_idx}: {e}")
|
||||
rssi_matrix[ap_idx, :] = -100.0
|
||||
else:
|
||||
# Sequential processing for small calculations
|
||||
for ap_idx, ap_location in enumerate(ap_locations):
|
||||
rssi_values = self._calculate_rssi_for_ap_parallel(
|
||||
ap_location, points, materials_grid, tx_power
|
||||
)
|
||||
rssi_matrix[ap_idx, :] = rssi_values
|
||||
|
||||
return rssi_matrix
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in parallel RSSI calculation: {e}")
|
||||
return np.full((len(ap_locations), len(points)), -100.0)
|
||||
|
||||
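# Design note on the parallel path above: the n_aps * n_points > 1000 threshold keeps
# small problems sequential (process start-up would dominate), while larger problems
# fan out one task per AP so each worker fills one row of the RSSI matrix. Everything
# handed to ProcessPoolExecutor (the callable and its arguments) must be picklable.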
def _calculate_rssi_for_ap_parallel(self, ap_location: Tuple[float, float, float],
|
||||
points: List[Tuple[float, float, float]],
|
||||
materials_grid: np.ndarray,
|
||||
tx_power: float) -> np.ndarray:
|
||||
"""Calculate RSSI for one AP at multiple points (for parallel processing)."""
|
||||
try:
|
||||
rssi_values = []
|
||||
|
||||
for point in points:
|
||||
# Calculate distance
|
||||
distance = np.sqrt(sum((ap_location[i] - point[i])**2 for i in range(3)))
|
||||
|
||||
if distance < 1e-6:
|
||||
rssi_values.append(tx_power)
|
||||
continue
|
||||
|
||||
# Free space path loss
|
||||
wavelength = 3e8 / 2.4e9
|
||||
free_space_loss = 20 * np.log10(4 * np.pi * distance / wavelength)
|
||||
|
||||
# Material attenuation (simplified for parallel processing)
|
||||
material_attenuation = 0.0 # Could be enhanced with actual material calculation
|
||||
|
||||
# Total RSSI
|
||||
rssi = tx_power - free_space_loss - material_attenuation
|
||||
rssi_values.append(rssi)
|
||||
|
||||
return np.array(rssi_values)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating RSSI for AP: {e}")
|
||||
return np.full(len(points), -100.0)
|
||||
|
||||
def create_cache(self, cache_name: str, max_size: int = 1000) -> Callable:
|
||||
"""
|
||||
Create a named cache with statistics tracking.
|
||||
|
||||
Args:
|
||||
cache_name: Name of the cache
|
||||
max_size: Maximum cache size
|
||||
|
||||
Returns:
|
||||
Decorator function for caching
|
||||
"""
|
||||
cache = {}
|
||||
cache_stats = CacheStats(
|
||||
cache_name=cache_name,
|
||||
hits=0,
|
||||
misses=0,
|
||||
size=0,
|
||||
max_size=max_size,
|
||||
hit_rate=0.0
|
||||
)
|
||||
|
||||
self.cache_stats[cache_name] = cache_stats
|
||||
|
||||
def cache_decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
# Create cache key
|
||||
cache_key = str((args, tuple(sorted(kwargs.items()))))
|
||||
|
||||
if cache_key in cache:
|
||||
# Cache hit
|
||||
cache_stats.hits += 1
|
||||
cache_stats.hit_rate = cache_stats.hits / (cache_stats.hits + cache_stats.misses)
|
||||
return cache[cache_key]
|
||||
else:
|
||||
# Cache miss
|
||||
cache_stats.misses += 1
|
||||
result = func(*args, **kwargs)
|
||||
|
||||
# Add to cache if not full
|
||||
if len(cache) < max_size:
|
||||
cache[cache_key] = result
|
||||
cache_stats.size = len(cache)
|
||||
|
||||
cache_stats.hit_rate = cache_stats.hits / (cache_stats.hits + cache_stats.misses)
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
return cache_decorator
|
||||
|
||||
def optimize_memory_usage(self):
|
||||
"""Optimize memory usage by clearing caches and running garbage collection."""
|
||||
try:
|
||||
# Clear all caches
|
||||
for cache_name in self.cache_stats:
|
||||
cache_stats = self.cache_stats[cache_name]
|
||||
cache_stats.hits = 0
|
||||
cache_stats.misses = 0
|
||||
cache_stats.size = 0
|
||||
cache_stats.hit_rate = 0.0
|
||||
|
||||
# Run garbage collection
|
||||
gc.collect()
|
||||
|
||||
# Clear operation times
|
||||
self.operation_times.clear()
|
||||
|
||||
logger.info("Memory optimization completed")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in memory optimization: {e}")
|
||||
|
||||
def get_performance_report(self) -> Dict[str, Any]:
|
||||
"""Get comprehensive performance report."""
|
||||
total_time = time.time() - self.start_time
|
||||
|
||||
# Calculate performance metrics
|
||||
avg_times = {}
|
||||
for func_name, times in self.operation_times.items():
|
||||
if times:
|
||||
avg_times[func_name] = np.mean(times)
|
||||
|
||||
# Memory usage
|
||||
memory_usage = self.memory_tracker.get_memory_usage()
|
||||
|
||||
return {
|
||||
'total_runtime': total_time,
|
||||
'function_profiles': {
|
||||
name: {
|
||||
'total_time': profile.total_time,
|
||||
'call_count': profile.call_count,
|
||||
'avg_time': profile.avg_time,
|
||||
'min_time': profile.min_time,
|
||||
'max_time': profile.max_time,
|
||||
'memory_usage': profile.memory_usage
|
||||
}
|
||||
for name, profile in self.profiles.items()
|
||||
},
|
||||
'cache_statistics': {
|
||||
name: {
|
||||
'hits': stats.hits,
|
||||
'misses': stats.misses,
|
||||
'size': stats.size,
|
||||
'max_size': stats.max_size,
|
||||
'hit_rate': stats.hit_rate
|
||||
}
|
||||
for name, stats in self.cache_stats.items()
|
||||
},
|
||||
'memory_usage_mb': memory_usage,
|
||||
'average_function_times': avg_times
|
||||
}
|
||||
|
||||
def identify_bottlenecks(self) -> List[Dict[str, Any]]:
|
||||
"""Identify performance bottlenecks."""
|
||||
bottlenecks = []
|
||||
|
||||
# Check function performance
|
||||
for func_name, profile in self.profiles.items():
|
||||
if profile.avg_time > 0.1: # Functions taking more than 100ms on average
|
||||
bottlenecks.append({
|
||||
'type': 'function',
|
||||
'name': func_name,
|
||||
'avg_time': profile.avg_time,
|
||||
'call_count': profile.call_count,
|
||||
'total_time': profile.total_time,
|
||||
'suggestion': 'Consider optimization or caching'
|
||||
})
|
||||
|
||||
# Check cache performance
|
||||
for cache_name, stats in self.cache_stats.items():
|
||||
if stats.hit_rate < 0.5: # Low cache hit rate
|
||||
bottlenecks.append({
|
||||
'type': 'cache',
|
||||
'name': cache_name,
|
||||
'hit_rate': stats.hit_rate,
|
||||
'suggestion': 'Review cache key strategy or increase cache size'
|
||||
})
|
||||
|
||||
# Check memory usage
|
||||
memory_usage = self.memory_tracker.get_memory_usage()
|
||||
if memory_usage and memory_usage > 1000: # More than 1GB
|
||||
bottlenecks.append({
|
||||
'type': 'memory',
|
||||
'usage_mb': memory_usage,
|
||||
'suggestion': 'Consider memory optimization or data structure changes'
|
||||
})
|
||||
|
||||
return sorted(bottlenecks, key=lambda x: x.get('avg_time', 0), reverse=True)
|
||||
|
||||
class MemoryTracker:
|
||||
"""Track memory usage."""
|
||||
|
||||
def __init__(self):
|
||||
self.process = psutil.Process()
|
||||
|
||||
def get_memory_usage(self) -> Optional[float]:
|
||||
"""Get current memory usage in MB."""
|
||||
try:
|
||||
memory_info = self.process.memory_info()
|
||||
return memory_info.rss / 1024 / 1024 # Convert to MB
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def test_performance_optimizer():
|
||||
"""Test the performance optimizer."""
|
||||
print("Testing Performance Optimizer...")
|
||||
|
||||
# Initialize optimizer
|
||||
optimizer = PerformanceOptimizer(ProfilerMode.BASIC)
|
||||
|
||||
# Test vectorized RSSI calculation
|
||||
ap_locations = np.array([[10.0, 10.0, 2.7], [30.0, 30.0, 2.7]])
|
||||
points = np.array([[5.0, 5.0, 1.5], [15.0, 15.0, 1.5], [25.0, 25.0, 1.5]])
|
||||
|
||||
rssi_matrix = optimizer.vectorized_rssi_calculation(ap_locations, points)
|
||||
print(f"Vectorized RSSI matrix shape: {rssi_matrix.shape}")
|
||||
|
||||
# Test profiling decorator
|
||||
@optimizer.profile_decorator
|
||||
def test_function(x):
|
||||
time.sleep(0.01) # Simulate work
|
||||
return x * 2
|
||||
|
||||
result = test_function(5)
|
||||
print(f"Profiled function result: {result}")
|
||||
|
||||
# Test cache
|
||||
cache_decorator = optimizer.create_cache("test_cache", max_size=10)
|
||||
|
||||
@cache_decorator
|
||||
def expensive_function(x):
|
||||
time.sleep(0.1) # Simulate expensive operation
|
||||
return x ** 2
|
||||
|
||||
# First call (cache miss)
|
||||
result1 = expensive_function(5)
|
||||
# Second call (cache hit)
|
||||
result2 = expensive_function(5)
|
||||
|
||||
print(f"Cached function results: {result1}, {result2}")
|
||||
|
||||
# Get performance report
|
||||
report = optimizer.get_performance_report()
|
||||
print(f"Performance report keys: {list(report.keys())}")
|
||||
|
||||
# Identify bottlenecks
|
||||
bottlenecks = optimizer.identify_bottlenecks()
|
||||
print(f"Identified bottlenecks: {len(bottlenecks)}")
|
||||
|
||||
print("Performance Optimizer test completed successfully!")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_performance_optimizer()
|
||||
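A short usage sketch of the optimizer outside its self-test; simulate_coverage is a hypothetical stand-in for any expensive per-run step and is not defined in this commit:

import numpy as np
from src.utils.performance_optimizer import PerformanceOptimizer, ProfilerMode

optimizer = PerformanceOptimizer(ProfilerMode.BASIC)

@optimizer.profile_decorator
def simulate_coverage(ap_locations, points):
    # Hypothetical workload: one vectorized RSSI evaluation over the grid.
    return optimizer.vectorized_rssi_calculation(ap_locations, points)

aps = np.array([[10.0, 10.0, 2.7], [30.0, 15.0, 2.7]])
rx = np.column_stack([np.random.uniform(0.0, 40.0, 500),
                      np.random.uniform(0.0, 25.0, 500),
                      np.full(500, 1.5)])
simulate_coverage(aps, rx)

report = optimizer.get_performance_report()
print(report['function_profiles']['simulate_coverage']['avg_time'])
for hint in optimizer.identify_bottlenecks():
    print(hint['type'], hint['suggestion'])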
1
src/visualization/__init__.py
Normal file
1
src/visualization/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Visualization package."""
|
||||
1624
src/visualization/building_visualizer.py
Normal file
1624
src/visualization/building_visualizer.py
Normal file
File diff suppressed because it is too large
121
src/visualization/ultra_advanced_visualizer.py
Normal file
121
src/visualization/ultra_advanced_visualizer.py
Normal file
@@ -0,0 +1,121 @@
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
import matplotlib.patches as mpatches
|
||||
from matplotlib.colors import ListedColormap, Normalize
|
||||
from matplotlib.patheffects import withStroke
|
||||
import os
|
||||
|
||||
|
||||
def plot_ultra_advanced_coverage_and_aps(
|
||||
floor_plan_img_path,
|
||||
ap_locations,
|
||||
signal_grid,
|
||||
x_coords,
|
||||
y_coords,
|
||||
output_path_prefix,
|
||||
wall_lines=None,
|
||||
room_polygons=None,
|
||||
dpi=400,
|
||||
show=True
|
||||
):
|
||||
"""
|
||||
Ultra-advanced WiFi AP placement and coverage visualization.
|
||||
- floor_plan_img_path: path to floor plan image (JPG/PNG)
|
||||
- ap_locations: dict {APn: (x, y, ...)}
|
||||
- signal_grid: 2D np.ndarray (signal strength)
|
||||
- x_coords, y_coords: 1D arrays for grid axes
|
||||
- output_path_prefix: base path for saving (no extension)
|
||||
- wall_lines: list of ((x1, y1), (x2, y2))
|
||||
- room_polygons: list of [(x1, y1), (x2, y2), ...]
|
||||
- dpi: output resolution
|
||||
- show: whether to display plot interactively
|
||||
"""
|
||||
# Load floor plan image
|
||||
img = plt.imread(floor_plan_img_path)
|
||||
img_extent = [x_coords[0], x_coords[-1], y_coords[0], y_coords[-1]]
|
||||
|
||||
fig, ax = plt.subplots(figsize=(16, 10), dpi=dpi)
|
||||
|
||||
# Plot floor plan
|
||||
ax.imshow(img, extent=img_extent, aspect='auto', alpha=0.6, zorder=0)
|
||||
|
||||
# Plot coverage heatmap
|
||||
cmap = plt.get_cmap('coolwarm')
|
||||
vmin, vmax = -90, -30
|
||||
im = ax.imshow(
|
||||
signal_grid,
|
||||
extent=img_extent,
|
||||
origin='lower',
|
||||
cmap=cmap,
|
||||
alpha=0.55,
|
||||
vmin=vmin,
|
||||
vmax=vmax,
|
||||
zorder=1
|
||||
)
|
||||
|
||||
# Plot walls
|
||||
if wall_lines:
|
||||
for (x1, y1), (x2, y2) in wall_lines:
|
||||
ax.plot([x1, x2], [y1, y2], color='black', linewidth=3, alpha=0.7, zorder=3)
|
||||
|
||||
# Plot rooms
|
||||
if room_polygons:
|
||||
for poly in room_polygons:
|
||||
patch = mpatches.Polygon(poly, closed=True, fill=False, edgecolor='gray', linewidth=2, alpha=0.5, zorder=2)
|
||||
ax.add_patch(patch)
|
||||
|
||||
# AP marker styles
|
||||
ap_colors = [
|
||||
'#e41a1c', '#377eb8', '#4daf4a', '#984ea3', '#ff7f00',
|
||||
'#ffff33', '#a65628', '#f781bf', '#999999', '#66c2a5',
|
||||
'#fc8d62', '#8da0cb', '#e78ac3', '#a6d854', '#ffd92f',
|
||||
]
|
||||
marker_styles = ['o', 's', 'D', '^', 'v', 'P', '*', 'X', 'h', '8']
|
||||
|
||||
# Plot APs
|
||||
for i, (ap_name, ap_coords) in enumerate(ap_locations.items()):
|
||||
x, y = ap_coords[:2]
|
||||
color = ap_colors[i % len(ap_colors)]
|
||||
marker = marker_styles[i % len(marker_styles)]
|
||||
ax.scatter(x, y, s=600, c=color, marker=marker, edgecolors='black', linewidths=2, zorder=10)
|
||||
ax.text(
|
||||
x, y, f'{i+1}',
|
||||
fontsize=22, fontweight='bold', color='white',
|
||||
ha='center', va='center', zorder=11,
|
||||
path_effects=[withStroke(linewidth=4, foreground='black')]
|
||||
)
|
||||
ax.text(
|
||||
x, y-1.5, ap_name,
|
||||
fontsize=13, fontweight='bold', color='black',
|
||||
ha='center', va='top', zorder=12,
|
||||
bbox=dict(boxstyle='round,pad=0.2', fc='white', ec='black', lw=1, alpha=0.8)
|
||||
)
|
||||
|
||||
# Title and labels
|
||||
ax.set_title('Ultra-Advanced WiFi Coverage and AP Placement', fontsize=24, fontweight='bold', pad=20)
|
||||
ax.set_xlabel('X (meters)', fontsize=16)
|
||||
ax.set_ylabel('Y (meters)', fontsize=16)
|
||||
ax.set_xlim(x_coords[0], x_coords[-1])
|
||||
ax.set_ylim(y_coords[0], y_coords[-1])
|
||||
ax.set_aspect('equal')
|
||||
ax.grid(False)
|
||||
|
||||
# Colorbar
|
||||
cbar = fig.colorbar(im, ax=ax, fraction=0.025, pad=0.03)
|
||||
cbar.set_label('Signal Strength (dBm)', fontsize=16)
|
||||
cbar.ax.tick_params(labelsize=14)
|
||||
|
||||
# AP legend
|
||||
legend_handles = [
|
||||
mpatches.Patch(color=ap_colors[i % len(ap_colors)], label=f'{ap_name}')
|
||||
for i, ap_name in enumerate(ap_locations.keys())
|
||||
]
|
||||
ax.legend(handles=legend_handles, title='Access Points', fontsize=13, title_fontsize=15, loc='upper right', bbox_to_anchor=(1.18, 1))
|
||||
|
||||
# Save in multiple formats
|
||||
for ext in ['png', 'svg', 'pdf']:
|
||||
out_path = f'{output_path_prefix}_ultra.{ext}'
|
||||
fig.savefig(out_path, bbox_inches='tight', dpi=dpi)
|
||||
if show:
|
||||
plt.show()
|
||||
plt.close(fig)
|
||||
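A hedged usage sketch for the function above; the floor-plan image, AP coordinates and output prefix are hypothetical examples, and the output directory is assumed to exist:

import numpy as np
from src.visualization.ultra_advanced_visualizer import plot_ultra_advanced_coverage_and_aps

# Hypothetical 40 m x 25 m floor sampled every 0.5 m, with two APs.
x_coords = np.arange(0.0, 40.0, 0.5)
y_coords = np.arange(0.0, 25.0, 0.5)
signal_grid = np.random.uniform(-90.0, -30.0, size=(len(y_coords), len(x_coords)))
ap_locations = {'AP1': (10.0, 12.0, 2.7), 'AP2': (30.0, 12.0, 2.7)}

plot_ultra_advanced_coverage_and_aps(
    floor_plan_img_path='floor_plans/example_floor.png',   # hypothetical image
    ap_locations=ap_locations,
    signal_grid=signal_grid,
    x_coords=x_coords,
    y_coords=y_coords,
    output_path_prefix='visualizations/example_coverage',  # hypothetical prefix
    show=False,
)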
126
src/visualization/visualizer.py
Normal file
126
src/visualization/visualizer.py
Normal file
@@ -0,0 +1,126 @@
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from datetime import datetime
|
||||
import os
|
||||
|
||||
class WiFiVisualizer:
|
||||
def __init__(self, output_dir="visualizations"):
|
||||
"""Initialize the WiFi data visualizer.
|
||||
|
||||
Args:
|
||||
output_dir (str): Directory to store visualizations
|
||||
"""
|
||||
self.output_dir = output_dir
|
||||
if not os.path.exists(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
|
||||
def create_dashboard(self, data, model_results):
|
||||
"""Create a comprehensive visualization dashboard.
|
||||
|
||||
Args:
|
||||
data (pd.DataFrame): Original data
|
||||
model_results (dict): Results from model training
|
||||
"""
|
||||
print("Creating visualizations...")
|
||||
|
||||
# Create individual plots
|
||||
self._plot_signal_distribution(data)
|
||||
self._plot_signal_over_time(data)
|
||||
self._plot_model_comparison(model_results)
|
||||
self._plot_feature_importance(model_results)
|
||||
self._plot_prediction_accuracy(model_results)
|
||||
|
||||
print(f"Visualizations saved in {self.output_dir}/")
|
||||
|
||||
def _plot_signal_distribution(self, data):
|
||||
"""Plot signal strength distribution."""
|
||||
plt.figure(figsize=(10, 6))
|
||||
sns.histplot(data=data, x='rssi', hue='ssid', multiple="stack")
|
||||
plt.title('Signal Strength Distribution by Access Point')
|
||||
plt.xlabel('RSSI (dBm)')
|
||||
plt.ylabel('Count')
|
||||
plt.savefig(os.path.join(self.output_dir, 'signal_distribution.png'))
|
||||
plt.close()
|
||||
|
||||
def _plot_signal_over_time(self, data):
|
||||
"""Plot signal strength over time."""
|
||||
plt.figure(figsize=(12, 6))
|
||||
for ssid in data['ssid'].unique():
|
||||
ssid_data = data[data['ssid'] == ssid]
|
||||
plt.plot(ssid_data['timestamp'], ssid_data['rssi'], label=ssid, alpha=0.7)
|
||||
plt.title('Signal Strength Over Time')
|
||||
plt.xlabel('Time')
|
||||
plt.ylabel('RSSI (dBm)')
|
||||
plt.legend()
|
||||
plt.xticks(rotation=45)
|
||||
plt.tight_layout()
|
||||
plt.savefig(os.path.join(self.output_dir, 'signal_time_series.png'))
|
||||
plt.close()
|
||||
|
||||
def _plot_model_comparison(self, model_results):
|
||||
"""Plot model performance comparison."""
|
||||
models = list(model_results.keys())
|
||||
rmse_scores = [results['metrics']['rmse'] for results in model_results.values()]
|
||||
r2_scores = [results['metrics']['r2'] for results in model_results.values()]
|
||||
|
||||
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 6))
|
||||
|
||||
# RMSE comparison
|
||||
ax1.bar(models, rmse_scores)
|
||||
ax1.set_title('RMSE by Model')
|
||||
ax1.set_ylabel('RMSE')
|
||||
|
||||
# R² comparison
|
||||
ax2.bar(models, r2_scores)
|
||||
ax2.set_title('R² Score by Model')
|
||||
ax2.set_ylabel('R²')
|
||||
|
||||
plt.tight_layout()
|
||||
plt.savefig(os.path.join(self.output_dir, 'model_comparison.png'))
|
||||
plt.close()
|
||||
|
||||
def _plot_feature_importance(self, model_results):
|
||||
"""Plot feature importance for each model."""
|
||||
for model_name, results in model_results.items():
|
||||
if 'feature_importance' in results:
|
||||
importance_dict = results['feature_importance']
|
||||
features = list(importance_dict.keys())
|
||||
importances = list(importance_dict.values())
|
||||
|
||||
# Sort by absolute importance
|
||||
sorted_idx = np.argsort(np.abs(importances))
|
||||
pos = np.arange(len(features)) + .5
|
||||
|
||||
plt.figure(figsize=(12, len(features)/2))
|
||||
plt.barh(pos, np.array(importances)[sorted_idx])
|
||||
plt.yticks(pos, np.array(features)[sorted_idx])
|
||||
plt.xlabel('Feature Importance')
|
||||
plt.title(f'Feature Importance - {model_name.upper()}')
|
||||
plt.tight_layout()
|
||||
plt.savefig(os.path.join(self.output_dir, f'feature_importance_{model_name}.png'))
|
||||
plt.close()
|
||||
|
||||
def _plot_prediction_accuracy(self, model_results):
|
||||
"""Plot prediction accuracy for each model."""
|
||||
for model_name, results in model_results.items():
|
||||
predictions = results['predictions']
|
||||
actual = results['actual']
|
||||
|
||||
plt.figure(figsize=(10, 6))
|
||||
plt.scatter(actual, predictions, alpha=0.5)
|
||||
plt.plot([actual.min(), actual.max()], [actual.min(), actual.max()], 'r--', lw=2)
|
||||
plt.xlabel('Actual Signal Strength (dBm)')
|
||||
plt.ylabel('Predicted Signal Strength (dBm)')
|
||||
plt.title(f'Prediction Accuracy - {model_name.upper()}')
|
||||
|
||||
# Add metrics to plot
|
||||
rmse = results['metrics']['rmse']
|
||||
r2 = results['metrics']['r2']
|
||||
plt.text(0.05, 0.95, f'RMSE: {rmse:.2f}\nR²: {r2:.2f}',
|
||||
transform=plt.gca().transAxes, verticalalignment='top')
|
||||
|
||||
plt.tight_layout()
|
||||
plt.savefig(os.path.join(self.output_dir, f'prediction_accuracy_{model_name}.png'))
|
||||
plt.close()
|
||||
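Finally, a usage sketch for the dashboard; the DataFrame columns (timestamp, ssid, rssi) and the model_results layout (metrics, predictions, actual, feature_importance) are inferred from the plotting code above, and all values below are synthetic examples:

import numpy as np
import pandas as pd
from src.visualization.visualizer import WiFiVisualizer

data = pd.DataFrame({
    'timestamp': pd.date_range('2024-01-01', periods=100, freq='min'),
    'ssid': ['AP1'] * 50 + ['AP2'] * 50,
    'rssi': np.random.uniform(-90.0, -30.0, 100),
})

actual = np.random.uniform(-90.0, -30.0, 30)
model_results = {
    'rf': {
        'metrics': {'rmse': 3.2, 'r2': 0.91},
        'predictions': actual + np.random.normal(0.0, 3.0, 30),
        'actual': actual,
        'feature_importance': {'distance': 0.7, 'wall_count': 0.3},
    },
}

WiFiVisualizer(output_dir='visualizations').create_dashboard(data, model_results)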