AI-powered construction site monitoring using drones
A drone over a construction site once a week provides what 20 cameras can't: a bird's-eye view of the entire facility, the ability to fly over structures, and a 3D model of progress. But "launching a drone and watching a video" isn't monitoring. Monitoring is automated analysis, comparison with the plan, and report generation without human intervention.
Automatic monitoring system
import numpy as np
import cv2
from ultralytics import YOLO
from pathlib import Path
import json
from datetime import datetime
class ConstructionDroneMonitor:
    """Automated analysis of drone flyover photos of a construction site.

    Runs a site-specific YOLO detector over every image of a flight
    mission, converts pixel detections to approximate GPS coordinates,
    and aggregates them into an equipment/workforce summary.
    """

    def __init__(self, project_config: dict):
        """Initialize the monitor from a project configuration.

        Expected keys in ``project_config``:
            model_path: path to the trained YOLO weights.
            equipment_classes: class names counted as equipment.
            material_classes: class names counted as materials.
            site_plan: reference construction plan with zones.
            gps_calibration: georeferencing calibration data.
            confidence_threshold: optional detector confidence threshold
                (defaults to 0.4, the previously hard-coded value).
        """
        # Detector trained for construction-site imagery.
        self.detector = YOLO(project_config['model_path'])
        # Classes: crane, excavator, concrete_mixer, scaffolding,
        # worker, pile, rebar, formwork, brickwork, concrete_poured
        self.equipment_classes = project_config['equipment_classes']
        self.material_classes = project_config['material_classes']
        # Reference plan + zones.
        self.site_plan = project_config['site_plan']
        self.gps_calibration = project_config['gps_calibration']
        # Generalization: threshold is now configurable (was hard-coded 0.4).
        self.conf_threshold = project_config.get('confidence_threshold', 0.4)

    def process_flight_mission(self, images_dir: str,
                               flight_log: dict) -> dict:
        """Process the series of photos from one flyover.

        ``flight_log`` maps image file names to per-shot GPS metadata
        (lat / lon / altitude / fov).

        Returns the aggregated summary (see ``_aggregate_detections``).
        """
        images = sorted(Path(images_dir).glob('*.jpg'))
        all_detections = []
        for img_path in images:
            frame = cv2.imread(str(img_path))
            if frame is None:
                # Unreadable/corrupt file — skip it rather than abort the run.
                continue
            gps = flight_log.get(img_path.name, {})
            results = self.detector(frame, conf=self.conf_threshold)
            all_detections.extend(self._process_detections(
                results, frame, gps, str(img_path)
            ))
        # Aggregate by class type and zone.
        return self._aggregate_detections(all_detections)

    def _process_detections(self, results, frame: np.ndarray,
                            gps: dict, img_path: str) -> list:
        """Convert raw YOLO results into georeferenced detection dicts."""
        detections = []
        for box in results[0].boxes:
            cls = self.detector.model.names[int(box.cls)]
            bbox = list(map(int, box.xyxy[0]))
            conf = float(box.conf)
            # Pixel centre -> GPS via the simplified ground-plane projection.
            cx = (bbox[0] + bbox[2]) // 2
            cy = (bbox[1] + bbox[3]) // 2
            lat, lon = self._px_to_gps(cx, cy, gps, frame.shape)
            detections.append({
                'class': cls,
                'confidence': conf,
                'bbox': bbox,
                'lat': lat,
                'lon': lon,
                'source_image': img_path
            })
        return detections

    def _px_to_gps(self, px: int, py: int, gps: dict,
                   frame_shape: tuple) -> tuple:
        """Convert a pixel position into (lat, lon).

        Simplified nadir-view model: for precise georeferencing an
        RTK GPS plus ground control points (GCP) are required.
        Returns ``(None, None)`` when no GPS metadata is available.
        """
        if not gps:
            return None, None
        fov = gps.get('fov', 84)            # DJI Phantom 4: 84 degrees
        altitude = gps.get('altitude', 50)  # metres above ground
        drone_lat = gps.get('lat', 0)
        drone_lon = gps.get('lon', 0)
        h, w = frame_shape[:2]
        # Ground sample distance: metres on the ground per image pixel,
        # from the horizontal ground footprint of the camera FOV.
        gsd = (altitude * 2 * np.tan(np.radians(fov / 2))) / w
        dx_m = (px - w / 2) * gsd
        dy_m = (h / 2 - py) * gsd  # image y grows downward, latitude upward
        # ~111320 m per degree of latitude; longitude shrinks with cos(lat).
        lat = drone_lat + dy_m / 111320
        lon = drone_lon + dx_m / (111320 * np.cos(np.radians(drone_lat)))
        return lat, lon

    def _aggregate_detections(self, detections: list) -> dict:
        """Aggregate per-image detections into a site-level summary."""
        summary = {
            'equipment_count': {},
            'workers_on_site': 0,
            # NOTE(review): zones_progress is never populated here —
            # presumably filled by a later pipeline stage; confirm before
            # relying on it downstream.
            'zones_progress': {},
            'timestamp': datetime.now().isoformat()
        }
        for det in detections:
            cls = det['class']
            if cls in self.equipment_classes:
                summary['equipment_count'][cls] = \
                    summary['equipment_count'].get(cls, 0) + 1
            elif cls == 'worker':
                summary['workers_on_site'] += 1
        return summary
Orthophoto and 3D model of progress
class SiteProgressAnalyzer:
    """Compare the current week's orthophoto against the previous week's."""

    def compare_orthomap(self, current_path: str,
                         previous_path: str) -> dict:
        """Return a per-zone progress score between two orthophotos.

        Parameters:
            current_path: path to this week's orthophoto.
            previous_path: path to the previous week's orthophoto.

        Returns:
            Dict mapping zone name -> progress score (as produced by
            ``_compute_progress_delta``).

        Raises:
            FileNotFoundError: if either orthophoto cannot be read.
        """
        current = cv2.imread(current_path)
        previous = cv2.imread(previous_path)
        # cv2.imread signals failure by returning None instead of raising —
        # fail loudly here rather than with a confusing AttributeError below.
        if current is None:
            raise FileNotFoundError(f"Cannot read orthophoto: {current_path}")
        if previous is None:
            raise FileNotFoundError(f"Cannot read orthophoto: {previous_path}")
        if current.shape != previous.shape:
            # Align resolutions; cv2.resize takes (width, height).
            previous = cv2.resize(previous, (current.shape[1], current.shape[0]))
        # Semantic segmentation classifies construction stages.
        # Classes: bare_ground, formwork, concrete, rebar, masonry,
        # roofing, scaffolding, finished
        current_seg = self._segment_site(current)
        previous_seg = self._segment_site(previous)
        # Progress per plan zone. NOTE(review): site_zones, _segment_site and
        # _compute_progress_delta are expected to be defined elsewhere in this
        # class — presumably zone bboxes are pixel coordinates on the orthophoto.
        progress_by_zone = {}
        for zone_name, zone_bbox in self.site_zones.items():
            x1, y1, x2, y2 = zone_bbox
            curr_zone = current_seg[y1:y2, x1:x2]
            prev_zone = previous_seg[y1:y2, x1:x2]
            # Change in construction-stage classification between weeks.
            progress_by_zone[zone_name] = self._compute_progress_delta(
                curr_zone, prev_zone
            )
        return progress_by_zone
## Automatic weekly report

- Activity heat map: where work is going on and where there is downtime
- Equipment inventory: e.g. crane #3 has been idle for 3 days
- Progress by section: e.g. Section B — 78% of the plan, 5 days behind schedule
- Safety violations: photo + GPS coordinates of each incident
| Indicator | Typical values |
|---|---|
| Processing time for one flyby (400 photos) | 15–25 min |
| Accuracy of equipment detection | 92–96% |
| Accuracy of workforce counting | 85–93% |
| Georeferencing accuracy (with RTK) | ±10–20 cm |
| Project type | Timeline |
|---|---|
| Detection of equipment and workers from the flyby | 4–6 weeks |
| Progress monitoring + weekly reports | 8–14 weeks |
| Complete platform with orthophoto + 3D + BIM | 14–22 weeks |