|
1 | 1 | """ |
2 | | -cli.py |
3 | | -Command-line interface for planet_overlap. |
4 | | -Allows flexible AOI and date inputs, dynamic filters, and output configuration. |
| 2 | +client.py - Entry point for running Planet Overlap analysis. |
| 3 | +
|
| 4 | +This script: |
| 5 | +1. Reads AOI GeoJSON files. |
| 6 | +2. Applies filters (geometry, date, cloud cover, sun angle). |
| 7 | +3. Handles spatial and temporal tiling automatically. |
| 8 | +4. Calls pagination module to fetch imagery. |
| 9 | +5. Calls analysis module to compute overlaps and sun angles. |
| 10 | +6. Stores output to configurable directory. |
| 11 | +
|
| 12 | +Supports multiple AOIs and multiple date ranges. |
5 | 13 | """ |
6 | 14 |
|
7 | | -import argparse |
8 | | -from pathlib import Path |
9 | | -from datetime import datetime |
10 | | -from planet_overlap import geometry, pagination, filters |
11 | | - |
def parse_args():
    """Parse and return command-line arguments for the planet_overlap CLI.

    Returns an argparse.Namespace with: aoi, dates, output_dir, max_cloud,
    min_sun_angle, point_buffer.
    """
    parser = argparse.ArgumentParser(
        description="planet_overlap: Download and analyze PlanetScope imagery for specified AOIs and dates."
    )

    # Declarative spec: (flags, options) pairs registered in one pass below.
    arg_specs = [
        # AOI input
        (
            ("--aoi",),
            dict(
                nargs="+",
                required=True,
                help="Paths to AOI GeoJSON files or points (lon,lat) separated by space",
            ),
        ),
        # Date inputs
        (
            ("--dates",),
            dict(
                nargs="+",
                required=True,
                help=(
                    "Dates or date ranges. "
                    "Single date: 2023-06-21 "
                    "Range: 2023-06-01:2023-06-30 "
                    "Multiple: 2023-06-01:2023-06-15 2023-07-01:2023-07-15"
                ),
            ),
        ),
        # Output directory
        (
            ("--output-dir",),
            dict(type=str, default="./planet_output", help="Directory to save outputs"),
        ),
        # Quality settings
        (
            ("--max-cloud",),
            dict(type=float, default=0.5, help="Maximum cloud cover fraction (0.0-1.0)"),
        ),
        (
            ("--min-sun-angle",),
            dict(type=float, default=0.0, help="Minimum sun angle (degrees)"),
        ),
        # Optional buffer for point AOIs
        (
            ("--point-buffer",),
            dict(
                type=float,
                default=0.01,
                help="Buffer radius for point AOIs in degrees (~1km default)",
            ),
        ),
    ]

    for flags, options in arg_specs:
        parser.add_argument(*flags, **options)

    return parser.parse_args()
70 | | - |
71 | | - |
def parse_date_input(date_strings):
    """Convert CLI date inputs into (start, end) datetime tuples.

    Accepts single dates ("YYYY-MM-DD", where start == end) and ranges
    ("YYYY-MM-DD:YYYY-MM-DD"); returns one tuple per input string.
    """
    def _to_range(token):
        # A colon marks an explicit start:end range; otherwise the single
        # date serves as both endpoints.
        if ":" in token:
            first, last = token.split(":")
            return (
                datetime.strptime(first, "%Y-%m-%d"),
                datetime.strptime(last, "%Y-%m-%d"),
            )
        moment = datetime.strptime(token, "%Y-%m-%d")
        return (moment, moment)

    return [_to_range(token) for token in date_strings]
87 | | - |
88 | | - |
89 | | -def prepare_aois(aoi_inputs, point_buffer=0.01): |
| 15 | +import os |
| 16 | +import logging |
| 17 | +from typing import List, Optional |
| 18 | + |
| 19 | +from planet_overlap import filters, pagination, analysis, geometry |
| 20 | + |
| 21 | +# Configure logging for progress tracking |
| 22 | +logging.basicConfig( |
| 23 | + level=logging.INFO, |
| 24 | + format="%(asctime)s [%(levelname)s] %(message)s" |
| 25 | +) |
| 26 | + |
def run_client(
    aoi_files: List[str],
    start_dates: List[str],
    end_dates: List[str],
    output_dir: str = "./outputs",
    cloud_max: float = 0.5,
    min_sun_angle: float = 0.0,
    spatial_tile_threshold_km2: float = 10000,
    temporal_tile_threshold_days: int = 30
) -> None:
    """
    Main function to execute Planet Overlap workflow.

    Parameters
    ----------
    aoi_files : List[str]
        Paths to one or more AOI GeoJSON files.
    start_dates : List[str]
        Start dates corresponding to each AOI (format 'YYYY-MM-DD').
    end_dates : List[str]
        End dates corresponding to each AOI (format 'YYYY-MM-DD').
    output_dir : str
        Directory where output files will be saved.
    cloud_max : float
        Maximum cloud cover allowed (0-1).
    min_sun_angle : float
        Minimum sun angle allowed in degrees.
    spatial_tile_threshold_km2 : float
        Max AOI area before spatial tiling is applied.
    temporal_tile_threshold_days : int
        Max date range before temporal tiling is applied.

    Raises
    ------
    ValueError
        If aoi_files, start_dates and end_dates differ in length.
    """
    # Fail fast on mismatched parallel lists: previously a short date list
    # only surfaced as an IndexError deep inside the processing loop, after
    # AOIs had already been read and buffered.
    if not (len(aoi_files) == len(start_dates) == len(end_dates)):
        raise ValueError(
            "aoi_files, start_dates and end_dates must have the same length"
        )

    # exist_ok avoids the check-then-create race of the previous
    # os.path.exists()/os.makedirs() pair.
    os.makedirs(output_dir, exist_ok=True)
    logging.info("Output directory ready: %s", output_dir)

    # Read and buffer AOIs, keeping each geometry paired with its source
    # file and its own date range. The previous version collected bare
    # geometries and later indexed start_dates/end_dates by position in the
    # filtered list, so any unreadable AOI shifted every later AOI onto the
    # wrong dates (and the progress log referenced the stale loop variable
    # `file` left over from the reading loop).
    logging.info("Reading AOIs...")
    jobs = []
    for path, start, end in zip(aoi_files, start_dates, end_dates):
        try:
            aoi_geom = geometry.read_geojson(path)
            buffered_aoi = geometry.buffer_points(aoi_geom)
        except Exception:
            # Best-effort: skip unreadable AOIs but keep the run going.
            logging.exception("Failed to read or buffer AOI %s", path)
            continue
        jobs.append((path, buffered_aoi, start, end))

    # Loop through each AOI with its correctly paired date range.
    for i, (path, aoi, start, end) in enumerate(jobs, start=1):
        logging.info("Processing AOI %d/%d: %s, %s to %s",
                     i, len(jobs), path, start, end)

        # Build filters.
        # NOTE(review): combined_filter is assembled but fetch_items below
        # only receives cloud_max — confirm whether pagination applies the
        # combined filter internally or whether this is dead work.
        geo_filter = filters.geometry_filter(aoi)
        date_filter = filters.date_filter(start, end)
        cloud_filter = filters.cloud_filter(cloud_max)
        sun_filter = filters.sun_angle_filter(min_sun_angle)

        combined_filter = filters.combine_filters(
            [geo_filter, date_filter, cloud_filter, sun_filter]
        )

        # Determine whether the request must be split spatially/temporally.
        area_km2 = geometry.compute_area_km2(aoi)
        date_range_days = filters.compute_date_range_days(start, end)

        spatial_tiles = [aoi]
        temporal_tiles = [(start, end)]

        if area_km2 > spatial_tile_threshold_km2:
            spatial_tiles = geometry.spatial_tile(aoi)
            logging.info(
                "AOI exceeds %s km², applying spatial tiling: %d tiles",
                spatial_tile_threshold_km2, len(spatial_tiles)
            )

        if date_range_days > temporal_tile_threshold_days:
            temporal_tiles = filters.temporal_tile(start, end)
            logging.info(
                "Date range exceeds %s days, applying temporal tiling: %d intervals",
                temporal_tile_threshold_days, len(temporal_tiles)
            )

        # Fetch imagery and analyze each spatial/temporal tile; a failing
        # tile is logged (with traceback) and skipped so the rest of the
        # run can proceed.
        for s_tile in spatial_tiles:
            for t_start, t_end in temporal_tiles:
                logging.info("Fetching imagery for tile and date range: %s to %s",
                             t_start, t_end)
                try:
                    items = pagination.fetch_items(
                        geometry=s_tile,
                        start_date=t_start,
                        end_date=t_end,
                        cloud_max=cloud_max
                    )
                    analysis_results = analysis.compute_overlap(items, min_sun_angle)
                    analysis.save_results(analysis_results, output_dir)
                    logging.info("Saved results for tile (%s-%s)", t_start, t_end)
                except Exception:
                    logging.exception("Failed processing tile/date %s-%s",
                                      t_start, t_end)
                    continue

    logging.info("Planet Overlap workflow completed successfully.")
0 commit comments