diff --git a/app/Helpers/GeometryHelper.php b/app/Helpers/GeometryHelper.php index c0ca8e1e1..a105bbf35 100755 --- a/app/Helpers/GeometryHelper.php +++ b/app/Helpers/GeometryHelper.php @@ -369,4 +369,41 @@ public static function getSitePolygonsOfPolygons(array $polygonUuids) { return SitePolygon::whereIn('poly_id', $polygonUuids)->where('is_active', true)->get()->pluck('uuid'); } + + public static function getMonitoredPolygonsGeojson($polygonUuid) + { + $polygonGeometry = PolygonGeometry::where('uuid', $polygonUuid) + ->select('uuid', DB::raw('ST_AsGeoJSON(geom) AS geojsonGeometry')) + ->first(); + + return [ + 'geometry' => $polygonGeometry, + 'site_polygon_id' => $polygonGeometry->sitePolygon->id, + ]; + } + + public static function getPolygonGeojson($uuid) + { + $polygonGeometry = PolygonGeometry::where('uuid', $uuid) + ->select('uuid', DB::raw('ST_AsGeoJSON(geom) AS geojsonGeometry')) + ->first(); + $geometry = json_decode($polygonGeometry->geojsonGeometry, true); + $polygonData = $polygonGeometry->sitePolygon; + + return [ + 'type' => 'Feature', + 'properties' => [ + 'poly_id' => $polygonData->poly_id, + 'poly_name' => $polygonData->poly_name ?? '', + 'plantstart' => $polygonData->plantstart ?? '', + 'plantend' => $polygonData->plantend ?? '', + 'practice' => $polygonData->practice ?? '', + 'target_sys' => $polygonData->target_sys ?? '', + 'distr' => $polygonData->distr ?? '', + 'num_trees' => $polygonData->num_trees ?? '', + 'site_id' => $polygonData->site_id ?? 
'', + ], + 'geometry' => $geometry, + ]; + } } diff --git a/app/Helpers/RestorationByEcoregionHelper.php b/app/Helpers/RestorationByEcoregionHelper.php new file mode 100755 index 000000000..db82ded76 --- /dev/null +++ b/app/Helpers/RestorationByEcoregionHelper.php @@ -0,0 +1,78 @@ + [ + 'Southeast Australia temperate forests', + 'Madeira-Tapajós moist forests', + 'Tocantins/Pindare moist forests', + 'Tapajós-Xingu moist forests', + 'Mato Grosso seasonal forests', + 'Mato Grosso seasonal forests, Xingu-Tocantins-Araguaia moist forests', + 'Bahia coastal forests', + 'Tonle Sap freshwater swamp forests', + ], + 'afrotropical' => [ + 'Sinú Valley dry forests', + 'Santa Marta montane forests', + 'Atlantic mixed forests', + 'Petén-Veracruz moist forests', + 'Central American Atlantic moist forests', + 'Petén-Veracruz moist forests, Central American Atlantic moist forests', + 'Central American montane forests', + 'Central American Atlantic moist forests, Central American montane forests', + 'Northern Acacia-Commiphora bushlands and thickets', + 'Southern Rift montane forest-grassland mosaic', + 'Sierra Madre de Chiapas moist forests', + 'Iberian sclerophyllous and semi-deciduous forests', + 'Northwest Iberian montane forests', + 'Northwestern Congolian lowland forests', + 'Albertine Rift montane forests', + 'Sahelian Acacia savanna', + 'Northern Congolian forest-savanna mosaic', + 'Nigerian lowland forests', + 'West Sudanian savanna', + 'Northern Congolian forest-savanna mosaic, Northwestern Congolian lowland forests', + 'Eastern Guinean forests', + 'Victoria Basin forest-savanna mosaic', + 'Guinean forest-savanna mosaic', + 'East Sudanian savanna', + 'Central Zambezian Miombo woodlands', + 'Ethiopian montane grasslands and woodlands', + 'Central African mangroves', + ], + 'paleartic' => [ + 'southern-zanzibar-inhambane-coastal-forest-mosaic', + ], + ]; + $formatedValue = []; + foreach ($categoriesFromEcoRegion as $category => $values) { + $formatedValue[$category] = 0; + 
foreach ($value as $key => $val) { + if (in_array($key, $values)) { + $formatedValue[$category] = round((float) $val, 3); + + break; + } + } + } + + $result = array_filter($formatedValue, function ($val) { + return $val !== 0; + }); + + if (empty($result)) { + return $result; + } + if ($isExport) { + return $result; + } else { + return ['data' => $result]; + } + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/GetIndicatorPolygonStatusController.php b/app/Http/Controllers/V2/MonitoredData/GetIndicatorPolygonStatusController.php new file mode 100644 index 000000000..750ca56c4 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/GetIndicatorPolygonStatusController.php @@ -0,0 +1,51 @@ +where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select([ + 'id', + 'status', + 'is_active', + ]) + ->where('is_active', 1) + ->get() + ->groupBy('status') + ->map(function ($group) { + return $group->count(); + }); + $statuses = ['draft', 'submitted', 'needs-more-information', 'approved']; + $statusesByCount = []; + + foreach ($statuses as $status) { + if (! 
isset($sitePolygonGroupByStatus[$status])) { + $statusesByCount[$status] = 0; + } else { + $statusesByCount[$status] = $sitePolygonGroupByStatus[$status]; + } + } + + return response()->json($statusesByCount); + } catch (\Exception $e) { + Log::info($e); + } + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisController.php b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisController.php new file mode 100644 index 000000000..1fa19c413 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisController.php @@ -0,0 +1,129 @@ + [ + 'relation_name' => 'treeCoverLossIndicator', + 'extra_columns' => '', + ], + 'treeCoverLossFires' => [ + 'relation_name' => 'treeCoverLossIndicator', + ], + 'restorationByStrategy' => [ + 'relation_name' => 'hectaresIndicator', + ], + 'restorationByLandUse' => [ + 'relation_name' => 'hectaresIndicator', + ], + 'restorationByEcoRegion' => [ + 'relation_name' => 'hectaresIndicator', + ], + ]; + + try { + return SitePolygon::whereHas($slugMappings[$slug]['relation_name'], function ($query) use ($slug) { + $query->where('indicator_slug', $slug) + ->where('year_of_analysis', date('Y')); + }) + ->whereHas('site', function ($query) use ($entity) { + if (get_class($entity) == Site::class) { + $query->where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select([ + 'id', + 'poly_name', + 'status', + 'plantstart', + 'site_id', + 'is_active', + 'poly_id', + 'calc_area', + ]) + ->where('is_active', 1) + ->get() + ->map(function ($polygon) use ($slugMappings, $slug) { + $indicator = $polygon->{$slugMappings[$slug]['relation_name']}() + ->where('indicator_slug', $slug) + ->select([ + 'indicator_slug', + 'year_of_analysis', + 'value', + 'created_at', + ]) + ->first(); + $results = [ + 'id' => $polygon->id, + 'poly_name' => $polygon->poly_name, + 'poly_id' => $polygon->poly_id, + 
'site_id' => $polygon->site_id, + 'status' => $polygon->status, + 'plantstart' => $polygon->plantstart, + 'site_name' => $polygon->site->name ?? '', + 'size' => round($polygon->calc_area ?? 0, 3), + 'indicator_slug' => $indicator->indicator_slug, + 'year_of_analysis' => $indicator->year_of_analysis, + 'created_at' => $indicator->created_at, + 'base_line' => $indicator->created_at, + 'data' => [], + ]; + if (str_contains($slug, 'treeCoverLoss')) { + $valueYears = json_decode($indicator->value, true); + $results['data']['2015'] = round((float) $valueYears['2015'], 3); + $results['data']['2016'] = round((float) $valueYears['2016'], 3); + $results['data']['2017'] = round((float) $valueYears['2017'], 3); + $results['data']['2018'] = round((float) $valueYears['2018'], 3); + $results['data']['2019'] = round((float) $valueYears['2019'], 3); + $results['data']['2020'] = round((float) $valueYears['2020'], 3); + $results['data']['2021'] = round((float) $valueYears['2021'], 3); + $results['data']['2022'] = round((float) $valueYears['2022'], 3); + $results['data']['2023'] = round((float) $valueYears['2023'], 3); + $results['data']['2024'] = round((float) $valueYears['2024'], 3); + } + + if ($slug == 'restorationByEcoRegion') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, RestorationByEcoregionHelper::getCategoryEcoRegion($values)); + } + + if ($slug == 'restorationByLandUse' || $slug == 'restorationByStrategy') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, $this->processValuesHectares($values)); + } + + return $results; + }); + } catch (\Exception $e) { + Log::info($e); + } + } + + public function processValuesHectares($values) + { + $separateKeys = []; + foreach ($values as $key => $value) { + $array = explode(',', str_replace('-', '_', $key)); + $arrayTrim = array_map('trim', $array); + foreach ($arrayTrim as $item) { + $separateKeys[$item] = round((float) $value, 3); + } + } + + return ['data' 
=> $separateKeys]; + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisVerifyController.php b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisVerifyController.php new file mode 100644 index 000000000..8cffc4a44 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisVerifyController.php @@ -0,0 +1,73 @@ + [ + 'relation_name' => 'treeCoverLossIndicator', + 'indicator_title' => 'Tree Cover Loss', + ], + 'treeCoverLossFires' => [ + 'relation_name' => 'treeCoverLossIndicator', + 'indicator_title' => 'Tree Cover Loss from Fire', + ], + 'restorationByStrategy' => [ + 'relation_name' => 'hectaresIndicator', + 'indicator_title' => 'Hectares Under Restoration By Strategy', + ], + 'restorationByLandUse' => [ + 'relation_name' => 'hectaresIndicator', + 'indicator_title' => 'Hectares Under Restoration By Target Land Use System', + ], + 'restorationByEcoRegion' => [ + 'relation_name' => 'hectaresIndicator', + 'indicator_title' => 'Hectares Under Restoration By WWF EcoRegion', + ], + ]; + + try { + $polygonUuids = SitePolygon::whereHas('site', function ($query) use ($entity) { + if (get_class($entity) == Site::class) { + $query->where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select(['id', 'poly_id', 'is_active']) + ->where('is_active', 1) + ->get() + ->map(function ($polygon) use ($slugMappings, $slug) { + $indicator = $polygon->{$slugMappings[$slug]['relation_name']}() + ->where('indicator_slug', $slug) + ->where('year_of_analysis', date('Y')) + ->where('site_polygon_id', $polygon->id) + ->first(); + if (! $indicator) { + return $polygon->poly_id; + } + + return null; + }) + ->filter(); + if ($polygonUuids->isEmpty()) { + return response()->json(['message' => 'All polygons have already been analyzed to ' . 
$slugMappings[$slug]['indicator_title']], 200); + } else { + return response()->json($polygonUuids); + + } + } catch (\Exception $e) { + Log::info($e); + } + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/IndicatorEntitySlugExportController.php b/app/Http/Controllers/V2/MonitoredData/IndicatorEntitySlugExportController.php new file mode 100644 index 000000000..88a1301db --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/IndicatorEntitySlugExportController.php @@ -0,0 +1,196 @@ +exportCsv($entity, $slug); + } + + public function exportCsv($entity, $slug) + { + $defaulHeaders = [ + 'poly_name' => 'Polygon Name', + 'size' => 'Size (ha)', + 'site_name' => 'Site Name', + 'status' => 'Status', + 'plantstart' => 'Plant Start Date', + ]; + $treeCoverLossHeaders = [ + ...$defaulHeaders, + '2015' => '2015', + '2016' => '2016', + '2017' => '2017', + '2018' => '2018', + '2019' => '2019', + '2020' => '2020', + '2021' => '2021', + '2022' => '2022', + '2023' => '2023', + '2024' => '2024', + ]; + $restorationByEcoRegionHeaders = [ + ...$defaulHeaders, + 'created_at' => 'Baseline', + 'australasian' => 'Australasian', + 'afrotropical' => 'Afrotropical', + 'palearctic' => 'Palearctic11', + ]; + $restorationByStrategyHeaders = [ + ...$defaulHeaders, + 'created_at' => 'Baseline', + 'tree_planting' => 'Tree Planting', + 'assisted_natural_regeneration' => 'Assisted Natural Regeneration', + 'direct_seeding' => 'Direct Seeding', + ]; + $restorationByLandUseHeaders = [ + ...$defaulHeaders, + 'created_at' => 'Baseline', + 'agroforest' => 'Agroforest', + 'natural_forest' => 'Natural Forest', + 'mangrove' => 'Mangrove', + ]; + $slugMappings = [ + 'treeCoverLoss' => [ + 'relation_name' => 'treeCoverLossIndicator', + 'columns' => $treeCoverLossHeaders, + 'indicator_title' => 'Tree Cover Loss', + ], + 'treeCoverLossFires' => [ + 'relation_name' => 'treeCoverLossIndicator', + 'columns' => $treeCoverLossHeaders, + 'indicator_title' => 'Tree Cover Loss from Fire', + ], + 
'restorationByStrategy' => [ + 'relation_name' => 'hectaresIndicator', + 'columns' => $restorationByStrategyHeaders, + 'indicator_title' => 'Hectares Under Restoration By Strategy', + ], + 'restorationByLandUse' => [ + 'relation_name' => 'hectaresIndicator', + 'columns' => $restorationByLandUseHeaders, + 'indicator_title' => 'Hectares Under Restoration By Target Land Use System', + ], + 'restorationByEcoRegion' => [ + 'relation_name' => 'hectaresIndicator', + 'columns' => $restorationByEcoRegionHeaders, + 'indicator_title' => 'Hectares Under Restoration By WWF EcoRegion', + ], + ]; + + $sitePolygonsIndicator = SitePolygon::whereHas($slugMappings[$slug]['relation_name'], function ($query) use ($slug) { + $query->where('indicator_slug', $slug) + ->where('year_of_analysis', date('Y')); + }) + ->whereHas('site', function ($query) use ($entity) { + if (get_class($entity) == Site::class) { + $query->where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select([ + 'id', + 'poly_name', + 'status', + 'plantstart', + 'site_id', + 'is_active', + 'poly_id', + 'calc_area', + ]) + ->where('is_active', 1) + ->get() + ->map(function ($polygon) use ($slugMappings, $slug) { + $indicator = $polygon->{$slugMappings[$slug]['relation_name']}() + ->where('indicator_slug', $slug) + ->select([ + 'indicator_slug', + 'year_of_analysis', + 'value', + 'created_at', + ]) + ->first(); + $results = [ + 'poly_name' => $polygon->poly_name, + 'status' => $polygon->status, + 'plantstart' => $polygon->plantstart, + 'site_name' => $polygon->site->name ?? '', + 'size' => $polygon->calc_area ?? 
0, + 'created_at' => $indicator->created_at, + ]; + if (str_contains($slug, 'treeCoverLoss')) { + $valueYears = json_decode($indicator->value, true); + $results['2015'] = $valueYears['2015']; + $results['2016'] = $valueYears['2016']; + $results['2017'] = (float) $valueYears['2017']; + $results['2018'] = $valueYears['2018']; + $results['2019'] = $valueYears['2019']; + $results['2020'] = $valueYears['2020']; + $results['2021'] = $valueYears['2021']; + $results['2022'] = $valueYears['2022']; + $results['2023'] = $valueYears['2023']; + $results['2024'] = $valueYears['2024']; + } + if ($slug == 'restorationByEcoRegion') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, RestorationByEcoregionHelper::getCategoryEcoRegion($values, true)); + } + if ($slug == 'restorationByLandUse' || $slug == 'restorationByStrategy') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, $this->processValuesHectares($values)); + } + + return $results; + }); + + $filteredIndicators = []; + foreach ($sitePolygonsIndicator as $polygon) { + $filteredIndicator = []; + foreach ($slugMappings[$slug]['columns'] as $key => $label) { + $filteredIndicator[$key] = $polygon[$key] ?? ''; + } + $filteredIndicators[] = $filteredIndicator; + } + + $csv = Writer::createFromString(''); + + $csv->insertOne(array_values($slugMappings[$slug]['columns'])); + + foreach ($filteredIndicators as $filteredIndicator) { + $csv->insertOne(array_values($filteredIndicator)); + } + + $csvContent = $csv->toString(); + + return response($csvContent, 200, [ + 'Content-Type' => 'text/csv', + 'Content-Disposition' => 'attachment; filename=indicator' . $slugMappings[$slug]['indicator_title'] . 
'.csv', + ]); + + } + + public function processValuesHectares($values) + { + $separateKeys = []; + foreach ($values as $key => $value) { + $array = explode(',', str_replace('-', '_', $key)); + $arrayTrim = array_map('trim', $array); + foreach ($arrayTrim as $item) { + $separateKeys[$item] = round((float) $value, 3); + } + } + + return $separateKeys; + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/RunIndicatorAnalysisController.php b/app/Http/Controllers/V2/MonitoredData/RunIndicatorAnalysisController.php new file mode 100644 index 000000000..8fa5669c7 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/RunIndicatorAnalysisController.php @@ -0,0 +1,40 @@ +all(); + $binary_data = Redis::get('run:indicator|'.$slug.'|'.json_encode($requestData['uuids'])); + Log::info($binary_data); + if (! $binary_data) { + $delayedJob = DelayedJob::create(); + $job = new RunIndicatorAnalysisJob( + $delayedJob->id, + $requestData, + $slug + ); + dispatch($job); + + return (new DelayedJobResource($delayedJob))->additional(['message' => 'Analysis for '.$slug.' is being processed']); + } else { + return response()->json(['message' => 'Analysis for '.$slug.' is already processed'], 200); + } + } catch (\Exception $e) { + Log::error('Error during analysis for ' . $slug . ' : ' . $e->getMessage()); + + return response()->json(['error' => 'An error occurred during analysis for ' . 
$slug], 500); + } + } +} diff --git a/app/Jobs/RunIndicatorAnalysisJob.php b/app/Jobs/RunIndicatorAnalysisJob.php new file mode 100644 index 000000000..aabc716d7 --- /dev/null +++ b/app/Jobs/RunIndicatorAnalysisJob.php @@ -0,0 +1,63 @@ +delayed_job_id = $delayed_job_id; + $this->request = $request; + $this->slug = $slug; + } + + public function handle(RunIndicatorAnalysisService $runIndicatorAnalysisService) + { + try { + $delayedJob = DelayedJob::findOrFail($this->delayed_job_id); + + $binary_data = $runIndicatorAnalysisService->run($this->request, $this->slug); + Redis::set('run:indicator|'.$this->slug.'|'.json_encode($this->request['uuids']), $binary_data); + + $delayedJob->update([ + 'status' => DelayedJob::STATUS_SUCCEEDED, + 'payload' => ['message' => 'Analysis completed'], + 'status_code' => Response::HTTP_OK, + ]); + + } catch (Exception $e) { + Log::error('Error in the analysis: ' . $e->getMessage()); + + DelayedJob::where('id', $this->delayed_job_id)->update([ + 'status' => DelayedJob::STATUS_FAILED, + 'payload' => json_encode(['error' => $e->getMessage()]), + 'status_code' => Response::HTTP_INTERNAL_SERVER_ERROR, + ]); + } + } +} diff --git a/app/Models/V2/MonitoredData/IndicatorHectares.php b/app/Models/V2/MonitoredData/IndicatorHectares.php new file mode 100644 index 000000000..55deef5f4 --- /dev/null +++ b/app/Models/V2/MonitoredData/IndicatorHectares.php @@ -0,0 +1,29 @@ +belongsTo(SitePolygon::class, 'site_polygon_id', 'id'); + } +} diff --git a/app/Models/V2/MonitoredData/IndicatorTreeCoverLoss.php b/app/Models/V2/MonitoredData/IndicatorTreeCoverLoss.php new file mode 100644 index 000000000..599103c18 --- /dev/null +++ b/app/Models/V2/MonitoredData/IndicatorTreeCoverLoss.php @@ -0,0 +1,29 @@ +belongsTo(SitePolygon::class, 'site_polygon_id', 'id'); + } +} diff --git a/app/Models/V2/Sites/SitePolygon.php b/app/Models/V2/Sites/SitePolygon.php index 9f60a7715..a41191fd3 100644 --- a/app/Models/V2/Sites/SitePolygon.php +++ 
b/app/Models/V2/Sites/SitePolygon.php @@ -5,6 +5,8 @@ use App\Models\Traits\HasUuid; use App\Models\V2\AuditableModel; use App\Models\V2\AuditStatus\AuditStatus; +use App\Models\V2\MonitoredData\IndicatorHectares; +use App\Models\V2\MonitoredData\IndicatorTreeCoverLoss; use App\Models\V2\PointGeometry; use App\Models\V2\PolygonGeometry; use App\Models\V2\Projects\Project; @@ -98,6 +100,16 @@ public function auditStatuses(): MorphMany return $this->morphMany(AuditStatus::class, 'auditable'); } + public function hectaresIndicator() + { + return $this->hasMany(IndicatorHectares::class, 'site_polygon_id'); + } + + public function treeCoverLossIndicator() + { + return $this->hasMany(IndicatorTreeCoverLoss::class, 'site_polygon_id'); + } + public function getAuditableNameAttribute(): string { return $this->poly_name ?? ''; diff --git a/app/Services/PythonService.php b/app/Services/PythonService.php index 46146e6cc..d425cf2af 100644 --- a/app/Services/PythonService.php +++ b/app/Services/PythonService.php @@ -85,6 +85,50 @@ public function clipPolygons($geojson): ?array return $result; } + public function IndicatorPolygon($geojson, $indicator_name, $api_key) + { + $inputGeojson = $this->getTemporaryFile('input.geojson'); + $outputGeojson = $this->getTemporaryFile('output.geojson'); + + $writeHandle = fopen($inputGeojson, 'w'); + + try { + fwrite($writeHandle, json_encode($geojson)); + } finally { + fclose($writeHandle); + } + + $process = new Process(['python3', base_path() . '/resources/python/polygon-indicator/app.py', $inputGeojson, $outputGeojson, $indicator_name, $api_key]); + + $stdout = ''; + $stderr = ''; + + $process->run(function ($type, $buffer) use (&$stdout, &$stderr) { + if (Process::ERR === $type) { + $stderr .= $buffer; + } else { + $stdout .= $buffer; + } + }); + + if (! $process->isSuccessful()) { + Log::error('Error running indicator script: ' . $stderr); + + return null; + } + + if (! empty($stderr)) { + Log::warning('Python script warnings/errors: ' . 
$stderr); + } + + $result = json_decode(file_get_contents($outputGeojson), true); + + unlink($inputGeojson); + unlink($outputGeojson); + + return $result; + } + protected function getTemporaryFile(string $prefix): string { return tempnam(sys_get_temp_dir(), $prefix); diff --git a/app/Services/RunIndicatorAnalysisService.php b/app/Services/RunIndicatorAnalysisService.php new file mode 100644 index 000000000..67843e8fc --- /dev/null +++ b/app/Services/RunIndicatorAnalysisService.php @@ -0,0 +1,185 @@ + [ + 'sql' => 'SELECT umd_tree_cover_loss__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss__year', + 'query_url' => '/dataset/umd_tree_cover_loss/latest/query', + 'indicator' => 'umd_tree_cover_loss', + 'model' => IndicatorTreeCoverLoss::class, + 'table_name' => 'indicator_output_tree_cover_loss', + ], + 'treeCoverLossFires' => [ + 'sql' => 'SELECT umd_tree_cover_loss_from_fires__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss_from_fires__year', + 'query_url' => '/dataset/umd_tree_cover_loss_from_fires/latest/query', + 'indicator' => 'umd_tree_cover_loss_from_fires', + 'model' => IndicatorTreeCoverLoss::class, + 'table_name' => 'indicator_output_tree_cover_loss', + ], + 'restorationByEcoRegion' => [ + 'indicator' => 'wwf_terrestrial_ecoregions', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + 'restorationByStrategy' => [ + 'indicator' => 'restoration_practice', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + 'restorationByLandUse' => [ + 'indicator' => 'target_system', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + ]; + + if (! isset($slugMappings[$slug])) { + return response()->json(['message' => 'Slug Not Found'], 400); + } + foreach ($request['uuids'] as $uuid) { + $polygonGeometry = $this->getGeometry($uuid); + $registerExist = DB::table($slugMappings[$slug]['table_name'].' 
as i') + ->where('i.site_polygon_id', $polygonGeometry['site_polygon_id']) + ->where('i.indicator_slug', $slug) + ->where('i.year_of_analysis', Carbon::now()->year) + ->exists(); + + if ($registerExist) { + continue; + } + + if (str_contains($slug, 'restorationBy')) { + $geojson = GeometryHelper::getPolygonGeojson($uuid); + $indicatorRestorationResponse = App::make(PythonService::class)->IndicatorPolygon($geojson, $slugMappings[$slug]['indicator'], getenv('GFW_SECRET_KEY')); + + if ($slug == 'restorationByEcoRegion') { + $value = json_encode($indicatorRestorationResponse['area'][$slugMappings[$slug]['indicator']]); + } else { + $value = $this->formatKeysValues($indicatorRestorationResponse['area'][$slugMappings[$slug]['indicator']]); + } + $data = [ + 'indicator_slug' => $slug, + 'site_polygon_id' => $polygonGeometry['site_polygon_id'], + 'year_of_analysis' => Carbon::now()->year, + 'value' => $value, + ]; + $slugMappings[$slug]['model']::create($data); + + continue; + } + + $response = $this->sendApiRequestIndicator(getenv('GFW_SECRET_KEY'), $slugMappings[$slug]['query_url'], $slugMappings[$slug]['sql'], $polygonGeometry['geo']); + if (str_contains($slug, 'treeCoverLoss')) { + $processedTreeCoverLossValue = $this->processTreeCoverLossValue($response->json()['data']); + } + + if ($response->successful()) { + if (str_contains($slug, 'treeCoverLoss')) { + $data = $this->generateTreeCoverLossData($processedTreeCoverLossValue, $slug, $polygonGeometry); + } else { + $data = [ + 'indicator_slug' => $slug, + 'site_polygon_id' => $polygonGeometry['site_polygon_id'], + 'year_of_analysis' => Carbon::now()->year, + 'value' => json_encode($response->json()['data']), + ]; + } + + $slugMappings[$slug]['model']::create($data); + } else { + Log::error('A problem occurred during the analysis of the geometry for the polygon: ' . 
$uuid); + } + } + + return response()->json(['message' => 'Analysis completed']); + } catch (\Exception $e) { + Log::info($e); + + return response()->json([ + 'message' => 'An error occurred during the analysis', + 'error' => $e->getMessage(), + ], 500); + } + } + + public function getGeometry($polygonUuid) + { + $geojson = GeometryHelper::getMonitoredPolygonsGeojson($polygonUuid); + $geoJsonObject = json_decode($geojson['geometry']->geojsonGeometry, true); + + return [ + 'geo' => [ + 'type' => 'Polygon', + 'coordinates' => $geoJsonObject['coordinates'], + ], + 'site_polygon_id' => $geojson['site_polygon_id'], + ]; + } + + public function sendApiRequestIndicator($secret_key, $query_url, $query_sql, $geometry) + { + return Http::withHeaders([ + 'content-type' => 'application/json', + 'x-api-key' => $secret_key, + ])->post('https://data-api.globalforestwatch.org' . $query_url, [ + 'sql' => $query_sql, + 'geometry' => $geometry, + ]); + } + + public function processTreeCoverLossValue($data) + { + $processedTreeCoverLossValue = []; + foreach ($data as $i) { + $processedTreeCoverLossValue[$i['umd_tree_cover_loss__year']] = $i['area__ha']; + } + + return $processedTreeCoverLossValue; + } + + public function generateTreeCoverLossData($processedTreeCoverLossValue, $slug, $polygonGeometry) + { + $yearsOfAnalysis = [2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024]; + $responseData = []; + foreach ($yearsOfAnalysis as $year) { + if (isset($processedTreeCoverLossValue[$year])) { + $responseData[$year] = $processedTreeCoverLossValue[$year]; + } else { + $responseData[$year] = 0.0; + } + } + + return [ + 'indicator_slug' => $slug, + 'site_polygon_id' => $polygonGeometry['site_polygon_id'], + 'year_of_analysis' => Carbon::now()->year, + 'value' => json_encode($responseData), + ]; + } + + public function formatKeysValues($data) + { + $formattedData = []; + foreach ($data as $key => $value) { + $formattedKey = strtolower(str_replace(' ', '-', $key)); + 
$formattedData[$formattedKey] = $value; + } + + return json_encode($formattedData); + } +} diff --git a/openapi-src/V2/definitions/IndicatorPolygonsStatus.yml b/openapi-src/V2/definitions/IndicatorPolygonsStatus.yml new file mode 100644 index 000000000..47807cbc0 --- /dev/null +++ b/openapi-src/V2/definitions/IndicatorPolygonsStatus.yml @@ -0,0 +1,12 @@ + +type: object +properties: + draft: + type: integer + submitted: + type: integer + approved: + type: integer + needs-more-information: + type: integer + \ No newline at end of file diff --git a/openapi-src/V2/definitions/IndicatorPost.yml b/openapi-src/V2/definitions/IndicatorPost.yml new file mode 100644 index 000000000..630ba2b58 --- /dev/null +++ b/openapi-src/V2/definitions/IndicatorPost.yml @@ -0,0 +1,6 @@ +type: object +properties: + uuids: + type: array + items: + type: string \ No newline at end of file diff --git a/openapi-src/V2/definitions/Indicators.yml b/openapi-src/V2/definitions/Indicators.yml new file mode 100644 index 000000000..a880fc31f --- /dev/null +++ b/openapi-src/V2/definitions/Indicators.yml @@ -0,0 +1,45 @@ + +type: object +properties: + id: + type: integer + poly_name: + type: string + status: + type: string + plantstart: + type: string + format: date + site_name: + type: string + size: + type: interger + created_at: + type: string + format: date + indicator_slug: + type: string + year_of_analysis: + type: integer + value: + type: object + 2015: + type: number + 2016: + type: number + 2017: + type: number + 2018: + type: number + 2019: + type: number + 2020: + type: number + 2021: + type: number + 2022: + type: number + 2023: + type: number + 2024: + type: number \ No newline at end of file diff --git a/openapi-src/V2/definitions/_index.yml b/openapi-src/V2/definitions/_index.yml index 3e84461b9..e67305c62 100644 --- a/openapi-src/V2/definitions/_index.yml +++ b/openapi-src/V2/definitions/_index.yml @@ -396,3 +396,9 @@ UserCreateComplete: $ref: './UserCreateComplete.yml' 
V2AdminProjectUpdate: $ref: './V2AdminProjectUpdate.yml' +IndicatorPost: + $ref: './IndicatorPost.yml' +Indicators: + $ref: './Indicators.yml' +IndicatorPolygonsStatus: + $ref: './IndicatorPolygonsStatus.yml' diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-entity-slug-export.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-entity-slug-export.yml new file mode 100644 index 000000000..2b956dfb5 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-entity-slug-export.yml @@ -0,0 +1,32 @@ +operationId: get-v2-indicators-entity-slug-export +summary: Export CSV document of indicators for a specific entity and slug +tags: + - Export + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true +produces: + - text/plain +responses: + '200': + description: OK + schema: + type: file + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml new file mode 100644 index 000000000..ea1a12d4c --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml @@ -0,0 +1,31 @@ +operationId: get-v2-indicators-polygon-indicator-analysis +summary: get all indicators from the polygon indicator analysis +tags: + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. 
+ required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true +responses: + '200': + description: OK + schema: + type: array + items: + $ref: '../../definitions/_index.yml#/Indicators' + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-status.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-status.yml new file mode 100644 index 000000000..636048775 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-status.yml @@ -0,0 +1,26 @@ +operationId: get-v2-indicators-polygon-indicator-status +summary: get all indicators from the polygon indicator status +tags: + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true +responses: + '200': + description: OK + schema: + type: array + items: + $ref: '../../definitions/_index.yml#/IndicatorPolygonsStatus' + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-verify.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-verify.yml new file mode 100644 index 000000000..7fab4e236 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-verify.yml @@ -0,0 +1,29 @@ +operationId: get-v2-indicators-polygon-indicator-verify +summary: get all indicators from the polygon indicator verify +tags: + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. 
+ required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true +responses: + '200': + description: OK + schema: + type: array + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml b/openapi-src/V2/paths/MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml new file mode 100644 index 000000000..c28391ca2 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml @@ -0,0 +1,26 @@ +operationId: post-v2-indicators-polygon-indicator-analysis +summary: Create a new indicator in the polygon indicator analysis +tags: + - V2 Indicators +parameters: + - in: path + name: slug + type: string + description: Optional. Filter counts and metrics by slug. 
+ required: true + - name: body + in: body + required: true + schema: + $ref: '../../definitions/_index.yml#/IndicatorPost' +responses: + '201': + description: Created + schema: + type: object + properties: + uuids: + type: array + items: + type: string + description: A list of processed polygons diff --git a/openapi-src/V2/paths/_index.yml b/openapi-src/V2/paths/_index.yml index fa643036d..e24924bf6 100644 --- a/openapi-src/V2/paths/_index.yml +++ b/openapi-src/V2/paths/_index.yml @@ -2821,3 +2821,18 @@ /v2/terrafund/validation/polygons: post: $ref: './Terrafund/post-v2-terrafund-validation-polygons.yml' +/v2/indicators/{entity}/{uuid}/{slug}: + get: + $ref: './MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml' +/v2/indicators/{slug}: + post: + $ref: './MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml' +/v2/indicators/{entity}/{uuid}: + get: + $ref: './MonitoredData/get-v2-indicators-polygon-indicator-status.yml' +/v2/indicators/{entity}/{uuid}/{slug}/verify: + get: + $ref: './MonitoredData/get-v2-indicators-polygon-indicator-verify.yml' +/v2/indicators/{entity}/{uuid}/{slug}/export: + get: + $ref: './MonitoredData/get-v2-indicators-entity-slug-export.yml' diff --git a/resources/docs/swagger-v2.yml b/resources/docs/swagger-v2.yml index 6ffff9c2a..bf09d2730 100644 --- a/resources/docs/swagger-v2.yml +++ b/resources/docs/swagger-v2.yml @@ -44396,6 +44396,69 @@ definitions: properties: is_test: type: boolean + IndicatorPost: + type: object + properties: + uuids: + type: array + items: + type: string + Indicators: + type: object + properties: + '2015': + type: number + '2016': + type: number + '2017': + type: number + '2018': + type: number + '2019': + type: number + '2020': + type: number + '2021': + type: number + '2022': + type: number + '2023': + type: number + '2024': + type: number + id: + type: integer + poly_name: + type: string + status: + type: string + plantstart: + type: string + format: date + site_name: + type: string + size: 
+ type: interger + created_at: + type: string + format: date + indicator_slug: + type: string + year_of_analysis: + type: integer + value: + type: object + IndicatorPolygonsStatus: + type: object + properties: + draft: + type: integer + submitted: + type: integer + approved: + type: integer + needs-more-information: + type: integer paths: '/v2/tree-species/{entity}/{UUID}': get: @@ -99229,3 +99292,214 @@ paths: message: type: string description: A message indicating the completion of validation for all site polygons. + '/v2/indicators/{entity}/{uuid}/{slug}': + get: + operationId: get-v2-indicators-polygon-indicator-analysis + summary: get all indicators from the polygon indicator analysis + tags: + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. 
+ required: true + responses: + '200': + description: OK + schema: + type: array + items: + type: object + properties: + '2015': + type: number + '2016': + type: number + '2017': + type: number + '2018': + type: number + '2019': + type: number + '2020': + type: number + '2021': + type: number + '2022': + type: number + '2023': + type: number + '2024': + type: number + id: + type: integer + poly_name: + type: string + status: + type: string + plantstart: + type: string + format: date + site_name: + type: string + size: + type: interger + created_at: + type: string + format: date + indicator_slug: + type: string + year_of_analysis: + type: integer + value: + type: object + '400': + description: Bad request + '500': + description: Internal server error + '/v2/indicators/{slug}': + post: + operationId: post-v2-indicators-polygon-indicator-analysis + summary: Create a new indicator in the polygon indicator analysis + tags: + - V2 Indicators + parameters: + - in: path + name: slug + type: string + description: Optional. Filter counts and metrics by slug. + required: true + - name: body + in: body + required: true + schema: + type: object + properties: + uuids: + type: array + items: + type: string + responses: + '201': + description: Created + schema: + type: object + properties: + uuids: + type: array + items: + type: string + description: A list of processed polygons + '/v2/indicators/{entity}/{uuid}': + get: + operationId: get-v2-indicators-polygon-indicator-status + summary: get all indicators from the polygon indicator status + tags: + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. 
+ required: true + responses: + '200': + description: OK + schema: + type: array + items: + type: object + properties: + draft: + type: integer + submitted: + type: integer + approved: + type: integer + needs-more-information: + type: integer + '400': + description: Bad request + '500': + description: Internal server error + '/v2/indicators/{entity}/{uuid}/{slug}/verify': + get: + operationId: get-v2-indicators-polygon-indicator-verify + summary: get all indicators from the polygon indicator verify + tags: + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true + responses: + '200': + description: OK + schema: + type: array + '400': + description: Bad request + '500': + description: Internal server error + '/v2/indicators/{entity}/{uuid}/{slug}/export': + get: + operationId: get-v2-indicators-entity-slug-export + summary: Export CSV document of indicators for a specific entity and slug + tags: + - Export + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. 
+ required: true + produces: + - text/plain + responses: + '200': + description: OK + schema: + type: file + '400': + description: Bad request + '500': + description: Internal server error diff --git a/resources/python/polygon-indicator/app.py b/resources/python/polygon-indicator/app.py new file mode 100644 index 000000000..94235376a --- /dev/null +++ b/resources/python/polygon-indicator/app.py @@ -0,0 +1,85 @@ +import json +import sys + +import requests +import yaml +import gfw_api as gfw +import tree_cover_indicator as ttc +from fiona.model import Geometry, Feature, Properties + + +def generate_indicator(feature, indicator_name, params, session=None): + if params["indicators"][indicator_name]["data_source"] == "gfw": + supported_layers = gfw.get_supported_gfw_layer() + if indicator_name not in supported_layers.keys(): + raise gfw.UnsupportedGFWLayer(indicator_name) + polygon_gfw_data = gfw.get_gfw_data( + feature.geometry, session, indicator_name, params + ) + if params["indicators"][indicator_name]["area_source"] == "gfw": + key_label = params["indicators"][indicator_name]["key_label"] + key_value = params["indicators"][indicator_name]["key_value"] + polygon_data = { + row[key_label]: row[key_value] for row in polygon_gfw_data["data"] + } + else: + key_label = params["indicators"][indicator_name]["key_label"] + polygon_data = { + row[key_label]: ttc.calculate_area(feature) + for row in polygon_gfw_data["data"] + } + elif params["indicators"][indicator_name]["data_source"] == "polygon": + polygon_data = { + feature.properties[ + params["indicators"][indicator_name]["polygon_key"] + ]: ttc.calculate_area(feature) + } + + if params["indicators"][indicator_name]["zero_fill"]: + values = {} + for year in range( + params["indicators"][indicator_name]["start_year"], + params["indicators"][indicator_name]["end_year"] + 1, + ): + values[year] = polygon_data.get(year, 0.0) + indicator_results = {indicator_name: values} + else: + indicator_results = {indicator_name: 
polygon_data} + return indicator_results + +def main(): + input_geojson = sys.argv[1] + output_geojson = sys.argv[2] + indicator_name = sys.argv[3] + api_key = sys.argv[4] + + with open(input_geojson, "r") as f: + geojson_data = json.load(f) + + config_path = "resources/python/polygon-indicator/config.yaml" + with open(config_path) as conf_file: + config = yaml.safe_load(conf_file) + + with requests.Session() as session: + session.headers = { + "content-type": "application/json", + "x-api-key": f"{api_key}", + } + + fiona_feature = Feature( + geometry=Geometry( + type=geojson_data["geometry"]["type"], + coordinates=geojson_data["geometry"]["coordinates"] + ), + properties=Properties(**geojson_data["properties"]) + ) + if (indicator_name == "wwf_terrestrial_ecoregions"): + result = generate_indicator(fiona_feature, indicator_name, config, session) + else: + result = generate_indicator(fiona_feature, indicator_name, config) + + with open(output_geojson, 'w') as f: + json.dump({'area': result}, f) + +if __name__ == "__main__": + main() diff --git a/resources/python/polygon-indicator/config.yaml b/resources/python/polygon-indicator/config.yaml new file mode 100644 index 000000000..e75b39874 --- /dev/null +++ b/resources/python/polygon-indicator/config.yaml @@ -0,0 +1,164 @@ +base: + config_path: "secrets.yaml" + temp_path: "temp/" + log_level: DEBUG + data_path: "data/" + +polygons: + intervention_key: IntervType + area_key: Area_ha + +gfw_api: + base_url: https://data-api.globalforestwatch.org + +s3: + tile_bucket: tof-output + tile_year: 2020 + tile_prefix: /tiles/ + lookup_bucket: tof-output + lookup_prefix: tile-databases + lookup_filelist: + - "tile-databases/america-africa-europe-corrected.csv" + - "tile-databases/asia.csv" + +gdal: + outfile: temp/tiles/mosaic + outfile_suffix: 2020 + +indicators: + umd_tree_cover_loss: + data_source: gfw + start_year: 2018 + end_year: 2024 + query_url: /dataset/umd_tree_cover_loss/latest/query + sql: "SELECT 
umd_tree_cover_loss__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss__year" + key_label: umd_tree_cover_loss__year + key_value: area__ha + area_source: gfw + zero_fill: True + umd_tree_cover_loss_from_fires: + data_source: gfw + start_year: 2018 + end_year: 2024 + query_url: /dataset/umd_tree_cover_loss_from_fires/latest/query + sql: "SELECT umd_tree_cover_loss_from_fires__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss_from_fires__year" + key_label: umd_tree_cover_loss_from_fires__year + key_value: area__ha + area_source: gfw + zero_fill: True + wwf_terrestrial_ecoregions: + data_source: gfw + query_url: /dataset/wwf_terrestrial_ecoregions/latest/query + sql: "SELECT eco_name FROM results" + key_label: eco_name + area_source: polygon + zero_fill: False + wri_tropical_tree_cover: + data_source: tiles + data_year: 2020 + query_url: /dataset/wri_tropical_tree_cover/latest/query + sql: "SELECT AVG(wri_tropical_tree_cover__percent) FROM results" + key_label: wri_tropical_tree_cover__percent + download_tile_lookup: False + small_sites_area_thresh: 0.5 + shift_value: 0.0001081081 + lulc: + input_path: C3S-LC-L4-LCCS-Map-300m-P1Y-2020-v2.1.1.tif + temp_output_prefix: temp-lulc- + target_crs: 4326 + default_initial_crs: "EPSG:4326" + reproj_crs: 3857 + buffer_size: 500 + gdf_project_key: "Project" + zonal_stats: "count majority" + small_sites_error: + zeroToNine: + min: 0.0 + max: 10.0 + error: 3.6386 + tenToThirtyNine: + min: 10.0 + max: 40.0 + error: 16.68 + fortyTo1Hundred: + min: 40.0 + max: 100.0 + error: 23.468 + land_cover_data: + source: geotiff + query_url: /dataset/esa_land_cover_2015/latest/query + sql: SELECT esa_land_cover_2015__class, esa_land_cover_2015__uint16, Count(*) FROM results GROUP BY esa_land_cover_2015__uint16, esa_land_cover_2015__class ORDER BY count DESC LIMIT 1 + key_label: count + admin_boundary_data: + source: gfw + query_url: /dataset/wri_tropical_tree_cover/latest/query + sql: SELECT 
gadm_administrative_boundaries__adm0, Count(*) FROM results GROUP BY gadm_administrative_boundaries__adm0 ORDER BY count DESC LIMIT 1 + key_label: gadm_administrative_boundaries__adm0 + lulc_ci_data: + source: csv + path: ttc_supporting_data/lulc_ci.csv + subregion_ci_data: + source: geojson + path: ttc_supporting_data/subregions_conf.geojson + esa_lulc_conversions: + urban: + - 190 + grassland: + - 130 + cropland: + - 10 + - 11 + - 12 + - 20 + agriculture: + - 10 + - 11 + - 12 + - 20 + forest: + - 50 + - 60 + - 61 + - 62 + - 70 + - 71 + - 80 + - 81 + - 82 + - 90 + - 160 + - 170 + mosaic vegetation: + - 30 + - 40 + - 100 + - 110 + mosaic: + - 30 + - 40 + - 100 + - 110 + shrub/scrub/otherland: + - 120 + - 121 + - 122 + - 140 + - 150 + - 151 + - 152 + - 153 + - 200 + - 201 + - 202 + - 0 + - 220 + target_system: + data_source: polygon + area_source: polygon + polygon_key: target_sys + zero_fill: False + restoration_practice: + data_source: polygon + area_source: polygon + polygon_key: practice + zero_fill: False diff --git a/resources/python/polygon-indicator/gfw_api.py b/resources/python/polygon-indicator/gfw_api.py new file mode 100644 index 000000000..7b0ac8a38 --- /dev/null +++ b/resources/python/polygon-indicator/gfw_api.py @@ -0,0 +1,26 @@ +import shapely + +def get_gfw_data(polygon, session, indicator_type, params): + url = f'{params["gfw_api"]["base_url"]}{params["indicators"][indicator_type]["query_url"]}' + sql = params["indicators"][indicator_type]["sql"] + payload = {"sql": sql, "geometry": shapely.geometry.mapping(polygon)} + with session.post(url, json=payload) as response: + if not response.ok: + raise RuntimeError(f"{response.status_code}") + response_data = response.json() + return response_data + + +class UnsupportedGFWLayer(Exception): + def __init__(self, estimator_name): + self.msg = f"Unsupported GFW layer {estimator_name}" + super().__init__(self.msg) + + +def get_supported_gfw_layer(): + return { + "umd_tree_cover_loss": "umd_tree_cover_loss", 
+ "umd_tree_cover_loss_from_fires": "umd_tree_cover_loss_from_fires", + "wwf_terrestrial_ecoregions": "wwf_terrestrial_ecoregions", + "wri_tropical_tree_cover": "wri_tropical_tree_cover", + } diff --git a/resources/python/polygon-indicator/requirements.txt b/resources/python/polygon-indicator/requirements.txt new file mode 100755 index 000000000..7a495d4d3 --- /dev/null +++ b/resources/python/polygon-indicator/requirements.txt @@ -0,0 +1,12 @@ +shapely==2.0.1 +geopandas==1.0.1 +pandas==2.1.3 +requests==2.32.3 +yaml==6.0.2 +fiona==1.10.1 +exactextract==0.2.0 +rasterio==1.4.3 +boto3==1.35.77 +gdal==3.6.2 +pyyaml==6.0.1 +rasterstats==0.20.0 diff --git a/resources/python/polygon-indicator/tree_cover_indicator.py b/resources/python/polygon-indicator/tree_cover_indicator.py new file mode 100644 index 000000000..80090da0f --- /dev/null +++ b/resources/python/polygon-indicator/tree_cover_indicator.py @@ -0,0 +1,164 @@ +import shapely +import pandas as pd +# from rasterstats import zonal_stats +import geopandas as gpd +import os +from shapely.geometry import Polygon, shape, box +from shapely.ops import transform +from exactextract import exact_extract +import rasterio +from pathlib import Path + +import ttc_s3_utils as s3_utils +import ttc_error_utils as error + + +def get_gfw_data(geometry, session, dataset, params): + url = f'{params["gfw_api"]["base_url"]}{params["indicators"]["wri_tropical_tree_cover"][dataset]["query_url"]}' + sql = params["indicators"]["wri_tropical_tree_cover"][dataset]["sql"] + payload = {"sql": sql, "geometry": shapely.geometry.mapping(geometry)} + response = session.post(url, json=payload) + response.raise_for_status() + return response.json() + + +def calculate_area(feature): + geometry = shape(feature["geometry"]) + gdf = gpd.GeoDataFrame(geometry=[geometry], crs="EPSG:4326") + gdf = gdf.to_crs("EPSG:3857") + area_m2 = gdf.geometry.area.values[ + 0 + ] # Directly get the area in square meters as a float + area_ha = area_m2 / 10**4 # Convert to 
hectares + return area_ha + + +def calculate_tree_cover(feature, project_name, params, logger): + try: + logger.debug("Calculating area...") + area_ha = calculate_area(feature) + logger.debug(f"Area calculated successfully: {area_ha}") + mosaic_file = f"{str(params['gdal']['outfile'])}-{project_name}-{str(params['gdal']['outfile_suffix'])}.tif" + temp_path = Path("temp.geojson") + temp_path.write_text(shapely.to_geojson(shape(feature["geometry"]))) + with rasterio.open(mosaic_file) as mosaic: + # Use exactextract to compute the mean + result = exact_extract( + mosaic, + temp_path, + "mean(min_coverage_frac=0.05, coverage_weight=fraction)", + ) + expected_ttc = result[0]["properties"]["mean"] + + logger.debug(f"Expected tree cover calculated successfully: {expected_ttc}") + + logger.debug("Calculating shift error...") + shift_error = error.calculate_shift_error( + feature, expected_ttc, project_name, params, logger + ) + logger.debug(f"Shift error calculated successfully: {shift_error}") + + logger.debug("Calculating LULC error...") + + lulc_lower_error, lulc_upper_error = error.calculate_lulc_error( + feature, project_name, expected_ttc, params, logger + ) + if lulc_lower_error == float("inf") or lulc_lower_error == float("-inf"): + lulc_lower_error = 0 + if lulc_upper_error == float("inf") or lulc_upper_error == float("-inf"): + lulc_upper_error = 0 + + logger.debug( + f"LULC error calculated successfully: {lulc_lower_error}, {lulc_upper_error}" + ) + + logger.debug("Calculating subregion error...") + subregion, subregion_lower_error, subregion_upper_error = ( + error.calculate_subregion_error(feature, expected_ttc, params, logger) + ) + + logger.debug( + f"Subregion error calculated successfully: {subregion}, {subregion_lower_error}, {subregion_upper_error}" + ) + + logger.debug("Calculating small site error...") + small_site_error = error.get_small_site_error_value( + area_ha, expected_ttc, params, logger + ) + logger.debug(f"Small site error: 
{small_site_error}") + logger.debug(f"Small site error calculated successfully: {small_site_error}") + + logger.debug("Integrating errors...") + plus, minus, plus_minus_average = error.combine_errors( + expected_ttc, + shift_error, + small_site_error, + lulc_lower_error, + lulc_upper_error, + subregion_lower_error, + subregion_upper_error, + ) + + tree_cover_result = { + "TTC": expected_ttc, + "error_plus": plus, + "error_minus": minus, + "plus_minus_average": plus_minus_average, + "small_site_error": small_site_error, + "lulc_lower_error": lulc_lower_error, + "lulc_upper_error": lulc_upper_error, + "shift_error": shift_error, + "subregion_lower_error": subregion_lower_error, + "subregion_upper_error": subregion_upper_error, + 'area_HA': area_ha + } + + logger.debug(f"Tree cover result calculated successfully: {tree_cover_result}") + return tree_cover_result + except Exception as e: + logger.error(f"Failed to calculate tree cover result: {e}", exc_info=True) + return None + + +def process_features_by_project(project_gdf, project_name, logger, params): + logger.info(f"Checking for TTC mosaic for {project_name}") + mosaic_file = f"{str(params['gdal']['outfile'])}-{project_name}-{str(params['gdal']['outfile_suffix'])}.tif" + if os.path.exists(mosaic_file): + logger.debug("TTC mosaic file found") + else: + global_lookup = s3_utils.build_tile_lookup(params) + logger.debug("Global tile lookup generated") + tile_file_list = s3_utils.download_tiles( + project_gdf, global_lookup, "project", params + ) + logger.debug("Tiles downloaded") + s3_utils.make_mosaic(tile_file_list, project_name, params) + logger.debug("Tile mosaic generated") + logger.debug(f"Mosaic file at: {mosaic_file}") + error.prep_lulc_data(project_gdf, project_name, logger, params) + poly_list = project_gdf["poly_name"].unique() + logger.info( + f"Calculating tre cover for {len(poly_list)} polygons in {project_name}" + ) + project_poly_list = [] + poly_count = 0 + for poly in poly_list: + poly_data = 
project_gdf[project_gdf["poly_name"] == poly] + ttc_result = calculate_tree_cover( + poly_data.iloc[0], project_name, params, logger + ) + poly_count += 1 + logger.info(f"TTC result: {str(ttc_result)} for polygon {poly_count}") + poly_name = poly_data["poly_name"] + logger.debug(f"poly_name: {poly_name}") + if ttc_result is None: + ttc_result = {} + ttc_result["poly_name"] = poly_data["poly_name"] + ttc_result["Project"] = poly_data["Project"] + project_poly_list.append(pd.DataFrame.from_dict(ttc_result)) + all_poly_df = pd.DataFrame(pd.concat(project_poly_list, ignore_index=True)) + data_path = str(params["base"]["data_path"]) + all_poly_df.to_csv( + f"{data_path}ttc_output/ttc_from_tiles_{project_name}.csv", index=False + ) + logger.info(f"Tree cover data calculated for {project_name}") diff --git a/resources/python/polygon-indicator/ttc_error_utils.py b/resources/python/polygon-indicator/ttc_error_utils.py new file mode 100644 index 000000000..eeaa55dfe --- /dev/null +++ b/resources/python/polygon-indicator/ttc_error_utils.py @@ -0,0 +1,332 @@ +import os +import shapely +import fiona +import rasterio +from rasterstats import zonal_stats +import pandas as pd +from osgeo import gdal +from shapely.geometry import Polygon, shape, box + +# from shapely import concave_hull +from shapely.ops import transform, unary_union + + +# general +def calculate_percent_error(obs, exp): + if exp != 0: + return (obs - exp) / exp + else: + return 0 + + +def force_2d(geometry): + return transform(lambda x, y, z=None: (x, y), geometry) + + +def combine_errors( + expected_ttc, + shift_error, + small_site_error, + lulc_lower_error, + lulc_upper_error, + subregion_lower_error, + subregion_upper_error, +): + shift_error_half = shift_error / 2 + small_site_error_half = small_site_error / 2 + lower_error = ( + lulc_lower_error**2 + + subregion_lower_error**2 + + shift_error_half**2 + + small_site_error_half**2 + ) ** 0.5 + upper_error = ( + lulc_upper_error**2 + + subregion_upper_error**2 + 
+ shift_error_half**2 + + small_site_error_half**2 + ) ** 0.5 + minus = expected_ttc * lower_error + plus = expected_ttc * upper_error + plus_minus_average = (minus + plus) / 2 + return plus, minus, plus_minus_average + + +# shift error +def shift_geometry(feature, NS, EW, params): + offset = params["indicators"]["wri_tropical_tree_cover"]["shift_value"] + # Convert feature to 2D + geometry = shape(feature["geometry"]) + geometry_2d = force_2d(geometry) + + if geometry_2d.geom_type == "Polygon": + coords = list(geometry_2d.exterior.coords) + elif geometry_2d.geom_type == "MultiPolygon": + coords = list( + ( + unary_union( + ( + gpd.GeoDataFrame( + {"geometry": geometry_2d, "test": [1]} + ).explode(ignore_index=True) + )["geometry"] + ) + ).geometry.exterior.coords + ) + + # Shift coordinates + new_coords = [] + for i, (lat, lon) in enumerate(coords): + if NS == "N": + lat = lat + offset + if NS == "S": + lat = lat - offset + if EW == "E": + lon = lon + offset + if EW == "W": + lon = lon - offset + new_coords.append((lat, lon)) + new_polygon = Polygon(new_coords) + return new_polygon + + +def calculate_shift_error(feature, expected_ttc, project_name, params, logger): + shift_list = [ + ["N", 0], + ["S", 0], + [0, "E"], + [0, "W"], + ["N", "E"], + ["N", "W"], + ["S", "E"], + ["S", "W"], + ] + shift_ttc = [] + mosaic_file = f"{str(params['gdal']['outfile'])}-{project_name}-{str(params['gdal']['outfile_suffix'])}.tif" + for i in shift_list: + shift_feature = shift_geometry(feature, i[0], i[1], params) + logger.debug(f"shift calculated for {str(i)}") + if params["indicators"]["wri_tropical_tree_cover"]["data_source"] == "tiles": + with rasterio.open(mosaic_file) as src: + affine = src.transform + array = src.read(1) + shift_data = zonal_stats( + shift_feature, array, affine=affine, stats="mean", all_touched=True + )[0]["mean"] + logger.debug(f"Shift TTC: {shift_data}") + shift_ttc.append(shift_data) + shift_error = [calculate_percent_error(i, expected_ttc) for i in 
shift_ttc] + sq_shift_error = [i**2 for i in shift_error] + all_shift_error = (sum(sq_shift_error) / 8) ** 0.5 + return all_shift_error + + +# LULC error +def find_lulc_label(lulc_int, conversion_dict): + for key, value in conversion_dict.items(): + if lulc_int in value: + return key + return None + + +def find_lulc_error_data(lulc_label, lulc_error_table): + lulc_error_table["category"] = lulc_error_table["category"].str.lower() + # print("Lulc error table category:", lulc_error_table["category"]) + return lulc_error_table[lulc_error_table["category"] == lulc_label] + + +def prep_lulc_data(features_gdf, project_name, logger, params): + target_crs = params["indicators"]["wri_tropical_tree_cover"]["lulc"]["target_crs"] + initial_crs = features_gdf.crs.srs + if initial_crs is None: + initial_crs = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "default_initial_crs" + ] + features_gdf.set_crs(initial_crs, inplace=True) + reproj = features_gdf.to_crs( + crs=params["indicators"]["wri_tropical_tree_cover"]["lulc"]["reproj_crs"] + ) + buffer = reproj.buffer( + params["indicators"]["wri_tropical_tree_cover"]["lulc"]["buffer_size"], + cap_style=3, + ) + buffer = buffer.to_crs(crs=target_crs) + xmin, ymin, xmax, ymax = buffer.total_bounds + logger.debug(f"xmin: {xmin}, ymin: {ymin}, xmax: {xmax}, ymax: {ymax}") + + data_path = params["base"]["data_path"] + temp_path = params["base"]["temp_path"] + lulc_global_name = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "input_path" + ] + lulc_prefix = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "temp_output_prefix" + ] + + global_lulc_file = f"{data_path}{lulc_global_name}" + + ds = gdal.Open(global_lulc_file) + output_file = f"{temp_path}{lulc_prefix}{project_name}.tif" + + translateoptions = gdal.TranslateOptions(projWin=[xmin, ymax, xmax, ymin]) + ds = gdal.Translate(output_file, ds, options=translateoptions) + logger.debug(f"Temp LULC file for {project_name} generated at {output_file}") + 
+ warpoptions = gdal.WarpOptions(warpOptions=["CENTER_LONG 0"]) + ds = gdal.Warp(output_file, output_file, options=warpoptions) + + +def get_lulc_by_polygon(feature, project_name, logger, params): + geometry = shape(feature["geometry"]) + temp_path = params["base"]["temp_path"] + lulc_prefix = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "temp_output_prefix" + ] + input_file = f"{temp_path}{lulc_prefix}{project_name}.tif" + if os.path.exists(input_file): + lulc = zonal_stats( + geometry, + input_file, + all_touched=True, + stats=params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "zonal_stats" + ], + nodata=255, + ) + logger.debug( + f"Zonal stats count: {lulc[0]['count']}, zonal stats majority: {lulc[0]['majority']}" + ) + return lulc[0]["count"], lulc[0]["majority"] + else: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), input_file) + + +def calculate_lulc_error(feature, project_name, expected_ttc, params, logger): + logger.debug(f"Calculating LULC data for {project_name}") + logger.debug(f"expected_ttc: {expected_ttc}") + if expected_ttc == 0: + lulc_lower_error = 0 + lulc_upper_error = 0 + return float(lulc_lower_error), float(lulc_upper_error) + else: + lulc_count, lulc_majority = get_lulc_by_polygon( + feature, project_name, logger, params + ) + if lulc_count > 0: + + logger.debug("getting lulc error table") + lulc_error_table = pd.read_csv( + params["indicators"]["wri_tropical_tree_cover"]["lulc_ci_data"]["path"] + ) + lulc_conversion_dict = params["indicators"]["wri_tropical_tree_cover"][ + "esa_lulc_conversions" + ] + lulc_int = int(lulc_majority) + logger.debug(f"lulc_int: {lulc_int}") + lulc_label = find_lulc_label(lulc_int, lulc_conversion_dict) + logger.debug(f"lulc_label: {lulc_label}") + lulc_error_table = find_lulc_error_data(lulc_label, lulc_error_table) + + upper_error = ( + lulc_error_table["r_upper_95"] - lulc_error_table["p_lower_95"] + ) + lower_error = ( + lulc_error_table["p_upper_95"] - 
lulc_error_table["r_lower_95"] + ) + logger.debug(f"lulc_upper_error: {upper_error}") + logger.debug(f"lulc_lower_error: {lower_error}") + + observed_lower_lulc = expected_ttc + lower_error + observed_upper_lulc = expected_ttc + upper_error + lulc_lower_error = (observed_lower_lulc - expected_ttc) / expected_ttc + lulc_upper_error = (observed_upper_lulc - expected_ttc) / expected_ttc + + logger.debug(f"lulc_lower_error: {lulc_lower_error}") + logger.debug(f"lulc_upper_error: {lulc_upper_error}") + + return float(lulc_lower_error), float(lulc_upper_error) + else: + logger.error("Missing LULC data") + + +# subregion error +def calculate_subregion_error(feature, expected_ttc, params, logger): + with fiona.open( + params["indicators"]["wri_tropical_tree_cover"]["subregion_ci_data"]["path"], + "r", + ) as shpin: + subregion_features = list(shpin) + subregion_polys = [shape(poly["geometry"]) for poly in subregion_features] + centroid = shape(feature["geometry"]).centroid + intersect_list = [ + feat + for feat, poly in zip(subregion_features, subregion_polys) + if poly.intersects(centroid) + ] + logger.debug(f"Intersection list length: {len(intersect_list)}") + + if intersect_list: + intersect_feature = intersect_list[0] + category = intersect_feature["properties"]["category"] + p_lower_95 = intersect_feature["properties"]["p_lower_95"] + r_lower_95 = intersect_feature["properties"]["r_lower_95"] + p_upper_95 = intersect_feature["properties"]["p_upper_95"] + r_upper_95 = intersect_feature["properties"]["r_upper_95"] + upper_error = r_upper_95 - p_lower_95 + lower_error = p_upper_95 - r_lower_95 + observed_lower_subregion = expected_ttc + lower_error + observed_upper_subregion = expected_ttc + upper_error + subregion_lower_error = calculate_percent_error( + observed_lower_subregion, expected_ttc + ) + subregion_upper_error = calculate_percent_error( + observed_upper_subregion, expected_ttc + ) + return category, subregion_lower_error, subregion_upper_error + else: + 
logger.debug("No subregion intersection found") + return None, 0, 0 + + +# small site error +def get_small_site_error_value(area, expected_ttc, params, logger): + small_sites_error = params["indicators"]["wri_tropical_tree_cover"][ + "small_sites_error" + ] + if ( + area + <= params["indicators"]["wri_tropical_tree_cover"]["small_sites_area_thresh"] + ): + logger.debug( + f'Polygon area of {area}ha is below threshold of {params["indicators"]["wri_tropical_tree_cover"]["small_sites_area_thresh"]}' + ) + logger.debug(f"Expected TTC: {expected_ttc}") + if ( + float(small_sites_error["zeroToNine"]["min"]) + <= expected_ttc + <= float(small_sites_error["zeroToNine"]["max"]) + ): + logger.debug(f"Small sites error is 0 - 9") + return float(small_sites_error["zeroToNine"]["error"]) / expected_ttc + + elif ( + float(small_sites_error["tenToThirtyNine"]["min"]) + < expected_ttc + <= float(small_sites_error["tenToThirtyNine"]["max"]) + ): + logger.debug(f"Small sites error is 10 - 39") + return float(small_sites_error["tenToThirtyNine"]["error"]) / expected_ttc + elif ( + float(small_sites_error["fortyTo1Hundred"]["min"]) + < expected_ttc + <= float(small_sites_error["fortyTo1Hundred"]["max"]) + ): + logger.debug(f"Small sites error is 40 - 100") + return float(small_sites_error["fortyTo1Hundred"]["error"]) / expected_ttc + else: + logger.debug("Small sites error not found") + else: + logger.debug( + f'Polygon area of {area}ha is above threshold of {params["indicators"]["wri_tropical_tree_cover"]["small_sites_area_thresh"]}' + ) + return 0.0 diff --git a/resources/python/polygon-indicator/ttc_s3_utils.py b/resources/python/polygon-indicator/ttc_s3_utils.py new file mode 100644 index 000000000..0624af65f --- /dev/null +++ b/resources/python/polygon-indicator/ttc_s3_utils.py @@ -0,0 +1,108 @@ +import pandas as pd +import os +from shapely.geometry import shape +from boto3 import client +from botocore.exceptions import ClientError +from osgeo import gdal + 
# Restore pandas' default chained-assignment behaviour. The option was
# removed in pandas >= 3.0 (copy-on-write is always on), so tolerate a
# failed reset instead of crashing at import time.
try:
    pd.reset_option("mode.chained_assignment")
except Exception:  # OptionError lives in different modules across versions
    pass


def build_tile_lookup(params):
    """Assemble the global tile-lookup table.

    Optionally downloads the lookup CSVs from S3 first (controlled by the
    download_tile_lookup flag), then concatenates the local copies into a
    single DataFrame.
    """
    bucket_name = params["s3"]["lookup_bucket"]
    lookup_file_list = params["s3"]["lookup_filelist"]
    outpath = params["base"]["temp_path"]
    if params["indicators"]["wri_tropical_tree_cover"]["download_tile_lookup"]:
        conn = client("s3")
        for file in lookup_file_list:
            conn.download_file(
                Bucket=bucket_name,
                Key=file,
                Filename=outpath + os.path.basename(file),
            )
    df_list = [
        pd.read_csv(outpath + os.path.basename(file), index_col=None, header=0)
        for file in lookup_file_list
    ]
    return pd.concat(df_list, axis=0, ignore_index=True)


def pre_filter_tiles_feature(feature, global_lookup):
    """Lookup rows whose tile origin lies within ~0.1 degrees of the
    feature's centroid, with X_tile/Y_tile downcast to integers."""
    centroid = shape(feature.geometry).centroid
    # .copy(): the original assigned columns on a boolean-indexed slice of
    # global_lookup (chained assignment), which pandas warns about and which
    # behaves differently under copy-on-write. An explicit copy is safe in
    # every version and never mutates the caller's frame.
    pre_filter = global_lookup[
        (abs(global_lookup["X"] - centroid.x) < 0.1)
        & (abs(global_lookup["Y"] - centroid.y) < 0.1)
    ].copy()
    pre_filter["X_tile"] = pd.to_numeric(pre_filter["X_tile"], downcast="integer")
    pre_filter["Y_tile"] = pd.to_numeric(pre_filter["Y_tile"], downcast="integer")
    return pre_filter


def pre_filter_tiles_project(project_gdf, global_lookup):
    """Lookup rows within a 0.05-degree margin of the project GeoDataFrame's
    total bounds, with X_tile/Y_tile downcast to integers."""
    bounds = project_gdf.total_bounds
    # .copy() for the same chained-assignment reason as in
    # pre_filter_tiles_feature.
    pre_filter = global_lookup[
        (global_lookup["X"] > (bounds[0] - 0.05))
        & (global_lookup["X"] < (bounds[2] + 0.05))
        & (global_lookup["Y"] > (bounds[1] - 0.05))
        & (global_lookup["Y"] < (bounds[3] + 0.05))
    ].copy()
    pre_filter["X_tile"] = pd.to_numeric(pre_filter["X_tile"], downcast="integer")
    pre_filter["Y_tile"] = pd.to_numeric(pre_filter["Y_tile"], downcast="integer")
    return pre_filter


def build_bucket_path(x_tile, y_tile, config):
    """S3 key for one TTC tile: <year>/tiles/<x>/<y>/<x>X<y>Y_FINAL.tif."""
    year = config["indicators"]["wri_tropical_tree_cover"]["data_year"]
    return f"{year}/tiles/{x_tile}/{y_tile}/{x_tile}X{y_tile}Y_FINAL.tif"


def download_tiles(feature, global_lookup, type, params):
    """Download the TTC tiles overlapping a feature or project from S3.

    type selects the pre-filter: "polygon" (feature centroid) or "project"
    (GeoDataFrame bounds). Downloads are best-effort: S3 client errors are
    printed and the tile skipped. Returns the local paths actually fetched.

    Raises:
        ValueError: for an unknown type (the original left
        pre_filtered_lookup unbound and crashed with NameError).
    """
    if type == "polygon":
        pre_filtered_lookup = pre_filter_tiles_feature(feature, global_lookup)
    elif type == "project":
        pre_filtered_lookup = pre_filter_tiles_project(feature, global_lookup)
    else:
        raise ValueError(f"Unknown download_tiles type: {type!r}")

    tile_file_list = list(
        pre_filtered_lookup.apply(
            lambda row: build_bucket_path(row["X_tile"], row["Y_tile"], params), axis=1
        )
    )
    directory = f"{params['base']['temp_path']}tiles/"
    if not os.path.exists(directory):
        os.makedirs(directory)
    conn = client("s3")
    tile_list = []
    for file in tile_file_list:
        try:
            conn.download_file(
                Bucket=params["s3"]["tile_bucket"],
                Key=file,
                Filename=directory + os.path.basename(file),
            )
            tile_list.append(directory + os.path.basename(file))
        except ClientError as e:
            # Best-effort: report and continue with the remaining tiles.
            print(e)
    return tile_list


def make_mosaic(file_list, project_name, params):
    """Mosaic downloaded tiles into one compressed GeoTIFF.

    Builds a temporary VRT over file_list (nodata 255), translates it to
    <outfile>-<project_name>-<suffix>.tif as an LZW-compressed Byte BIGTIFF,
    then removes the VRT.
    """
    vrt_path = f"{str(params['gdal']['outfile'])}.vrt"
    gdal.BuildVRT(
        vrt_path,
        file_list,
        options=gdal.BuildVRTOptions(srcNodata=255, VRTNodata=255),
    )
    ds = gdal.Open(vrt_path)
    translate_options = gdal.TranslateOptions(
        gdal.ParseCommandLine("-ot Byte -co COMPRESS=LZW -a_nodata 255 -co BIGTIFF=YES")
    )
    out_tif = (
        f"{str(params['gdal']['outfile'])}-{project_name}-"
        f"{str(params['gdal']['outfile_suffix'])}.tif"
    )
    ds = gdal.Translate(out_tif, ds, options=translate_options)
    os.remove(vrt_path)
// Monitored-data indicator endpoints. Order matters: within the entity
// group, the more specific '/{entity}/{slug}…' routes are registered before
// the bare '/{entity}' route so they match first.
Route::prefix('indicators')->group(function () {
    // Trigger an indicator analysis run for the indicator identified by {slug}.
    Route::post('/{slug}', RunIndicatorAnalysisController::class);
    // {entity} is resolved through ModelInterfaceBindingMiddleware against
    // EntityModel implementations (see the middleware for the slug mapping).
    ModelInterfaceBindingMiddleware::with(EntityModel::class, function () {
        // Analysis results for every polygon of the entity, per indicator slug.
        Route::get('/{entity}/{slug}', GetPolygonsIndicatorAnalysisController::class);
        // Verification pass over the same polygon/indicator data.
        Route::get('/{entity}/{slug}/verify', GetPolygonsIndicatorAnalysisVerifyController::class);
        // Export of the entity's indicator data for the given slug.
        Route::get('/{entity}/{slug}/export', IndicatorEntitySlugExportController::class);
        // Indicator polygon status summary for the entity (registered last —
        // see ordering note above).
        Route::get('/{entity}', GetIndicatorPolygonStatusController::class);
    });
});