Fix EE-Python syntax, consistent use of geemap, and format
PiperOrigin-RevId: 623565673
jdbcode authored and copybara-github committed Apr 10, 2024
1 parent a8b7d5c commit 2883c48
Showing 4 changed files with 114 additions and 99 deletions.
16 changes: 9 additions & 7 deletions samples/javascript/guides/classification03.js
@@ -123,6 +123,7 @@ Map.addLayer(classified.clip(roi),

// [START earthengine__classification03__one_sample]
var sample = input.addBands(modis).sample({
region: roi,
numPixels: 5000,
seed: 0
});
@@ -152,7 +153,7 @@ sample = sample.randomColumn();

var split = 0.7; // Roughly 70% training, 30% testing.
var training = sample.filter(ee.Filter.lt('random', split));
print(training.size());
print('Training size:', training.size());
var validation = sample.filter(ee.Filter.gte('random', split));

// Spatial join.
@@ -167,24 +168,25 @@ var join = ee.Join.inverted();

// Apply the join.
training = join.apply(training, validation, distFilter);
print(training.size());
print('Training size after spatial filtering:', training.size());
// [END earthengine__classification03__spatial_autocorrelation]
// [START earthengine__classification03__export_classifier]
// Using the random forest classifier defined earlier, export the random
// Using the random forest classifier defined earlier, export the random
// forest classifier as an Earth Engine asset.
var classifierAssetId = 'projects/<PROJECT-ID>/assets/upscaled_MCD12Q1_random_forest';
Export.classifier.toAsset(
classifier,
"Saved-random-forest-IGBP-classification",
"upscaled_MCD12Q1_random_forest"
'Saved-random-forest-IGBP-classification',
classifierAssetId
);
// [END earthengine__classification03__export_classifier]

// [START earthengine__classification03__load_classifier]
// Once the classifier export finishes, we can load our saved classifier.
var classifierAssetId = "<asset_prefix>/upscaled_MCD12Q1_random_forest";
var savedClassifier = ee.Classifier.load(classifierAssetId);
// We can perform classification just as before with the saved classifier now.
Map.addLayer(input.classify(savedClassifier).clip(roi),
var classified = input.classify(savedClassifier);
Map.addLayer(classified.clip(roi),
{palette: igbpPalette, min: 0, max: 17},
'classification');
// [END earthengine__classification03__load_classifier]
29 changes: 16 additions & 13 deletions samples/python/guides/classification01.py
@@ -59,11 +59,9 @@ def _get_factor_img(factor_names):
label = 'landcover'

# Overlay the points on the imagery to get training.
training = l8_image.select(bands).sampleRegions({
'collection': points,
'properties': [label],
'scale': 30
})
training = l8_image.select(bands).sampleRegions(
collection=points, properties=[label], scale=30
)

# Train a CART classifier with default parameters.
trained = ee.Classifier.smileCart().train(training, label, bands)
@@ -72,12 +70,17 @@ def _get_factor_img(factor_names):
classified = l8_image.select(bands).classify(trained)

# Display the inputs and the results.
Map = geemap.core.Map()
Map.setCenter(-122.0877, 37.7880, 11)
Map.addLayer(l8_image,
{'bands': ['SR_B4', 'SR_B3', 'SR_B2'], 'min': 0, 'max': 0.25},
'image')
Map.addLayer(classified,
{'min': 0, 'max': 2, 'palette': ['orange', 'green', 'blue']},
'classification')
m = geemap.Map()
m.set_center(-122.0877, 37.7880, 11)
m.add_layer(
l8_image,
{'bands': ['SR_B4', 'SR_B3', 'SR_B2'], 'min': 0, 'max': 0.25},
'image',
)
m.add_layer(
classified,
{'min': 0, 'max': 2, 'palette': ['orange', 'green', 'blue']},
'classification',
)
m
# [END earthengine__classification01__image_classify]
47 changes: 24 additions & 23 deletions samples/python/guides/classification02.py
@@ -52,33 +52,29 @@ def _get_factor_img(factor_names):
# Manually created polygons.
forest1 = ee.Geometry.Rectangle(-63.0187, -9.3958, -62.9793, -9.3443)
forest2 = ee.Geometry.Rectangle(-62.8145, -9.206, -62.7688, -9.1735)
nonForest1 = ee.Geometry.Rectangle(-62.8161, -9.5001, -62.7921, -9.4486)
nonForest2 = ee.Geometry.Rectangle(-62.6788, -9.044, -62.6459, -8.9986)
non_forest1 = ee.Geometry.Rectangle(-62.8161, -9.5001, -62.7921, -9.4486)
non_forest2 = ee.Geometry.Rectangle(-62.6788, -9.044, -62.6459, -8.9986)

# Make a FeatureCollection from the hand-made geometries.
polygons = ee.FeatureCollection([
ee.Feature(nonForest1, {'class': 0}),
ee.Feature(nonForest2, {'class': 0}),
ee.Feature(non_forest1, {'class': 0}),
ee.Feature(non_forest2, {'class': 0}),
ee.Feature(forest1, {'class': 1}),
ee.Feature(forest2, {'class': 1}),
])

# Get the values for all pixels in each polygon in the training.
training = l8_image.sampleRegions({
training = l8_image.sampleRegions(
# Get the sample from the polygons FeatureCollection.
'collection': polygons,
collection=polygons,
# Keep this list of properties from the polygons.
'properties': ['class'],
properties=['class'],
# Set the scale to get Landsat pixels in the polygons.
'scale': 30
})
scale=30,
)

# Create an SVM classifier with custom parameters.
classifier = ee.Classifier.libsvm({
'kernelType': 'RBF',
'gamma': 0.5,
'cost': 10
})
classifier = ee.Classifier.libsvm(kernelType='RBF', gamma=0.5, cost=10)

# Train the classifier.
trained = classifier.train(training, 'class', bands)
@@ -87,13 +83,18 @@ def _get_factor_img(factor_names):
classified = l8_image.classify(trained)

# Display the classification result and the input image.
Map = geemap.core.Map()
Map.setCenter(-62.836, -9.2399, 9)
Map.addLayer(l8_image,
{'bands': ['SR_B4', 'SR_B3', 'SR_B2'], 'min': 0, 'max': 0.25},
'image')
Map.addLayer(polygons, {'color': 'yellow'}, 'training polygons')
Map.addLayer(classified,
{'min': 0, 'max': 1, 'palette': ['orange', 'green']},
'deforestation')
m = geemap.Map()
m.set_center(-62.836, -9.2399, 9)
m.add_layer(
l8_image,
{'bands': ['SR_B4', 'SR_B3', 'SR_B2'], 'min': 0, 'max': 0.25},
'image',
)
m.add_layer(polygons, {'color': 'yellow'}, 'training polygons')
m.add_layer(
classified,
{'min': 0, 'max': 1, 'palette': ['orange', 'green']},
'deforestation',
)
m
# [END earthengine__classification02__polygon_training]
121 changes: 65 additions & 56 deletions samples/python/guides/classification03.py
@@ -20,7 +20,6 @@
# Define a region of interest.
roi = ee.Geometry.BBox(-122.93, 36.99, -121.20, 38.16)


# Define a function that scales and masks Landsat 8 surface reflectance images.
def prep_sr_l8(image):
"""Scales and masks Landsat 8 surface reflectance images."""
@@ -45,55 +44,58 @@ def _get_factor_img(factor_names):


# Make a cloud-free Landsat 8 surface reflectance composite.
inputImage = (
input_image = (
ee.ImageCollection('LANDSAT/LC08/C02/T1_L2')
.filterBounds(roi)
.filterDate('2020-03-01', '2020-07-01')
.map(prep_sr_l8)
.median()
.setDefaultProjection('EPSG:4326', None, 30)
.select(['SR_B2', 'SR_B3', 'SR_B4', 'SR_B5', 'SR_B6', 'SR_B7']))
.select(['SR_B2', 'SR_B3', 'SR_B4', 'SR_B5', 'SR_B6', 'SR_B7'])
)

# Use MODIS land cover, IGBP classification, for training.
modis = ee.Image('MODIS/006/MCD12Q1/2020_01_01').select('LC_Type1')

# Sample the input imagery to get a FeatureCollection of training data.
training = inputImage.addBands(modis).sample({
'region': roi,
'numPixels': 5000,
'seed': 0
})
training = input_image.addBands(modis).sample(
region=roi, numPixels=5000, seed=0
)

# Make a Random Forest classifier and train it.
classifier = ee.Classifier.smileRandomForest(10).train({
'features': training,
'classProperty': 'LC_Type1',
'inputProperties': [
'SR_B2', 'SR_B3', 'SR_B4', 'SR_B5', 'SR_B6', 'SR_B7']})
classifier = ee.Classifier.smileRandomForest(10).train(
features=training,
classProperty='LC_Type1',
inputProperties=['SR_B2', 'SR_B3', 'SR_B4', 'SR_B5', 'SR_B6', 'SR_B7'],
)

# Classify the input imagery.
classified = inputImage.classify(classifier)
classified = input_image.classify(classifier)

# Get a confusion matrix representing resubstitution accuracy.
train_accuracy = classifier.confusionMatrix()
print('Resubstitution error matrix: ', train_accuracy)
print('Training overall accuracy: ', train_accuracy.accuracy())
display('Resubstitution error matrix:', train_accuracy)
display('Training overall accuracy:', train_accuracy.accuracy())

# Sample the input with a different random seed to get validation data.
validation = inputImage.addBands(modis).sample({
'region': roi,
'numPixels': 5000,
'seed': 1
# Filter the result to get rid of any null pixels.
}).filter(ee.Filter.notNull(inputImage.bandNames()))
validation = (
input_image.addBands(modis)
.sample(
region=roi,
numPixels=5000,
seed=1,
# Filter the result to get rid of any null pixels.
)
.filter(ee.Filter.notNull(input_image.bandNames()))
)

# Classify the validation data.
validated = validation.classify(classifier)

# Get a confusion matrix representing expected accuracy.
testAccuracy = validated.errorMatrix('LC_Type1', 'classification')
print('Validation error matrix: ', testAccuracy)
print('Validation overall accuracy: ', testAccuracy.accuracy())
test_accuracy = validated.errorMatrix('LC_Type1', 'classification')
display('Validation error matrix:', test_accuracy)
display('Validation overall accuracy:', test_accuracy.accuracy())

# Define a palette for the IGBP classification.
igbp_palette = [
@@ -110,20 +112,23 @@ def _get_factor_img(factor_names):
]

# Display the input and the classification with geemap in a notebook.
Map = geemap.Map
Map.centerObject(roi, 10)
Map.addLayer(inputImage.clip(roi),
{'bands': ['SR_B4', 'SR_B3', 'SR_B2'], 'min': 0, 'max': 0.25},
'landsat')
Map.addLayer(classified.clip(roi),
{'palette': igbp_palette, 'min': 0, 'max': 17},
'classification')
m = geemap.Map()
m.center_object(roi, 10)
m.add_layer(
input_image.clip(roi),
{'bands': ['SR_B4', 'SR_B3', 'SR_B2'], 'min': 0, 'max': 0.25},
'landsat',
)
m.add_layer(
classified.clip(roi),
{'palette': igbp_palette, 'min': 0, 'max': 17},
'classification',
)
m
# [END earthengine__classification03__sample]

# [START earthengine__classification03__one_sample]
sample = inputImage.addBands(modis).sample({
'numPixels': 5000,
'seed': 0
})
sample = input_image.addBands(modis).sample(region=roi, numPixels=5000, seed=0)

# The randomColumn() method will add a column of uniform random
# numbers in a column named 'random' by default.
@@ -136,51 +141,55 @@ def _get_factor_img(factor_names):

# [START earthengine__classification03__spatial_autocorrelation]
# Sample the input imagery to get a FeatureCollection of training data.
sample = inputImage.addBands(modis).sample({
'region': roi,
'numPixels': 5000,
'seed': 0,
'geometries': True,
'tileScale': 16
})
sample = input_image.addBands(modis).sample(
region=roi, numPixels=5000, seed=0, geometries=True, tileScale=16
)

# The randomColumn() method will add a column of uniform random
# numbers in a column named 'random' by default.
sample = sample.randomColumn()

split = 0.7 # Roughly 70% training, 30% testing.
training = sample.filter(ee.Filter.lt('random', split))
print(training.size())
display('Training size:', training.size())
validation = sample.filter(ee.Filter.gte('random', split))

# Spatial join.
dist_filter = ee.Filter.withinDistance({
'distance': 1000,
'leftField': '.geo',
'rightField': '.geo',
'maxError': 10
})
dist_filter = ee.Filter.withinDistance(
distance=1000, leftField='.geo', rightField='.geo', maxError=10
)

join = ee.Join.inverted()

# Apply the join.
training = join.apply(training, validation, dist_filter)
print(training.size())
display('Training size after spatial filtering:', training.size())
# [END earthengine__classification03__spatial_autocorrelation]

# [START earthengine__classification03__export_classifier]
# Using the random forest classifier defined earlier, export the random
# forest classifier as an Earth Engine asset.
classifier_asset_id = '<asset_prefix>/upscaled_MCD12Q1_random_forest'
classifier_asset_id = (
'projects/<PROJECT-ID>/assets/upscaled_MCD12Q1_random_forest'
)
task = ee.batch.Export.classifier.toAsset(
classifier, 'Saved-random-forest-IGBP-classification', classifier_asset_id
)
task.start()
# [END earthengine__classification03__export_classifier]

# [START earthengine__classification03__load_classifier]
# Once the classifier export finishes, we can load our saved classifier.
saved_classifier = ee.Classifier.load(classifier_asset_id)
# We can perform classification just as before with the saved classifier now.
Map.addLayer(inputImage.classify(saved_classifier).clip(roi),
{'palette': igbp_palette, 'min': 0, 'max': 17},
'classification')
classified = input_image.classify(saved_classifier)

m = geemap.Map()
m.center_object(roi, 10)
m.add_layer(
classified.clip(roi),
{'palette': igbp_palette, 'min': 0, 'max': 17},
'classification',
)
m
# [END earthengine__classification03__load_classifier]
