From 88c3c71bd904b93b9551668d393a1aeb47afb5e6 Mon Sep 17 00:00:00 2001 From: Ujaval Gandhi Date: Fri, 26 Jan 2024 14:47:39 +0530 Subject: [PATCH] update --- .../04c_Mosaics_and_Composites_(exercise) | 4 + .../05c_Feature_Collections_(exercise) | 16 +- .../06c_Import_(exercise) | 11 +- .../07c_Clipping_(exercise) | 8 +- .../08b_Export_(complete) | 9 +- .../08c_Export_(exercise) | 8 +- .../01c_Earth_Engine_Objects_(exercise) | 6 +- .../Hyperparameter_Tuning | 507 ++++++----- .../04c_Mosaics_and_Composites_(exercise) | 4 + .../05c_Feature_Collections_(exercise) | 16 +- .../06c_Import_(exercise) | 11 +- .../07c_Clipping_(exercise) | 8 +- .../08b_Export_(complete) | 9 +- .../08c_Export_(exercise) | 8 +- .../01c_Earth_Engine_Objects_(exercise) | 6 +- .../Hyperparameter_Tuning | 507 ++++++----- docs/end-to-end-gee.html | 850 +++++++++--------- 17 files changed, 1036 insertions(+), 952 deletions(-) diff --git a/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) b/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) index 91c14a04..a468afdb 100644 --- a/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) +++ b/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) @@ -14,4 +14,8 @@ var filtered = s2.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) var image2019 = filtered.median(); Map.addLayer(image2019, rgbVis, '2019'); +// Exercise + +// Delete the 'geometry' variable +// Add a point at your chosen location // Create a median composite for 2020 and load it to the map diff --git a/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) b/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) index 5baed2b5..248a07b7 100644 --- a/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) +++ b/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) @@ -1,9 +1,13 @@ -// Apply 
a filter to select only the 'Bangalore Urban' district -// Display only the selected district +var admin2 = ee.FeatureCollection('FAO/GAUL_SIMPLIFIED_500m/2015/level2'); +Map.addLayer(admin2, {color: 'grey'}, 'All Admin2 Polygons'); -// Hint: The district names are in ADM2_NAME property +// Exercise +// Apply filters to select your chosen Admin2 region +// Display the results in 'red' color -var admin2 = ee.FeatureCollection('FAO/GAUL_SIMPLIFIED_500m/2015/level2'); -var karnataka = admin2.filter(ee.Filter.eq('ADM1_NAME', 'Karnataka')); +// Hint1: Swith to the 'Inspector' tab and click on any +// polygon to know its properties and their values -var visParams = {'color': 'red'}; +// Hint2: Many countries do not have unique names for +// Admin2 regions. Make sure to apply a filter to select +// the Admin1 region that contains your chosen Admin2 region diff --git a/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) b/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) index 8f925103..b5bdaf1e 100644 --- a/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) +++ b/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) @@ -1,10 +1,13 @@ +var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); +print(urban.first()); + +// Exercise // Apply a filter to select only large urban centers // in your country and display it on the map. 
// Select all urban centers in your country with // a population greater than 1000000 -// Use the property 'CTR_MN_NM' containing country names -// Use the property 'P15' containing 2015 Population -var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); -print(urban.first()) \ No newline at end of file +// Hint1: Use the property 'CTR_MN_NM' containing country names +// Hint2: Use the property 'P15' containing 2015 Population + diff --git a/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) b/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) index d2b41335..0743194b 100644 --- a/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) +++ b/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) @@ -1,9 +1,6 @@ var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED'); var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); -// Change the filter to your home city or any urban area of your choice -// Find the name of the urban centre -// by adding the layer to the map and using Inspector. var filtered = urban.filter(ee.Filter.eq('UC_NM_MN', 'Bengaluru')); var geometry = filtered.geometry(); @@ -23,3 +20,8 @@ var clipped = image.clip(geometry); Map.addLayer(clipped, rgbVis, 'Clipped'); +// Exercise +// Change the filter to your home city or any urban area of your choice + +// Hint: Find the name of the urban centre by adding the +// 'urban' layer to the map and using Inspector. 
diff --git a/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) b/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) index 153a826d..9a90c1fa 100644 --- a/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) +++ b/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) @@ -22,6 +22,7 @@ Map.addLayer(clipped, rgbVis, 'Clipped'); var exportImage = clipped.select('B.*'); +// Export raw image with original pixel values Export.image.toDrive({ image: exportImage, description: 'Bangalore_Composite_Raw', @@ -32,14 +33,14 @@ Export.image.toDrive({ maxPixels: 1e9 }); +// Export visualized image with RGB + // Rather than exporting raw bands, we can apply a rendered image // visualize() function allows you to apply the same parameters // that are used in earth engine which exports a 3-band RGB image -print(clipped); + +// Note: Visualized images are not suitable for analysis var visualized = clipped.visualize(rgbVis); -print(visualized); -// Now the 'visualized' image is RGB image, no need to give visParams -Map.addLayer(visualized, {}, 'Visualized Image'); Export.image.toDrive({ image: visualized, diff --git a/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) b/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) index b836104b..48851065 100644 --- a/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) +++ b/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) @@ -1,6 +1,3 @@ -// Change the filter to your city -// Write the export function to export the results - var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED'); var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); @@ -24,3 +21,8 @@ Map.addLayer(clipped, rgbVis, 'Clipped'); var exportImage = clipped.select('B.*'); +// Exercise +// Change the filter to select your city +// Write the export function to export the results + + diff --git 
a/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) b/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) index 1c96766f..0467703b 100644 --- a/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) +++ b/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) @@ -1,8 +1,10 @@ var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED'); var geometry = ee.Geometry.Point([77.60412933051538, 12.952912912328241]); -var now = Date.now() -var now = ee.Date(now) +var now = Date.now(); +var now = ee.Date(now); + +// Exercise // Apply another filter to the collection below to filter images // collected in the last 1-month // Do not hard-code the dates, it should always show images diff --git a/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning b/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning index 9b57cade..8d55efd7 100644 --- a/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning +++ b/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning @@ -1,241 +1,266 @@ -var s2 = ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED"); -var basin = ee.FeatureCollection("WWF/HydroSHEDS/v1/Basins/hybas_7"); -var gcp = ee.FeatureCollection("users/ujavalgandhi/e2e/arkavathy_gcps"); -var alos = ee.Image("JAXA/ALOS/AW3D30/V2_2"); - - -var arkavathy = basin.filter(ee.Filter.eq('HYBAS_ID', 4071139640)) -var boundary = arkavathy.geometry() -var rgbVis = { - min: 0.0, - max: 3000, - bands: ['B4', 'B3', 'B2'], -}; -// Function to remove cloud and snow pixels from Sentinel-2 SR image - -function maskCloudAndShadowsSR(image) { - var cloudProb = image.select('MSK_CLDPRB'); - var snowProb = image.select('MSK_SNWPRB'); - var cloud = cloudProb.lt(10); - var scl = image.select('SCL'); - var shadow = scl.eq(3); // 3 = cloud shadow - var cirrus = scl.eq(10); // 10 = cirrus - // 
Cloud probability less than 10% or cloud shadow classification - var mask = cloud.and(cirrus.neq(1)).and(shadow.neq(1)); - return image.updateMask(mask); -} - - -var filtered = s2 -.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) - .filter(ee.Filter.date('2019-01-01', '2020-01-01')) - .filter(ee.Filter.bounds(boundary)) - .map(maskCloudAndShadowsSR) - .select('B.*') - -var composite = filtered.median().clip(boundary) - -var visParams = {bands: ['B4', 'B3', 'B2'], min: 0, max: 3000, gamma: 1.2}; -Map.centerObject(boundary) -Map.addLayer(composite, visParams, 'RGB'); - -var addIndices = function(image) { - var ndvi = image.normalizedDifference(['B8', 'B4']).rename(['ndvi']); - var ndbi = image.normalizedDifference(['B11', 'B8']).rename(['ndbi']); - var mndwi = image.normalizedDifference(['B3', 'B11']).rename(['mndwi']); - var bsi = image.expression( - '(( X + Y ) - (A + B)) /(( X + Y ) + (A + B)) ', { - 'X': image.select('B11'), //swir1 - 'Y': image.select('B4'), //red - 'A': image.select('B8'), // nir - 'B': image.select('B2'), // blue - }).rename('bsi'); - return image.addBands(ndvi).addBands(ndbi).addBands(mndwi).addBands(bsi) -} - -var composite = addIndices(composite); - - -// Calculate Slope and Elevation -var elev = alos.select('AVE_DSM').rename('elev'); -var slope = ee.Terrain.slope(alos.select('AVE_DSM')).rename('slope'); - -var composite = composite.addBands(elev).addBands(slope); - - - -// Normalize the image - -// Machine learning algorithms work best on images when all features have -// the same range - -// Function to Normalize Image -// Pixel Values should be between 0 and 1 -// Formula is (x - xmin) / (xmax - xmin) -//************************************************************************** -function normalize(image){ - var bandNames = image.bandNames(); - // Compute min and max of the image - var minDict = image.reduceRegion({ - reducer: ee.Reducer.min(), - geometry: boundary, - scale: 20, - maxPixels: 1e9, - bestEffort: true, - tileScale: 16 - 
}); - var maxDict = image.reduceRegion({ - reducer: ee.Reducer.max(), - geometry: boundary, - scale: 20, - maxPixels: 1e9, - bestEffort: true, - tileScale: 16 - }); - var mins = ee.Image.constant(minDict.values(bandNames)); - var maxs = ee.Image.constant(maxDict.values(bandNames)); - - var normalized = image.subtract(mins).divide(maxs.subtract(mins)) - return normalized -} - -var composite = normalize(composite); -// Add a random column and split the GCPs into training and validation set -var gcp = gcp.randomColumn() - -// This being a simpler classification, we take 60% points -// for validation. Normal recommended ratio is -// 70% training, 30% validation -var trainingGcp = gcp.filter(ee.Filter.lt('random', 0.6)); -var validationGcp = gcp.filter(ee.Filter.gte('random', 0.6)); -// Overlay the point on the image to get training data. -var training = composite.sampleRegions({ - collection: trainingGcp, - properties: ['landcover'], - scale: 10, - tileScale: 16 -}); -print(training) -// Train a classifier. 
-var classifier = ee.Classifier.smileRandomForest(50) -.train({ - features: training, - classProperty: 'landcover', - inputProperties: composite.bandNames() -}); - -//************************************************************************** -// Feature Importance -//************************************************************************** - -// Run .explain() to see what the classifer looks like -print(classifier.explain()) - -// Calculate variable importance -var importance = ee.Dictionary(classifier.explain().get('importance')) - -// Calculate relative importance -var sum = importance.values().reduce(ee.Reducer.sum()) - -var relativeImportance = importance.map(function(key, val) { - return (ee.Number(val).multiply(100)).divide(sum) - }) -print(relativeImportance) - -// Create a FeatureCollection so we can chart it -var importanceFc = ee.FeatureCollection([ - ee.Feature(null, relativeImportance) -]) - -var chart = ui.Chart.feature.byProperty({ - features: importanceFc -}).setOptions({ - title: 'Feature Importance', - vAxis: {title: 'Importance'}, - hAxis: {title: 'Feature'} - }) -print(chart) - -//************************************************************************** -// Hyperparameter Tuning -//************************************************************************** - -var test = composite.sampleRegions({ - collection: validationGcp, - properties: ['landcover'], - scale: 10, - tileScale: 16 -}); - - -// Tune the numberOfTrees parameter. 
-var numTreesList = ee.List.sequence(10, 150, 10); - -var accuracies = numTreesList.map(function(numTrees) { - var classifier = ee.Classifier.smileRandomForest(numTrees) - .train({ - features: training, - classProperty: 'landcover', - inputProperties: composite.bandNames() - }); - - // Here we are classifying a table instead of an image - // Classifiers work on both images and tables - return test - .classify(classifier) - .errorMatrix('landcover', 'classification') - .accuracy(); -}); - -var chart = ui.Chart.array.values({ - array: ee.Array(accuracies), - axis: 0, - xLabels: numTreesList - }).setOptions({ - title: 'Hyperparameter Tuning for the numberOfTrees Parameters', - vAxis: {title: 'Validation Accuracy'}, - hAxis: {title: 'Number of Tress', gridlines: {count: 15}} - }); -print(chart) - -// Tuning Multiple Parameters -// We can tune many parameters together using -// nested map() functions -// Let's tune 2 parameters -// numTrees and bagFraction -var numTreesList = ee.List.sequence(10, 150, 10); -var bagFractionList = ee.List.sequence(0.1, 0.9, 0.1); - -var accuracies = numTreesList.map(function(numTrees) { - return bagFractionList.map(function(bagFraction) { - var classifier = ee.Classifier.smileRandomForest({ - numberOfTrees: numTrees, - bagFraction: bagFraction - }) - .train({ - features: training, - classProperty: 'landcover', - inputProperties: composite.bandNames() - }); - - // Here we are classifying a table instead of an image - // Classifiers work on both images and tables - var accuracy = test - .classify(classifier) - .errorMatrix('landcover', 'classification') - .accuracy(); - return ee.Feature(null, {'accuracy': accuracy, - 'numberOfTrees': numTrees, - 'bagFraction': bagFraction}) - }) -}).flatten() -var resultFc = ee.FeatureCollection(accuracies) - -// Export the result as CSV -Export.table.toDrive({ - collection: resultFc, - description: 'Multiple_Parameter_Tuning_Results', - folder: 'earthengine', - fileNamePrefix: 'numtrees_bagfraction', - 
fileFormat: 'CSV'}) - \ No newline at end of file +var s2 = ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED"); +var basin = ee.FeatureCollection("WWF/HydroSHEDS/v1/Basins/hybas_7"); +var gcp = ee.FeatureCollection("users/ujavalgandhi/e2e/arkavathy_gcps"); +var alos = ee.Image("JAXA/ALOS/AW3D30/V2_2"); + + +var arkavathy = basin.filter(ee.Filter.eq('HYBAS_ID', 4071139640)) +var boundary = arkavathy.geometry() +var rgbVis = { + min: 0.0, + max: 3000, + bands: ['B4', 'B3', 'B2'], +}; +// Function to remove cloud and snow pixels from Sentinel-2 SR image + +function maskCloudAndShadowsSR(image) { + var cloudProb = image.select('MSK_CLDPRB'); + var snowProb = image.select('MSK_SNWPRB'); + var cloud = cloudProb.lt(10); + var scl = image.select('SCL'); + var shadow = scl.eq(3); // 3 = cloud shadow + var cirrus = scl.eq(10); // 10 = cirrus + // Cloud probability less than 10% or cloud shadow classification + var mask = cloud.and(cirrus.neq(1)).and(shadow.neq(1)); + return image.updateMask(mask); +} + + +var filtered = s2 +.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) + .filter(ee.Filter.date('2019-01-01', '2020-01-01')) + .filter(ee.Filter.bounds(boundary)) + .map(maskCloudAndShadowsSR) + .select('B.*') + +var composite = filtered.median().clip(boundary) + +var visParams = {bands: ['B4', 'B3', 'B2'], min: 0, max: 3000, gamma: 1.2}; +Map.centerObject(boundary) +Map.addLayer(composite, visParams, 'RGB'); + +var addIndices = function(image) { + var ndvi = image.normalizedDifference(['B8', 'B4']).rename(['ndvi']); + var ndbi = image.normalizedDifference(['B11', 'B8']).rename(['ndbi']); + var mndwi = image.normalizedDifference(['B3', 'B11']).rename(['mndwi']); + var bsi = image.expression( + '(( X + Y ) - (A + B)) /(( X + Y ) + (A + B)) ', { + 'X': image.select('B11'), //swir1 + 'Y': image.select('B4'), //red + 'A': image.select('B8'), // nir + 'B': image.select('B2'), // blue + }).rename('bsi'); + return image.addBands(ndvi).addBands(ndbi).addBands(mndwi).addBands(bsi) 
+} + +var composite = addIndices(composite); + + +// Calculate Slope and Elevation +var elev = alos.select('AVE_DSM').rename('elev'); +var slope = ee.Terrain.slope(alos.select('AVE_DSM')).rename('slope'); + +var composite = composite.addBands(elev).addBands(slope); + + + +// Normalize the image + +// Machine learning algorithms work best on images when all features have +// the same range + +// Function to Normalize Image +// Pixel Values should be between 0 and 1 +// Formula is (x - xmin) / (xmax - xmin) +//************************************************************************** +function normalize(image){ + var bandNames = image.bandNames(); + // Compute min and max of the image + var minDict = image.reduceRegion({ + reducer: ee.Reducer.min(), + geometry: boundary, + scale: 20, + maxPixels: 1e9, + bestEffort: true, + tileScale: 16 + }); + var maxDict = image.reduceRegion({ + reducer: ee.Reducer.max(), + geometry: boundary, + scale: 20, + maxPixels: 1e9, + bestEffort: true, + tileScale: 16 + }); + var mins = ee.Image.constant(minDict.values(bandNames)); + var maxs = ee.Image.constant(maxDict.values(bandNames)); + + var normalized = image.subtract(mins).divide(maxs.subtract(mins)) + return normalized +} + +var composite = normalize(composite); +// Add a random column and split the GCPs into training and validation set +var gcp = gcp.randomColumn() + +// This being a simpler classification, we take 60% points +// for validation. Normal recommended ratio is +// 70% training, 30% validation +var trainingGcp = gcp.filter(ee.Filter.lt('random', 0.6)); +var validationGcp = gcp.filter(ee.Filter.gte('random', 0.6)); +// Overlay the point on the image to get training data. +var training = composite.sampleRegions({ + collection: trainingGcp, + properties: ['landcover'], + scale: 10, + tileScale: 16 +}); +print(training) +// Train a classifier. 
+var classifier = ee.Classifier.smileRandomForest(50) +.train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() +}); + +//************************************************************************** +// Feature Importance +//************************************************************************** + +// Run .explain() to see what the classifer looks like +print(classifier.explain()) + +// Calculate variable importance +var importance = ee.Dictionary(classifier.explain().get('importance')) + +// Calculate relative importance +var sum = importance.values().reduce(ee.Reducer.sum()) + +var relativeImportance = importance.map(function(key, val) { + return (ee.Number(val).multiply(100)).divide(sum) + }) +print(relativeImportance) + +// Create a FeatureCollection so we can chart it +var importanceFc = ee.FeatureCollection([ + ee.Feature(null, relativeImportance) +]) + +var chart = ui.Chart.feature.byProperty({ + features: importanceFc +}).setOptions({ + title: 'Feature Importance', + vAxis: {title: 'Importance'}, + hAxis: {title: 'Feature'} + }) +print(chart) + +//************************************************************************** +// Hyperparameter Tuning +//************************************************************************** + +var test = composite.sampleRegions({ + collection: validationGcp, + properties: ['landcover'], + scale: 10, + tileScale: 16 +}); + + +// Tune the numberOfTrees parameter. 
+var numTreesList = ee.List.sequence(10, 150, 10); + +var accuracies = numTreesList.map(function(numTrees) { + var classifier = ee.Classifier.smileRandomForest(numTrees) + .train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() + }); + + // Here we are classifying a table instead of an image + // Classifiers work on both images and tables + return test + .classify(classifier) + .errorMatrix('landcover', 'classification') + .accuracy(); +}); + +var chart = ui.Chart.array.values({ + array: ee.Array(accuracies), + axis: 0, + xLabels: numTreesList + }).setOptions({ + title: 'Hyperparameter Tuning for the numberOfTrees Parameters', + vAxis: {title: 'Validation Accuracy'}, + hAxis: {title: 'Number of Tress', gridlines: {count: 15}} + }); +print(chart) + +// Tuning Multiple Parameters +// We can tune many parameters together using +// nested map() functions +// Let's tune 2 parameters +// numTrees and bagFraction +var numTreesList = ee.List.sequence(10, 150, 10); +var bagFractionList = ee.List.sequence(0.1, 0.9, 0.1); + +var accuracies = numTreesList.map(function(numTrees) { + return bagFractionList.map(function(bagFraction) { + var classifier = ee.Classifier.smileRandomForest({ + numberOfTrees: numTrees, + bagFraction: bagFraction + }) + .train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() + }); + + // Here we are classifying a table instead of an image + // Classifiers work on both images and tables + var accuracy = test + .classify(classifier) + .errorMatrix('landcover', 'classification') + .accuracy(); + return ee.Feature(null, {'accuracy': accuracy, + 'numberOfTrees': numTrees, + 'bagFraction': bagFraction}) + }) +}).flatten() +var resultFc = ee.FeatureCollection(accuracies) + +// Export the result as CSV +Export.table.toDrive({ + collection: resultFc, + description: 'Multiple_Parameter_Tuning_Results', + folder: 'earthengine', + fileNamePrefix: 'numtrees_bagfraction', + 
fileFormat: 'CSV'}); + +// Alternatively we can automatically pick the parameters +// that result in the highest accuracy +var resultFcSorted = resultFc.sort('accuracy', false); +var highestAccuracyFeature = resultFcSorted.first(); +var highestAccuracy = highestAccuracyFeature.getNumber('accuracy'); +var optimalNumTrees = highestAccuracyFeature.getNumber('numberOfTrees'); +var optimalBagFraction = highestAccuracyFeature.getNumber('bagFraction'); + +// Use the optimal parameters in a model and perform final classification +var optimalModel = ee.Classifier.smileRandomForest({ + numberOfTrees: optimalNumTrees, + bagFraction: optimalBagFraction +}).train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() +}); + +var finalClassification = composite.classify(optimalModel); + +// Printing or Displaying the image may time out as it requires +// extensive computation to find the optimal parameters + +// Export the 'finalClassification' to Asset and import the +// result to view it. 
\ No newline at end of file diff --git a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) index 91c14a04..a468afdb 100644 --- a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) +++ b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/04c_Mosaics_and_Composites_(exercise) @@ -14,4 +14,8 @@ var filtered = s2.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) var image2019 = filtered.median(); Map.addLayer(image2019, rgbVis, '2019'); +// Exercise + +// Delete the 'geometry' variable +// Add a point at your chosen location // Create a median composite for 2020 and load it to the map diff --git a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) index 5baed2b5..248a07b7 100644 --- a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) +++ b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/05c_Feature_Collections_(exercise) @@ -1,9 +1,13 @@ -// Apply a filter to select only the 'Bangalore Urban' district -// Display only the selected district +var admin2 = ee.FeatureCollection('FAO/GAUL_SIMPLIFIED_500m/2015/level2'); +Map.addLayer(admin2, {color: 'grey'}, 'All Admin2 Polygons'); -// Hint: The district names are in ADM2_NAME property +// Exercise +// Apply filters to select your chosen Admin2 region +// Display the results in 'red' color -var admin2 = ee.FeatureCollection('FAO/GAUL_SIMPLIFIED_500m/2015/level2'); -var karnataka = admin2.filter(ee.Filter.eq('ADM1_NAME', 'Karnataka')); +// Hint1: Swith to the 'Inspector' tab and click on any +// polygon to know its properties and their values -var visParams = {'color': 'red'}; +// Hint2: Many countries do not have unique names for +// Admin2 regions. 
Make sure to apply a filter to select +// the Admin1 region that contains your chosen Admin2 region diff --git a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) index 8f925103..b5bdaf1e 100644 --- a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) +++ b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/06c_Import_(exercise) @@ -1,10 +1,13 @@ +var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); +print(urban.first()); + +// Exercise // Apply a filter to select only large urban centers // in your country and display it on the map. // Select all urban centers in your country with // a population greater than 1000000 -// Use the property 'CTR_MN_NM' containing country names -// Use the property 'P15' containing 2015 Population -var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); -print(urban.first()) \ No newline at end of file +// Hint1: Use the property 'CTR_MN_NM' containing country names +// Hint2: Use the property 'P15' containing 2015 Population + diff --git a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) index d2b41335..0743194b 100644 --- a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) +++ b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/07c_Clipping_(exercise) @@ -1,9 +1,6 @@ var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED'); var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); -// Change the filter to your home city or any urban area of your choice -// Find the name of the urban centre -// by adding the layer to the map and using Inspector. 
var filtered = urban.filter(ee.Filter.eq('UC_NM_MN', 'Bengaluru')); var geometry = filtered.geometry(); @@ -23,3 +20,8 @@ var clipped = image.clip(geometry); Map.addLayer(clipped, rgbVis, 'Clipped'); +// Exercise +// Change the filter to your home city or any urban area of your choice + +// Hint: Find the name of the urban centre by adding the +// 'urban' layer to the map and using Inspector. diff --git a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) index 153a826d..9a90c1fa 100644 --- a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) +++ b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08b_Export_(complete) @@ -22,6 +22,7 @@ Map.addLayer(clipped, rgbVis, 'Clipped'); var exportImage = clipped.select('B.*'); +// Export raw image with original pixel values Export.image.toDrive({ image: exportImage, description: 'Bangalore_Composite_Raw', @@ -32,14 +33,14 @@ Export.image.toDrive({ maxPixels: 1e9 }); +// Export visualized image with RGB + // Rather than exporting raw bands, we can apply a rendered image // visualize() function allows you to apply the same parameters // that are used in earth engine which exports a 3-band RGB image -print(clipped); + +// Note: Visualized images are not suitable for analysis var visualized = clipped.visualize(rgbVis); -print(visualized); -// Now the 'visualized' image is RGB image, no need to give visParams -Map.addLayer(visualized, {}, 'Visualized Image'); Export.image.toDrive({ image: visualized, diff --git a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) index b836104b..48851065 100644 --- a/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) +++ b/docs/code/end_to_end_gee/01-Earth-Engine-Basics/08c_Export_(exercise) @@ -1,6 +1,3 @@ -// Change the filter to your city -// Write the export function to export 
the results - var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED'); var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers'); @@ -24,3 +21,8 @@ Map.addLayer(clipped, rgbVis, 'Clipped'); var exportImage = clipped.select('B.*'); +// Exercise +// Change the filter to select your city +// Write the export function to export the results + + diff --git a/docs/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) b/docs/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) index 1c96766f..0467703b 100644 --- a/docs/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) +++ b/docs/code/end_to_end_gee/02-Earth-Engine-Intermediate/01c_Earth_Engine_Objects_(exercise) @@ -1,8 +1,10 @@ var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED'); var geometry = ee.Geometry.Point([77.60412933051538, 12.952912912328241]); -var now = Date.now() -var now = ee.Date(now) +var now = Date.now(); +var now = ee.Date(now); + +// Exercise // Apply another filter to the collection below to filter images // collected in the last 1-month // Do not hard-code the dates, it should always show images diff --git a/docs/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning b/docs/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning index 9b57cade..8d55efd7 100644 --- a/docs/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning +++ b/docs/code/end_to_end_gee/Supplement/Supervised_Classification/Hyperparameter_Tuning @@ -1,241 +1,266 @@ -var s2 = ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED"); -var basin = ee.FeatureCollection("WWF/HydroSHEDS/v1/Basins/hybas_7"); -var gcp = ee.FeatureCollection("users/ujavalgandhi/e2e/arkavathy_gcps"); -var alos = ee.Image("JAXA/ALOS/AW3D30/V2_2"); - - -var arkavathy = basin.filter(ee.Filter.eq('HYBAS_ID', 4071139640)) -var boundary = arkavathy.geometry() -var rgbVis = { - min: 
0.0, - max: 3000, - bands: ['B4', 'B3', 'B2'], -}; -// Function to remove cloud and snow pixels from Sentinel-2 SR image - -function maskCloudAndShadowsSR(image) { - var cloudProb = image.select('MSK_CLDPRB'); - var snowProb = image.select('MSK_SNWPRB'); - var cloud = cloudProb.lt(10); - var scl = image.select('SCL'); - var shadow = scl.eq(3); // 3 = cloud shadow - var cirrus = scl.eq(10); // 10 = cirrus - // Cloud probability less than 10% or cloud shadow classification - var mask = cloud.and(cirrus.neq(1)).and(shadow.neq(1)); - return image.updateMask(mask); -} - - -var filtered = s2 -.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) - .filter(ee.Filter.date('2019-01-01', '2020-01-01')) - .filter(ee.Filter.bounds(boundary)) - .map(maskCloudAndShadowsSR) - .select('B.*') - -var composite = filtered.median().clip(boundary) - -var visParams = {bands: ['B4', 'B3', 'B2'], min: 0, max: 3000, gamma: 1.2}; -Map.centerObject(boundary) -Map.addLayer(composite, visParams, 'RGB'); - -var addIndices = function(image) { - var ndvi = image.normalizedDifference(['B8', 'B4']).rename(['ndvi']); - var ndbi = image.normalizedDifference(['B11', 'B8']).rename(['ndbi']); - var mndwi = image.normalizedDifference(['B3', 'B11']).rename(['mndwi']); - var bsi = image.expression( - '(( X + Y ) - (A + B)) /(( X + Y ) + (A + B)) ', { - 'X': image.select('B11'), //swir1 - 'Y': image.select('B4'), //red - 'A': image.select('B8'), // nir - 'B': image.select('B2'), // blue - }).rename('bsi'); - return image.addBands(ndvi).addBands(ndbi).addBands(mndwi).addBands(bsi) -} - -var composite = addIndices(composite); - - -// Calculate Slope and Elevation -var elev = alos.select('AVE_DSM').rename('elev'); -var slope = ee.Terrain.slope(alos.select('AVE_DSM')).rename('slope'); - -var composite = composite.addBands(elev).addBands(slope); - - - -// Normalize the image - -// Machine learning algorithms work best on images when all features have -// the same range - -// Function to Normalize Image -// Pixel 
Values should be between 0 and 1 -// Formula is (x - xmin) / (xmax - xmin) -//************************************************************************** -function normalize(image){ - var bandNames = image.bandNames(); - // Compute min and max of the image - var minDict = image.reduceRegion({ - reducer: ee.Reducer.min(), - geometry: boundary, - scale: 20, - maxPixels: 1e9, - bestEffort: true, - tileScale: 16 - }); - var maxDict = image.reduceRegion({ - reducer: ee.Reducer.max(), - geometry: boundary, - scale: 20, - maxPixels: 1e9, - bestEffort: true, - tileScale: 16 - }); - var mins = ee.Image.constant(minDict.values(bandNames)); - var maxs = ee.Image.constant(maxDict.values(bandNames)); - - var normalized = image.subtract(mins).divide(maxs.subtract(mins)) - return normalized -} - -var composite = normalize(composite); -// Add a random column and split the GCPs into training and validation set -var gcp = gcp.randomColumn() - -// This being a simpler classification, we take 60% points -// for validation. Normal recommended ratio is -// 70% training, 30% validation -var trainingGcp = gcp.filter(ee.Filter.lt('random', 0.6)); -var validationGcp = gcp.filter(ee.Filter.gte('random', 0.6)); -// Overlay the point on the image to get training data. -var training = composite.sampleRegions({ - collection: trainingGcp, - properties: ['landcover'], - scale: 10, - tileScale: 16 -}); -print(training) -// Train a classifier. 
-var classifier = ee.Classifier.smileRandomForest(50) -.train({ - features: training, - classProperty: 'landcover', - inputProperties: composite.bandNames() -}); - -//************************************************************************** -// Feature Importance -//************************************************************************** - -// Run .explain() to see what the classifer looks like -print(classifier.explain()) - -// Calculate variable importance -var importance = ee.Dictionary(classifier.explain().get('importance')) - -// Calculate relative importance -var sum = importance.values().reduce(ee.Reducer.sum()) - -var relativeImportance = importance.map(function(key, val) { - return (ee.Number(val).multiply(100)).divide(sum) - }) -print(relativeImportance) - -// Create a FeatureCollection so we can chart it -var importanceFc = ee.FeatureCollection([ - ee.Feature(null, relativeImportance) -]) - -var chart = ui.Chart.feature.byProperty({ - features: importanceFc -}).setOptions({ - title: 'Feature Importance', - vAxis: {title: 'Importance'}, - hAxis: {title: 'Feature'} - }) -print(chart) - -//************************************************************************** -// Hyperparameter Tuning -//************************************************************************** - -var test = composite.sampleRegions({ - collection: validationGcp, - properties: ['landcover'], - scale: 10, - tileScale: 16 -}); - - -// Tune the numberOfTrees parameter. 
-var numTreesList = ee.List.sequence(10, 150, 10); - -var accuracies = numTreesList.map(function(numTrees) { - var classifier = ee.Classifier.smileRandomForest(numTrees) - .train({ - features: training, - classProperty: 'landcover', - inputProperties: composite.bandNames() - }); - - // Here we are classifying a table instead of an image - // Classifiers work on both images and tables - return test - .classify(classifier) - .errorMatrix('landcover', 'classification') - .accuracy(); -}); - -var chart = ui.Chart.array.values({ - array: ee.Array(accuracies), - axis: 0, - xLabels: numTreesList - }).setOptions({ - title: 'Hyperparameter Tuning for the numberOfTrees Parameters', - vAxis: {title: 'Validation Accuracy'}, - hAxis: {title: 'Number of Tress', gridlines: {count: 15}} - }); -print(chart) - -// Tuning Multiple Parameters -// We can tune many parameters together using -// nested map() functions -// Let's tune 2 parameters -// numTrees and bagFraction -var numTreesList = ee.List.sequence(10, 150, 10); -var bagFractionList = ee.List.sequence(0.1, 0.9, 0.1); - -var accuracies = numTreesList.map(function(numTrees) { - return bagFractionList.map(function(bagFraction) { - var classifier = ee.Classifier.smileRandomForest({ - numberOfTrees: numTrees, - bagFraction: bagFraction - }) - .train({ - features: training, - classProperty: 'landcover', - inputProperties: composite.bandNames() - }); - - // Here we are classifying a table instead of an image - // Classifiers work on both images and tables - var accuracy = test - .classify(classifier) - .errorMatrix('landcover', 'classification') - .accuracy(); - return ee.Feature(null, {'accuracy': accuracy, - 'numberOfTrees': numTrees, - 'bagFraction': bagFraction}) - }) -}).flatten() -var resultFc = ee.FeatureCollection(accuracies) - -// Export the result as CSV -Export.table.toDrive({ - collection: resultFc, - description: 'Multiple_Parameter_Tuning_Results', - folder: 'earthengine', - fileNamePrefix: 'numtrees_bagfraction', - 
fileFormat: 'CSV'}) - \ No newline at end of file +var s2 = ee.ImageCollection("COPERNICUS/S2_SR_HARMONIZED"); +var basin = ee.FeatureCollection("WWF/HydroSHEDS/v1/Basins/hybas_7"); +var gcp = ee.FeatureCollection("users/ujavalgandhi/e2e/arkavathy_gcps"); +var alos = ee.Image("JAXA/ALOS/AW3D30/V2_2"); + + +var arkavathy = basin.filter(ee.Filter.eq('HYBAS_ID', 4071139640)) +var boundary = arkavathy.geometry() +var rgbVis = { + min: 0.0, + max: 3000, + bands: ['B4', 'B3', 'B2'], +}; +// Function to remove cloud and snow pixels from Sentinel-2 SR image + +function maskCloudAndShadowsSR(image) { + var cloudProb = image.select('MSK_CLDPRB'); + var snowProb = image.select('MSK_SNWPRB'); + var cloud = cloudProb.lt(10); + var scl = image.select('SCL'); + var shadow = scl.eq(3); // 3 = cloud shadow + var cirrus = scl.eq(10); // 10 = cirrus + // Cloud probability less than 10% or cloud shadow classification + var mask = cloud.and(cirrus.neq(1)).and(shadow.neq(1)); + return image.updateMask(mask); +} + + +var filtered = s2 +.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) + .filter(ee.Filter.date('2019-01-01', '2020-01-01')) + .filter(ee.Filter.bounds(boundary)) + .map(maskCloudAndShadowsSR) + .select('B.*') + +var composite = filtered.median().clip(boundary) + +var visParams = {bands: ['B4', 'B3', 'B2'], min: 0, max: 3000, gamma: 1.2}; +Map.centerObject(boundary) +Map.addLayer(composite, visParams, 'RGB'); + +var addIndices = function(image) { + var ndvi = image.normalizedDifference(['B8', 'B4']).rename(['ndvi']); + var ndbi = image.normalizedDifference(['B11', 'B8']).rename(['ndbi']); + var mndwi = image.normalizedDifference(['B3', 'B11']).rename(['mndwi']); + var bsi = image.expression( + '(( X + Y ) - (A + B)) /(( X + Y ) + (A + B)) ', { + 'X': image.select('B11'), //swir1 + 'Y': image.select('B4'), //red + 'A': image.select('B8'), // nir + 'B': image.select('B2'), // blue + }).rename('bsi'); + return image.addBands(ndvi).addBands(ndbi).addBands(mndwi).addBands(bsi) 
+} + +var composite = addIndices(composite); + + +// Calculate Slope and Elevation +var elev = alos.select('AVE_DSM').rename('elev'); +var slope = ee.Terrain.slope(alos.select('AVE_DSM')).rename('slope'); + +var composite = composite.addBands(elev).addBands(slope); + + + +// Normalize the image + +// Machine learning algorithms work best on images when all features have +// the same range + +// Function to Normalize Image +// Pixel Values should be between 0 and 1 +// Formula is (x - xmin) / (xmax - xmin) +//************************************************************************** +function normalize(image){ + var bandNames = image.bandNames(); + // Compute min and max of the image + var minDict = image.reduceRegion({ + reducer: ee.Reducer.min(), + geometry: boundary, + scale: 20, + maxPixels: 1e9, + bestEffort: true, + tileScale: 16 + }); + var maxDict = image.reduceRegion({ + reducer: ee.Reducer.max(), + geometry: boundary, + scale: 20, + maxPixels: 1e9, + bestEffort: true, + tileScale: 16 + }); + var mins = ee.Image.constant(minDict.values(bandNames)); + var maxs = ee.Image.constant(maxDict.values(bandNames)); + + var normalized = image.subtract(mins).divide(maxs.subtract(mins)) + return normalized +} + +var composite = normalize(composite); +// Add a random column and split the GCPs into training and validation set +var gcp = gcp.randomColumn() + +// This being a simpler classification, we take 60% points +// for validation. Normal recommended ratio is +// 70% training, 30% validation +var trainingGcp = gcp.filter(ee.Filter.lt('random', 0.6)); +var validationGcp = gcp.filter(ee.Filter.gte('random', 0.6)); +// Overlay the point on the image to get training data. +var training = composite.sampleRegions({ + collection: trainingGcp, + properties: ['landcover'], + scale: 10, + tileScale: 16 +}); +print(training) +// Train a classifier. 
+var classifier = ee.Classifier.smileRandomForest(50) +.train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() +}); + +//************************************************************************** +// Feature Importance +//************************************************************************** + +// Run .explain() to see what the classifer looks like +print(classifier.explain()) + +// Calculate variable importance +var importance = ee.Dictionary(classifier.explain().get('importance')) + +// Calculate relative importance +var sum = importance.values().reduce(ee.Reducer.sum()) + +var relativeImportance = importance.map(function(key, val) { + return (ee.Number(val).multiply(100)).divide(sum) + }) +print(relativeImportance) + +// Create a FeatureCollection so we can chart it +var importanceFc = ee.FeatureCollection([ + ee.Feature(null, relativeImportance) +]) + +var chart = ui.Chart.feature.byProperty({ + features: importanceFc +}).setOptions({ + title: 'Feature Importance', + vAxis: {title: 'Importance'}, + hAxis: {title: 'Feature'} + }) +print(chart) + +//************************************************************************** +// Hyperparameter Tuning +//************************************************************************** + +var test = composite.sampleRegions({ + collection: validationGcp, + properties: ['landcover'], + scale: 10, + tileScale: 16 +}); + + +// Tune the numberOfTrees parameter. 
+var numTreesList = ee.List.sequence(10, 150, 10); + +var accuracies = numTreesList.map(function(numTrees) { + var classifier = ee.Classifier.smileRandomForest(numTrees) + .train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() + }); + + // Here we are classifying a table instead of an image + // Classifiers work on both images and tables + return test + .classify(classifier) + .errorMatrix('landcover', 'classification') + .accuracy(); +}); + +var chart = ui.Chart.array.values({ + array: ee.Array(accuracies), + axis: 0, + xLabels: numTreesList + }).setOptions({ + title: 'Hyperparameter Tuning for the numberOfTrees Parameters', + vAxis: {title: 'Validation Accuracy'}, + hAxis: {title: 'Number of Tress', gridlines: {count: 15}} + }); +print(chart) + +// Tuning Multiple Parameters +// We can tune many parameters together using +// nested map() functions +// Let's tune 2 parameters +// numTrees and bagFraction +var numTreesList = ee.List.sequence(10, 150, 10); +var bagFractionList = ee.List.sequence(0.1, 0.9, 0.1); + +var accuracies = numTreesList.map(function(numTrees) { + return bagFractionList.map(function(bagFraction) { + var classifier = ee.Classifier.smileRandomForest({ + numberOfTrees: numTrees, + bagFraction: bagFraction + }) + .train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() + }); + + // Here we are classifying a table instead of an image + // Classifiers work on both images and tables + var accuracy = test + .classify(classifier) + .errorMatrix('landcover', 'classification') + .accuracy(); + return ee.Feature(null, {'accuracy': accuracy, + 'numberOfTrees': numTrees, + 'bagFraction': bagFraction}) + }) +}).flatten() +var resultFc = ee.FeatureCollection(accuracies) + +// Export the result as CSV +Export.table.toDrive({ + collection: resultFc, + description: 'Multiple_Parameter_Tuning_Results', + folder: 'earthengine', + fileNamePrefix: 'numtrees_bagfraction', + 
fileFormat: 'CSV'}); + +// Alternatively we can automatically pick the parameters +// that result in the highest accuracy +var resultFcSorted = resultFc.sort('accuracy', false); +var highestAccuracyFeature = resultFcSorted.first(); +var highestAccuracy = highestAccuracyFeature.getNumber('accuracy'); +var optimalNumTrees = highestAccuracyFeature.getNumber('numberOfTrees'); +var optimalBagFraction = highestAccuracyFeature.getNumber('bagFraction'); + +// Use the optimal parameters in a model and perform final classification +var optimalModel = ee.Classifier.smileRandomForest({ + numberOfTrees: optimalNumTrees, + bagFraction: optimalBagFraction +}).train({ + features: training, + classProperty: 'landcover', + inputProperties: composite.bandNames() +}); + +var finalClassification = composite.classify(optimalModel); + +// Printing or Displaying the image may time out as it requires +// extensive computation to find the optimal parameters + +// Export the 'finalClassification' to Asset and import the +// result to view it. \ No newline at end of file diff --git a/docs/end-to-end-gee.html b/docs/end-to-end-gee.html index 1fb2af16..46f0ae71 100644 --- a/docs/end-to-end-gee.html +++ b/docs/end-to-end-gee.html @@ -1173,15 +1173,19 @@

Exercise

Try in Code Editor ↗

-
// Apply a filter to select only the 'Bangalore Urban' district
-// Display only the selected district
+
var admin2 = ee.FeatureCollection('FAO/GAUL_SIMPLIFIED_500m/2015/level2');
+Map.addLayer(admin2, {color: 'grey'}, 'All Admin2 Polygons');
 
-// Hint: The district names are in ADM2_NAME property
-
-var admin2 = ee.FeatureCollection('FAO/GAUL_SIMPLIFIED_500m/2015/level2');
-var karnataka = admin2.filter(ee.Filter.eq('ADM1_NAME', 'Karnataka'));
-
-var visParams = {'color': 'red'};
+// Exercise +// Apply filters to select your chosen Admin2 region +// Display the results in 'red' color + +// Hint1: Swith to the 'Inspector' tab and click on any +// polygon to know its properties and their values + +// Hint2: Many countries do not have unique names for +// Admin2 regions. Make sure to apply a filter to select +// the Admin1 region that contains your chosen Admin2 region
@@ -1228,16 +1232,18 @@

Exercise

href="https://code.earthengine.google.co.in/?scriptPath=users%2Fujavalgandhi%2FEnd-to-End-GEE%3A01-Earth-Engine-Basics%2F06c_Import_(exercise)" target="_blank">Try in Code Editor ↗

// Apply a filter to select only large urban centers
-// in your country and display it on the map.
+class="sourceCode js">var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers');
+print(urban.first());
 
-// Select all urban centers in your country with
-// a population greater than 1000000
-
-// Use the property 'CTR_MN_NM' containing country names
-// Use the property 'P15' containing 2015 Population
-var urban = ee.FeatureCollection('users/ujavalgandhi/e2e/ghs_urban_centers');
-print(urban.first())
+// Exercise +// Apply a filter to select only large urban centers +// in your country and display it on the map. + +// Select all urban centers in your country with +// a population greater than 1000000 + +// Hint1: Use the property 'CTR_MN_NM' containing country names +// Hint2: Use the property 'P15' containing 2015 Population
@@ -1357,34 +1363,35 @@

08. Exporting Data

var exportImage = clipped.select('B.*'); -Export.image.toDrive({ - image: exportImage, - description: 'Bangalore_Composite_Raw', - folder: 'earthengine', - fileNamePrefix: 'bangalore_composite_raw', - region: geometry, - scale: 10, - maxPixels: 1e9 -}); - -// Rather than exporting raw bands, we can apply a rendered image -// visualize() function allows you to apply the same parameters -// that are used in earth engine which exports a 3-band RGB image -print(clipped); -var visualized = clipped.visualize(rgbVis); -print(visualized); -// Now the 'visualized' image is RGB image, no need to give visParams -Map.addLayer(visualized, {}, 'Visualized Image'); - -Export.image.toDrive({ - image: visualized, - description: 'Bangalore_Composite_Visualized', - folder: 'earthengine', - fileNamePrefix: 'bangalore_composite_visualized', - region: geometry, - scale: 10, - maxPixels: 1e9 -});
+// Export raw image with original pixel values +Export.image.toDrive({ + image: exportImage, + description: 'Bangalore_Composite_Raw', + folder: 'earthengine', + fileNamePrefix: 'bangalore_composite_raw', + region: geometry, + scale: 10, + maxPixels: 1e9 +}); + +// Export visualized image with RGB + +// Rather than exporting raw bands, we can apply a rendered image +// visualize() function allows you to apply the same parameters +// that are used in earth engine which exports a 3-band RGB image + +// Note: Visualized images are not suitable for analysis +var visualized = clipped.visualize(rgbVis); + +Export.image.toDrive({ + image: visualized, + description: 'Bangalore_Composite_Visualized', + folder: 'earthengine', + fileNamePrefix: 'bangalore_composite_visualized', + region: geometry, + scale: 10, + maxPixels: 1e9 +});

Exercise

Exercise class="sourceCode js">var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED'); var geometry = ee.Geometry.Point([77.60412933051538, 12.952912912328241]); -var now = Date.now() -var now = ee.Date(now) -// Apply another filter to the collection below to filter images -// collected in the last 1-month -// Do not hard-code the dates, it should always show images -// from the past 1-month whenever you run the script -// Hint: Use ee.Date.advance() function -// to compute the date 1 month before now -var filtered = s2.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) - .filter(ee.Filter.bounds(geometry))

+var now = Date.now(); +var now = ee.Date(now); + +// Exercise +// Apply another filter to the collection below to filter images +// collected in the last 1-month +// Do not hard-code the dates, it should always show images +// from the past 1-month whenever you run the script +// Hint: Use ee.Date.advance() function +// to compute the date 1 month before now +var filtered = s2.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) + .filter(ee.Filter.bounds(geometry))
@@ -4801,6 +4810,9 @@

Automatic Conversion using GUI

m = geemap.Map(width=800)
 m
+

You will see the auto-converted code displayed. Copy and paste it +into a new cell and run it. Your code will be run using the GEE Python +API.

geometry = ee.Geometry.Point([77.60412933051538, 12.952912912328241])
 s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
@@ -4815,35 +4827,21 @@ 

Automatic Conversion using GUI

m.centerObject(geometry, 10) m.addLayer(medianComposite, rgbVis, 'Median Composite')
-

You will see the auto-converted code displayed. Copy and paste it -into a new cell and run it. Your code will be run using the GEE Python -API. If your code loads any layers, they will be loadded on the map -widget. To display it, open a new code cell and just type m -to display the widget.

+

If your code loads any layers, they will be loaded on the map widget. +To display it, open a new code cell and just type m to +display the widget.

m
-
-

Note The auto-conversion is almost perfect and works flawlessly on -most GEE code. One place it misses is during the conversion of function -arguments specified as a dicitonary. You will need to prefix the -resulting code with ** to specify them as -**kwargs. For example, the geemap converter -produces code such as below.

-
stats = image.reduceRegion({
-'reducer': ee.Reducer.mean(),
-'geometry': geometry,
-'scale': 10,
-'maxPixels': 1e10
-})
-

To make this valid GEE Python API code - prefix the argument -dictionary with **.

-
stats = image.reduceRegion(**{
-'reducer': ee.Reducer.mean(),
-'geometry': geometry,
-'scale': 10,
-'maxPixels': 1e10
-})
-
+

The auto-conversion is almost perfect and works flawlessly on most +GEE code.

+

One place it misses is during the conversion of function arguments +specified as a dicitonary. You will need to prefix the resulting code +with ** to specify them as **kwargs. For +example, the geemap converter produces code such as below. +stats = image.reduceRegion({ 'reducer': ee.Reducer.mean(), 'geometry': geometry, 'scale': 10, 'maxPixels': 1e10 }) +To make this valid GEE Python API code - prefix the argument dictionary +with **. +stats = image.reduceRegion(**{ 'reducer': ee.Reducer.mean(), 'geometry': geometry, 'scale': 10, 'maxPixels': 1e10 })

Automatic Conversion using Code

@@ -4851,107 +4849,107 @@

Automatic Conversion using Code

that can be used to perform the conversion using code. This is useful for batch conversions. To use this, we first create a string with the javascript code.

+
javascript_code = """
+var geometry = ee.Geometry.Point([107.61303468448624, 12.130969369851766]);
+Map.centerObject(geometry, 12)
+var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
+var rgbVis = {
+  min: 0.0,
+  max: 3000,
+  bands: ['B4', 'B3', 'B2'],
+};
+
+// Write a function for Cloud masking
+function maskS2clouds(image) {
+  var qa = image.select('QA60')
+  var cloudBitMask = 1 << 10;
+  var cirrusBitMask = 1 << 11;
+  var mask = qa.bitwiseAnd(cloudBitMask).eq(0).and(
+             qa.bitwiseAnd(cirrusBitMask).eq(0))
+  return image.updateMask(mask)
+      .select("B.*")
+      .copyProperties(image, ["system:time_start"])
+}
+
+var filtered = s2
+  .filter(ee.Filter.date('2019-01-01', '2019-12-31'))
+  .filter(ee.Filter.bounds(geometry))
+  .map(maskS2clouds)
+
+
+// Write a function that computes NDVI for an image and adds it as a band
+function addNDVI(image) {
+  var ndvi = image.normalizedDifference(['B5', 'B4']).rename('ndvi');
+  return image.addBands(ndvi);
+}
+
+var withNdvi = filtered.map(addNDVI);
+
+var composite = withNdvi.median()
+palette = [
+  'FFFFFF', 'CE7E45', 'DF923D', 'F1B555', 'FCD163', '99B718',
+  '74A901', '66A000', '529400', '3E8601', '207401', '056201',
+  '004C00', '023B01', '012E01', '011D01', '011301'];
+
+ndviVis = {min:0, max:0.5, palette: palette }
+Map.addLayer(withNdvi.select('ndvi'), ndviVis, 'NDVI Composite')
+
+"""
+
lines = geemap.js_snippet_to_py(
+    javascript_code, add_new_cell=False,
+    import_ee=True, import_geemap=True, show_map=True)
+for line in lines:
+    print(line.rstrip())
+

The automatic conversion works great. Review it and paste it to the +cell below.

javascript_code = """
-var geometry = ee.Geometry.Point([107.61303468448624, 12.130969369851766]);
-Map.centerObject(geometry, 12)
-var s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
-var rgbVis = {
-  min: 0.0,
-  max: 3000,
-  bands: ['B4', 'B3', 'B2'],
-};
-
-// Write a function for Cloud masking
-function maskS2clouds(image) {
-  var qa = image.select('QA60')
-  var cloudBitMask = 1 << 10;
-  var cirrusBitMask = 1 << 11;
-  var mask = qa.bitwiseAnd(cloudBitMask).eq(0).and(
-             qa.bitwiseAnd(cirrusBitMask).eq(0))
-  return image.updateMask(mask)
-      .select("B.*")
-      .copyProperties(image, ["system:time_start"])
-}
-
-var filtered = s2
-  .filter(ee.Filter.date('2019-01-01', '2019-12-31'))
-  .filter(ee.Filter.bounds(geometry))
-  .map(maskS2clouds)
-
-
-// Write a function that computes NDVI for an image and adds it as a band
-function addNDVI(image) {
-  var ndvi = image.normalizedDifference(['B5', 'B4']).rename('ndvi');
-  return image.addBands(ndvi);
-}
+class="sourceCode python">import ee
+import geemap
+m = geemap.Map()
+
+geometry = ee.Geometry.Point([107.61303468448624, 12.130969369851766])
+m.centerObject(geometry, 12)
+s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
+rgbVis = {
+  'min': 0.0,
+  'max': 3000,
+  'bands': ['B4', 'B3', 'B2'],
+}
+
+# Write a function for Cloud masking
+def maskS2clouds(image):
+  qa = image.select('QA60')
+  cloudBitMask = 1 << 10
+  cirrusBitMask = 1 << 11
+  mask = qa.bitwiseAnd(cloudBitMask).eq(0).And(
+             qa.bitwiseAnd(cirrusBitMask).eq(0))
+  return image.updateMask(mask) \
+      .select("B.*") \
+      .copyProperties(image, ["system:time_start"])
+
+filtered = s2 \
+  .filter(ee.Filter.date('2019-01-01', '2019-12-31')) \
+  .filter(ee.Filter.bounds(geometry)) \
+  .map(maskS2clouds)
+
+# Write a function that computes NDVI for an image and adds it as a band
+def addNDVI(image):
+  ndvi = image.normalizedDifference(['B5', 'B4']).rename('ndvi')
+  return image.addBands(ndvi)
 
-var withNdvi = filtered.map(addNDVI);
+withNdvi = filtered.map(addNDVI)
 
-var composite = withNdvi.median()
-palette = [
-  'FFFFFF', 'CE7E45', 'DF923D', 'F1B555', 'FCD163', '99B718',
-  '74A901', '66A000', '529400', '3E8601', '207401', '056201',
-  '004C00', '023B01', '012E01', '011D01', '011301'];
+composite = withNdvi.median()
+palette = [
+  'FFFFFF', 'CE7E45', 'DF923D', 'F1B555', 'FCD163', '99B718',
+  '74A901', '66A000', '529400', '3E8601', '207401', '056201',
+  '004C00', '023B01', '012E01', '011D01', '011301']
 
-ndviVis = {min:0, max:0.5, palette: palette }
-Map.addLayer(withNdvi.select('ndvi'), ndviVis, 'NDVI Composite')
-
-"""
-
lines = geemap.js_snippet_to_py(
-    javascript_code, add_new_cell=False,
-    import_ee=True, import_geemap=True, show_map=True)
-for line in lines:
-    print(line.rstrip())
-

The automatic conversion works great. Review it and paste it to the -cell below.

-
import ee
-import geemap
-m = geemap.Map()
-
-geometry = ee.Geometry.Point([107.61303468448624, 12.130969369851766])
-m.centerObject(geometry, 12)
-s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
-rgbVis = {
-  'min': 0.0,
-  'max': 3000,
-  'bands': ['B4', 'B3', 'B2'],
-}
-
-# Write a function for Cloud masking
-def maskS2clouds(image):
-  qa = image.select('QA60')
-  cloudBitMask = 1 << 10
-  cirrusBitMask = 1 << 11
-  mask = qa.bitwiseAnd(cloudBitMask).eq(0).And(
-             qa.bitwiseAnd(cirrusBitMask).eq(0))
-  return image.updateMask(mask) \
-      .select("B.*") \
-      .copyProperties(image, ["system:time_start"])
-
-filtered = s2 \
-  .filter(ee.Filter.date('2019-01-01', '2019-12-31')) \
-  .filter(ee.Filter.bounds(geometry)) \
-  .map(maskS2clouds)
-
-# Write a function that computes NDVI for an image and adds it as a band
-def addNDVI(image):
-  ndvi = image.normalizedDifference(['B5', 'B4']).rename('ndvi')
-  return image.addBands(ndvi)
-
-withNdvi = filtered.map(addNDVI)
-
-composite = withNdvi.median()
-palette = [
-  'FFFFFF', 'CE7E45', 'DF923D', 'F1B555', 'FCD163', '99B718',
-  '74A901', '66A000', '529400', '3E8601', '207401', '056201',
-  '004C00', '023B01', '012E01', '011D01', '011301']
-
-ndviVis = {'min':0, 'max':0.5, 'palette': palette }
-m.addLayer(withNdvi.select('ndvi'), ndviVis, 'NDVI Composite')
-m
+ndviVis = {'min':0, 'max':0.5, 'palette': palette }
+m.addLayer(withNdvi.select('ndvi'), ndviVis, 'NDVI Composite')
+m

Exercise

@@ -4997,97 +4995,97 @@

Initialization

associated with your GEE account. Replace the cloud_project with your own project from Google Cloud Console.

-
import ee
-
cloud_project = 'spatialthoughts'
-
-try:
-    ee.Initialize(project=cloud_project)
-except:
-    ee.Authenticate()
-    ee.Initialize(project=cloud_project)
+
import ee
+
cloud_project = 'spatialthoughts'
+
+try:
+    ee.Initialize(project=cloud_project)
+except:
+    ee.Authenticate()
+    ee.Initialize(project=cloud_project)

Create a Collection

-
geometry = ee.Geometry.Point([107.61303468448624, 12.130969369851766])
-s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
-rgbVis = {
-  'min': 0.0,
-  'max': 3000,
-  'bands': ['B4', 'B3', 'B2'],
-}
-
-# Write a function for Cloud masking
-def maskS2clouds(image):
-  qa = image.select('QA60')
-  cloudBitMask = 1 << 10
-  cirrusBitMask = 1 << 11
-  mask = qa.bitwiseAnd(cloudBitMask).eq(0).And(
-             qa.bitwiseAnd(cirrusBitMask).eq(0))
-  return image.updateMask(mask) \
-      .select("B.*") \
-      .copyProperties(image, ["system:time_start"])
-
-filtered = s2 \
-  .filter(ee.Filter.date('2019-01-01', '2020-01-01')) \
-  .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) \
-  .filter(ee.Filter.bounds(geometry)) \
-  .map(maskS2clouds)
-
-# Write a function that computes NDVI for an image and adds it as a band
-def addNDVI(image):
-  ndvi = image.normalizedDifference(['B5', 'B4']).rename('ndvi')
-  return image.addBands(ndvi)
-
-withNdvi = filtered.map(addNDVI)
+
geometry = ee.Geometry.Point([107.61303468448624, 12.130969369851766])
+s2 = ee.ImageCollection('COPERNICUS/S2_HARMONIZED')
+rgbVis = {
+  'min': 0.0,
+  'max': 3000,
+  'bands': ['B4', 'B3', 'B2'],
+}
+
+# Write a function for Cloud masking
+def maskS2clouds(image):
+  qa = image.select('QA60')
+  cloudBitMask = 1 << 10
+  cirrusBitMask = 1 << 11
+  mask = qa.bitwiseAnd(cloudBitMask).eq(0).And(
+             qa.bitwiseAnd(cirrusBitMask).eq(0))
+  return image.updateMask(mask) \
+      .select("B.*") \
+      .copyProperties(image, ["system:time_start"])
+
+filtered = s2 \
+  .filter(ee.Filter.date('2019-01-01', '2020-01-01')) \
+  .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 30)) \
+  .filter(ee.Filter.bounds(geometry)) \
+  .map(maskS2clouds)
+
+# Write a function that computes NDVI for an image and adds it as a band
+def addNDVI(image):
+  ndvi = image.normalizedDifference(['B5', 'B4']).rename('ndvi')
+  return image.addBands(ndvi)
+
+withNdvi = filtered.map(addNDVI)

Export All Images

Exports are done via the ee.batch module. This module allows you to automatically start an export - making it suitable for batch exports.

-
image_ids = withNdvi.aggregate_array('system:index').getInfo()
-print('Total images: ', len(image_ids))
-
# Export with 100m resolution for this demo
-for i, image_id in enumerate(image_ids):
-  image = ee.Image(withNdvi.filter(ee.Filter.eq('system:index', image_id)).first())
-  task = ee.batch.Export.image.toDrive(**{
-    'image': image.select('ndvi'),
-    'description': 'Image Export {}'.format(i+1),
-    'fileNamePrefix': image_id,
-    'folder':'earthengine',
-    'scale': 100,
-    'region': image.geometry(),
-    'maxPixels': 1e10
-  })
-  task.start()
-  print('Started Task: ', i+1)
+
image_ids = withNdvi.aggregate_array('system:index').getInfo()
+print('Total images: ', len(image_ids))
+
# Export with 100m resolution for this demo
+for i, image_id in enumerate(image_ids):
+  image = ee.Image(withNdvi.filter(ee.Filter.eq('system:index', image_id)).first())
+  task = ee.batch.Export.image.toDrive(**{
+    'image': image.select('ndvi'),
+    'description': 'Image Export {}'.format(i+1),
+    'fileNamePrefix': image_id,
+    'folder':'earthengine',
+    'scale': 100,
+    'region': image.geometry(),
+    'maxPixels': 1e10
+  })
+  task.start()
+  print('Started Task: ', i+1)

Manage Running/Waiting Tasks

You can manage tasks as well. Get a list of tasks and get state information on them

-
tasks = ee.batch.Task.list()
-for task in tasks:
-  task_id = task.status()['id']
-  task_state = task.status()['state']
-  print(task_id, task_state)
+
tasks = ee.batch.Task.list()
+for task in tasks:
+  task_id = task.status()['id']
+  task_state = task.status()['state']
+  print(task_id, task_state)

You can cancel tasks as well

-
tasks = ee.batch.Task.list()
-for task in tasks:
-    task_id = task.status()['id']
-    task_state = task.status()['state']
-    if task_state == 'RUNNING' or task_state == 'READY':
-        task.cancel()
-        print('Task {} canceled'.format(task_id))
-    else:
-        print('Task {} state is {}'.format(task_id, task_state))
+
tasks = ee.batch.Task.list()
+for task in tasks:
+    task_id = task.status()['id']
+    task_state = task.status()['state']
+    if task_state == 'RUNNING' or task_state == 'READY':
+        task.cancel()
+        print('Task {} canceled'.format(task_id))
+    else:
+        print('Task {} state is {}'.format(task_id, task_state))

Exercise

@@ -5102,38 +5100,38 @@

Exercise

  • Hint2: You need to export the image contained in the clippedImage variable
  • -
    import ee
    -
    -lsib = ee.FeatureCollection('USDOS/LSIB_SIMPLE/2017')
    -australia = lsib.filter(ee.Filter.eq('country_na', 'Australia'))
    -geometry = australia.geometry()
    -
    -terraclimate = ee.ImageCollection('IDAHO_EPSCOR/TERRACLIMATE')
    -tmax = terraclimate.select('tmmx')
    -
    -def scale(image):
    -  return image.multiply(0.1) \
    -    .copyProperties(image,['system:time_start'])
    -
    -tmaxScaled = tmax.map(scale)
    -
    -filtered = tmaxScaled \
    -  .filter(ee.Filter.date('2020-01-01', '2021-01-01')) \
    -  .filter(ee.Filter.bounds(geometry))
    -
    -image_ids = filtered.aggregate_array('system:index').getInfo()
    -print('Total images: ', len(image_ids))
    +
    import ee
    +
    +lsib = ee.FeatureCollection('USDOS/LSIB_SIMPLE/2017')
    +australia = lsib.filter(ee.Filter.eq('country_na', 'Australia'))
    +geometry = australia.geometry()
    +
    +terraclimate = ee.ImageCollection('IDAHO_EPSCOR/TERRACLIMATE')
    +tmax = terraclimate.select('tmmx')
    +
    +def scale(image):
    +  return image.multiply(0.1) \
    +    .copyProperties(image,['system:time_start'])
    +
    +tmaxScaled = tmax.map(scale)
    +
    +filtered = tmaxScaled \
    +  .filter(ee.Filter.date('2020-01-01', '2021-01-01')) \
    +  .filter(ee.Filter.bounds(geometry))
    +
    +image_ids = filtered.aggregate_array('system:index').getInfo()
    +print('Total images: ', len(image_ids))

    Replace the comments with your code.

    -
    for i, image_id in enumerate(image_ids):
    -    exportImage = ee.Image(filtered.filter(ee.Filter.eq('system:index', image_id)).first())
    -    # Clip the image to the region geometry
    -    clippedImage = exportImage.clip(geometry)
    -
    -    ## Create the export task using ee.batch.Export.image.toDrive()
    -
    -    ## Start the task
    +
    for i, image_id in enumerate(image_ids):
    +    exportImage = ee.Image(filtered.filter(ee.Filter.eq('system:index', image_id)).first())
    +    # Clip the image to the region geometry
    +    clippedImage = exportImage.clip(geometry)
    +
    +    ## Create the export task using ee.batch.Export.image.toDrive()
    +
    +    ## Start the task
    Launching multiple tasks using the  Python API

    @@ -5166,99 +5164,99 @@

    Initialization

    associated with your GEE account. Replace the cloud_project with your own project from Google Cloud Console.

    -
    import geemap
    -import ee
    -
    cloud_project = 'spatialthoughts'
    -
    -try:
    -    ee.Initialize(project=cloud_project)
    -except:
    -    ee.Authenticate()
    -    ee.Initialize(project=cloud_project)
    +
    import geemap
    +import ee
    +
    cloud_project = 'spatialthoughts'
    +
    +try:
    +    ee.Initialize(project=cloud_project)
    +except:
    +    ee.Authenticate()
    +    ee.Initialize(project=cloud_project)

    Load and Process the Data

    Load the TerraClimate collection and select the ‘tmmx’ band.

    -
    terraclimate = ee.ImageCollection('IDAHO_EPSCOR/TERRACLIMATE')
    -tmax = terraclimate.select('tmmx')
    +
    terraclimate = ee.ImageCollection('IDAHO_EPSCOR/TERRACLIMATE')
    +tmax = terraclimate.select('tmmx')

    Define a point location for the chart.

    -
    geometry = ee.Geometry.Point([77.57738128916243, 12.964758918835752])
    +
    geometry = ee.Geometry.Point([77.57738128916243, 12.964758918835752])

    Scale the band values so they are in Degrees Celsius.

    -
    def scale_image(image):
    -  return ee.Image(image).multiply(0.1)\
    -    .copyProperties(image, ['system:time_start'])
    -
    -tmaxScaled = tmax.map(scale_image)
    +
    def scale_image(image):
    +  return ee.Image(image).multiply(0.1)\
    +    .copyProperties(image, ['system:time_start'])
    +
    +tmaxScaled = tmax.map(scale_image)

    Filter the collection.

    -
    filtered = tmaxScaled.filter(ee.Filter.date('2019-01-01', '2020-01-01')) \
    -                     .filter(ee.Filter.bounds(geometry))
    +
    filtered = tmaxScaled.filter(ee.Filter.date('2019-01-01', '2020-01-01')) \
    +                     .filter(ee.Filter.bounds(geometry))

    To chart an image series in Python, we must first extract the values from each image and create a FeatureCollection.

    -
    def extract_data(image):
    -    stats = image.reduceRegion(**{
    -        'reducer':ee.Reducer.mean(),
    -        'geometry':geometry,
    -        'scale':5000
    -    })
    -    properties = {
    -        'month': image.get('system:index'),
    -        'tmmx': stats.get('tmmx')
    -    }
    -    return ee.Feature(None, properties)
    -
    -data = ee.FeatureCollection(filtered.map(extract_data))
    -
    print(data.first().getInfo())
    +
    def extract_data(image):
    +    stats = image.reduceRegion(**{
    +        'reducer':ee.Reducer.mean(),
    +        'geometry':geometry,
    +        'scale':5000
    +    })
    +    properties = {
    +        'month': image.get('system:index'),
    +        'tmmx': stats.get('tmmx')
    +    }
    +    return ee.Feature(None, properties)
    +
    +data = ee.FeatureCollection(filtered.map(extract_data))
    +
    print(data.first().getInfo())

    Create an Interactive Chart using geemap

    +
    from geemap import chart
    +
    options = {
    +    'title': 'Max Monthly Temperature at Bangalore',
    +    'legend_location': 'top-right',
    +    'height': '500px',
    +    'ylabel': 'Temperature (C)',
    +    'xlabel': 'Date',
    +    'colors': ['blue']
    +}
    from geemap import chart
    -
    options = {
    -    'title': 'Max Monthly Temperature at Bangalore',
    -    'legend_location': 'top-right',
    -    'height': '500px',
    -    'ylabel': 'Temperature (C)',
    -    'xlabel': 'Date',
    -    'colors': ['blue']
    -}
    -
    chart.feature_byFeature(data, 'month', ['tmmx'], **options)
    +class="sourceCode python">chart.feature_byFeature(data, 'month', ['tmmx'], **options)

    Create a chart using Matplotlib

    We can convert a FeatureCollection to a DataFrame using geemap helper function ee_to_pandas.

    -
    import geemap
    -df = geemap.ee_to_pandas(data)
    -
    df
    +
    import geemap
    +df = geemap.ee_to_pandas(data)
    +
    df

    Now we have a regular Pandas dataframe that can be plotted with matplotlib.

    -
    %matplotlib inline
    -import matplotlib.pyplot as plt
    -
    fig, ax = plt.subplots()
    -fig.set_size_inches(20,10)
    -
    -
    -df.plot(ax=ax,
    -        title='Max Monthly Temperature at Bangalore',
    -        x='month',
    -        ylabel='Temperature (C)',
    -        kind='line')
    -plt.tight_layout()
    +
    %matplotlib inline
    +import matplotlib.pyplot as plt
    +
    fig, ax = plt.subplots()
    +fig.set_size_inches(20,10)
    +
    +
    +df.plot(ax=ax,
    +        title='Max Monthly Temperature at Bangalore',
    +        x='month',
    +        ylabel='Temperature (C)',
    +        kind='line')
    +plt.tight_layout()

    Exercise

    @@ -5294,74 +5292,74 @@

    05. Automating Downloads

  • Create a new file named download_data.py with the content shown below.
  • -
    import datetime
    -import ee
    -import csv
    -import os
    -
    -cloud_project = 'spatialthoughts'
    -
    -try:
    -    ee.Initialize(project=cloud_project)
    -except:
    -    ee.Authenticate()
    -    ee.Initialize(project=cloud_project)
    -
    -# Get current date and convert to milliseconds 
    -start_date = ee.Date.fromYMD(2022, 1, 1)
    -end_date = start_date.advance(1, 'month')
    -
    -date_string = end_date.format('YYYY_MM')
    -filename = 'ssm_{}.csv'.format(date_string.getInfo())
    -
    -# Saving to current directory. You can change the path to appropriate location
    -output_path = os.path.join(filename)
    -
    -# Datasets
    -# SMAP is in safe mode and not generating new data since August 2022
    -# https://nsidc.org/data/user-resources/data-announcements/user-notice-smap-safe-mode
    -soilmoisture = ee.ImageCollection("NASA_USDA/HSL/SMAP10KM_soil_moisture")
    -admin2 = ee.FeatureCollection("FAO/GAUL_SIMPLIFIED_500m/2015/level2")
    -
    -# Filter to a state
    -karnataka = admin2.filter(ee.Filter.eq('ADM1_NAME', 'Karnataka'))
    -
    -# Select the ssm band
    -ssm  = soilmoisture.select('ssm')
    -
    -filtered = ssm .filter(ee.Filter.date(start_date, end_date))
    -
    -mean = filtered.mean()
    -
    -stats = mean.reduceRegions(**{
    -  'collection': karnataka,
    -  'reducer': ee.Reducer.mean().setOutputs(['meanssm']),
    -  'scale': 10000,
    -  'crs': 'EPSG:32643'
    -  })
    -
    -# Select columns to keep and remove geometry to make the result lightweight
    -# Change column names to match your uploaded shapefile
    -columns = ['ADM2_NAME', 'meanssm']
    -exportCollection = stats.select(**{
    -    'propertySelectors': columns,
    -    'retainGeometry': False})
    -
    -features = exportCollection.getInfo()['features']
    -
    -data = []
    -
    -for f in features:
    -    data.append(f['properties'])
    -
    -field_names = ['ADM2_NAME', 'meanssm']
    -
    -with open(output_path, 'w') as csvfile:
    -    writer = csv.DictWriter(csvfile, fieldnames = field_names)
    -    writer.writeheader()
    -    writer.writerows(data)
    -    print('Success: File written at', output_path)
    +
    import datetime
    +import ee
    +import csv
    +import os
    +
    +cloud_project = 'spatialthoughts'
    +
    +try:
    +    ee.Initialize(project=cloud_project)
    +except:
    +    ee.Authenticate()
    +    ee.Initialize(project=cloud_project)
    +
    +# Get current date and convert to milliseconds 
    +start_date = ee.Date.fromYMD(2022, 1, 1)
    +end_date = start_date.advance(1, 'month')
    +
    +date_string = end_date.format('YYYY_MM')
    +filename = 'ssm_{}.csv'.format(date_string.getInfo())
    +
    +# Saving to current directory. You can change the path to appropriate location
    +output_path = os.path.join(filename)
    +
    +# Datasets
    +# SMAP is in safe mode and not generating new data since August 2022
    +# https://nsidc.org/data/user-resources/data-announcements/user-notice-smap-safe-mode
    +soilmoisture = ee.ImageCollection("NASA_USDA/HSL/SMAP10KM_soil_moisture")
    +admin2 = ee.FeatureCollection("FAO/GAUL_SIMPLIFIED_500m/2015/level2")
    +
    +# Filter to a state
    +karnataka = admin2.filter(ee.Filter.eq('ADM1_NAME', 'Karnataka'))
    +
    +# Select the ssm band
    +ssm  = soilmoisture.select('ssm')
    +
    +filtered = ssm .filter(ee.Filter.date(start_date, end_date))
    +
    +mean = filtered.mean()
    +
    +stats = mean.reduceRegions(**{
    +  'collection': karnataka,
    +  'reducer': ee.Reducer.mean().setOutputs(['meanssm']),
    +  'scale': 10000,
    +  'crs': 'EPSG:32643'
    +  })
    +
    +# Select columns to keep and remove geometry to make the result lightweight
    +# Change column names to match your uploaded shapefile
    +columns = ['ADM2_NAME', 'meanssm']
    +exportCollection = stats.select(**{
    +    'propertySelectors': columns,
    +    'retainGeometry': False})
    +
    +features = exportCollection.getInfo()['features']
    +
    +data = []
    +
    +for f in features:
    +    data.append(f['properties'])
    +
    +field_names = ['ADM2_NAME', 'meanssm']
    +
    +with open(output_path, 'w') as csvfile:
    +    writer = csv.DictWriter(csvfile, fieldnames = field_names)
    +    writer.writeheader()
    +    writer.writerows(data)
    +    print('Success: File written at', output_path)
    1. From the terminal, navigate to the directory where you have created the file and type the command below to run the script.