Saturday, January 4, 2025

GEE - Measuring Land Surface Temperature from Landsat 8

 The JavaScript code below will calculate Land Surface Temperature (LST) from Landsat 8 imagery.



Adapted from this Medium post: Analyzing Land Surface Temperature (LST) with Landsat 8 Data in Google Earth Engine - https://medium.com/@ridhomuh002/analyzing-land-surface-temperature-lst-with-landsat-8-data-in-google-earth-engine-f4dd7ca28e70
// Load AOI or ROI
// NOTE(review): this is a private user asset — replace the path with your
// own uploaded FeatureCollection if you don't have access to it.
var aoi = ee.FeatureCollection("users/forum2k9/Karu"); // Karu polygon - change if you need different 
Map.addLayer(aoi, {}, 'AOI - Karu');
Map.centerObject(aoi, 10); // zoom level 10 roughly frames a district-sized AOI

// Define data retrieval Start and End Date
// (filterDate treats the start as inclusive and the end as exclusive)
var startDate = '2023-03-01';
var endDate = '2023-10-31';


// Applies scaling factors. Recalculating from K to °C
// Apply Landsat 8 Collection 2 Level-2 scale/offset factors to an image.
// Optical SR bands are rescaled to surface reflectance; the thermal ST
// bands are converted to Kelvin and then shifted to °C.
// Remove the final subtract(273.15) if you prefer to keep Kelvin.
function applyScaleFactors(image) {
  var optical = image.select('SR_B.').multiply(0.0000275).add(-0.2);
  var thermal = image
      .select('ST_B.*')
      .multiply(0.00341802)
      .add(149.0)
      .subtract(273.15); // Kelvin -> °C
  // Overwrite the raw bands with their scaled versions.
  return image
      .addBands(optical, null, true)
      .addBands(thermal, null, true);
}

// Function to Mask Clouds and Cloud Shadows in Landsat 8 Imagery
function cloudMask(image) {
  // Define cloud shadow and cloud bitmasks
  var cloudShadowBitmask = (1 << 4);
  var cloudBitmask = (1 << 3);
  var cirrus = (1 << 2);             // I add also mask for cirrus. Suprisely use of Bit 6 (Clear of any clouds) is not working - anybody know why? 
  // Select the Quality Assessment (QA) band for pixel quality information
  var qa = image.select('QA_PIXEL');
  // Create a binary mask to identify clear conditions (both cloud and cloud shadow bits set to 0)
  var mask = qa.bitwiseAnd(cloudShadowBitmask).eq(0)
                .and(qa.bitwiseAnd(cloudBitmask).eq(0))
                .and(qa.bitwiseAnd(cirrus).eq(0));
  // Update the original image, masking out cloud and cloud shadow-affected pixels
  return image.updateMask(mask);
}

// Import and preprocess Landsat 8 imagery
// Filter to the AOI and date window, apply the scale factors and the cloud
// mask per image, then collapse the collection to a per-pixel median
// composite clipped to the AOI.
var image = ee.ImageCollection("LANDSAT/LC08/C02/T1_L2")
              .filterBounds(aoi)
              .filterDate(startDate, endDate)
              .map(applyScaleFactors)
              .map(cloudMask)
              .median()
              .clip(aoi);

// Define visualization parameters for True Color imagery (bands 4, 3, and 2)
var visualization = {
  bands: ['SR_B4', 'SR_B3', 'SR_B2'],
  min: 0.0,
  max: 0.2, // reflectance stretch; raise for brighter scenes
};

// Add the processed image to the map with the specified visualization
Map.addLayer(image, visualization, 'True Color 432');
print(image);
// Add the LST Layer to the Map with Custom Visualization Parameters
// ST_B10 is already in °C here because applyScaleFactors subtracted 273.15.
Map.addLayer(image.select('ST_B10'), {
  min: 19, // Minimum LST value (°C) — tune to your AOI's range
  max: 43, // Maximum LST value (°C)
  palette: [
    '040274', '040281', '0502a3', '0502b8', '0502ce', '0502e6',
    '0602ff', '235cb1', '307ef3', '269db1', '30c8e2', '32d3ef',
    '3be285', '3ff38f', '86e26f', '3ae237', 'b5e22e', 'd6e21f',
    'fff705', 'ffd611', 'ffb613', 'ff8b13', 'ff6e08', 'ff500d',
    'ff0000', 'de0101', 'c21301', 'a71001', '911003'
  ]}, 'Land Surface Temperature');




// // Calculate Normalized Difference Vegetation Index (NDVI)
// var ndvi = image.normalizedDifference(['SR_B5', 'SR_B4']).rename('NDVI');

// // Define NDVI Visualization Parameters
// var ndviPalette = {
// min: -1,
// max: 1,
// palette: ['blue', 'white', 'green']
// };

// Map.addLayer(ndvi, ndviPalette, 'NDVI Karu')


// Adapted from: https://github.com/panmoravec/GEE/blob/main/LST%20Landsat8


// Export the LST band to Google Drive as a GeoTIFF.
// The task appears under the Code Editor's "Tasks" tab and must be run there.
Export.image.toDrive({
  image: image.select('ST_B10'),
  description: 'LST_2023',
  scale: 30, // output resolution in metres per pixel
  region: aoi,
  fileFormat: 'GeoTIFF',
  maxPixels: 3100100100,  // set bigger in case of need
});


// Another version for.... Land surface temperature (LST) Monitoring using Landsat imagery using Google Earth Engine

1) https://code.earthengine.google.com/525ebe138c6d2198a0fd464f7cfc967a 

// Compute LST with Sofia Ermida's Landsat_LST module (statistical
// mono-window algorithm) for Landsat 8 over 2021, converted to °C.
// NOTE(review): 'geometry' and 'imageVisParam4' are not defined in this
// snippet — they come from the Code Editor imports of the linked script.
var LLST = require('users/sofiaermida/landsat_smw_lst:modules/Landsat_LST.js')
var Lan8 = LLST.collection('L8', '2021-1-01', '2021-12-31', geometry, true).select('LST').map(function(image){
  // Kelvin -> °C, clip to the AOI, and carry the timestamp for charting.
  return image.subtract(273.15).clip(geometry).set('system:time_start',image.get('system:time_start'))
})
print("chart");
// print(Lan8)
// Time-series chart of mean LST over the AOI, sampled at 30 m.
print(
  ui.Chart.image.series({
    imageCollection: Lan8,
    region: geometry,
    reducer: ee.Reducer.mean(),
    scale: 30,
    xProperty: 'system:time_start'
  }));


Map.centerObject(geometry,10)

Map.addLayer(geometry)

Map.addLayer(Lan8.mean().clip(geometry), imageVisParam4 ,'Dhaka City LST')


====================================================================================


2) https://code.earthengine.google.com/3fdeff0b82b6fc98022a11facd30d269 


// Land surface temperature
// Load a 2020-2022 median composite over the ROI (clouds NOT masked here).
// NOTE(review): 'ROI' and 'imageVisParam3' are not defined in this snippet —
// they come from the linked script's Code Editor imports. This also uses the
// Collection 1 dataset (C01/T1_SR), which USGS has deprecated in favour of C02.
var landsat8 = ee.ImageCollection("LANDSAT/LC08/C01/T1_SR")
             .filterBounds(ROI)
             .filterDate('2020-01-01','2022-12-31')
             //.filterMetadata('CLOUD_COVER', 'less_than', 1)
             .median()
             .clip(ROI)
print(landsat8)
Map.addLayer(landsat8, imageVisParam3, 'Satelliteimage with clouds')
Map.centerObject(landsat8,8)


// CLOUD MASK
// Mask clouds and cloud shadows in Landsat 8 Collection 1 SR imagery
// using the pixel_qa band: bit 3 = cloud shadow, bit 5 = cloud.
var maskL8sr = function(col){
  var shadowBit = 1 << 3;
  var cloudBit = 1 << 5;

  // Get the pixel_qa quality band.
  var qa = col.select('pixel_qa');

  // Keep only pixels where neither flag bit is set.
  var clear = qa.bitwiseAnd(shadowBit).eq(0)
      .and(qa.bitwiseAnd(cloudBit).eq(0));
  return col.updateMask(clear);
}

// visualization params
// False-colour composite (Landsat 8: B5 = NIR, B6 = SWIR1, B4 = Red).
var vizParams = {
bands: ['B5', 'B6', 'B4'],
min: 0,
max: 4000,
gamma: [1, 0.9, 1.1]
};

// True-colour composite (B4/B3/B2 = Red/Green/Blue).
var vizParams2 = {
bands: ['B4', 'B3', 'B2'],
min: 0,
max: 3000,
gamma: 1.4,
};

//load the collection
// Same collection as above, but cloud-masked (maskL8sr) per image and
// reduced with mean() rather than median().
// NOTE(review): 'imageVisParam2' is a Code Editor import, not defined here.
{
  var col = ee.ImageCollection("LANDSAT/LC08/C01/T1_SR")
     .map(maskL8sr)
     .filterBounds(ROI)
     .filterDate('2020-01-01','2022-12-31')
     .mean()
     .clip(ROI)
}
print(col)
Map.addLayer(col, imageVisParam2, 'Cloud_Free')

// NDVI CALCULATION
// NDVI = (NIR - Red) / (NIR + Red); for Landsat 8, NIR = B5 and Red = B4.

var NDVI = col.normalizedDifference(['B5','B4'])
Map.addLayer(NDVI, {min: -1, max: 0.7, palette: ['blue','white','yellow','red']}, 'Created NDVI')
Map.centerObject(NDVI, 8)

print(NDVI)

// selecting Band10 which is the surface temperature
// thermal band 10
// NOTE(review): the x0.1 factor presumably converts C01 SR B10's scaled
// brightness temperature to Kelvin — confirm against the C01 SR product docs.
var thermalband  = col.select('B10').multiply(0.1)
Map.addLayer(thermalband, {min:294.388, max:300.514, palette:['yellow','green','blue']}, 'Thermal band')

//finding minimum and maximum NDVI
// reduceRegion collapses the NDVI image over the ROI into one statistic;
// .values().get(0) extracts the single dictionary value as an ee.Number.

var min = ee.Number(NDVI.reduceRegion({   //reduceRegion computes a statistical function over the region
reducer: ee.Reducer.min(),
geometry: ROI,
scale: 30,
maxPixels: 1e9
}).values().get(0));
print(min, 'min');
  
var max = ee.Number(NDVI.reduceRegion({
reducer: ee.Reducer.max(),
geometry: ROI,
scale: 30,
maxPixels: 1e9
}).values().get(0));
print(max, 'max')
  
//fractional vegetation
// FV = ((NDVI - NDVImin) / (NDVImax - NDVImin))^2
var fv = (NDVI.subtract(min).divide(max.subtract(min))).pow(ee.Number(2))
print(fv,'fv');
Map.addLayer(fv, {},'Fractional Vegetation');

//Emissivity
// you will have to find the emissivity of natural and town
// Empirical relation: emissivity EM = a*FV + b = 0.004*FV + 0.986
 var a = ee.Number(0.004)
 var b = ee.Number(0.986)
 var EM  = fv.multiply(a).add(b).rename('EMM')
 Map.addLayer(EM, imageVisParam, 'Emissivity')
 
 //FINDING LAND SURFACE TEMPERATURE
 //LST in celsius degree bring -273.15
 //NB: In kelvin don't bring -273.15
 // Single-channel form: LST = Tb / (1 + (lambda*Tb/rho) * ln(emissivity)) - 273.15
 // NOTE(review): the constants 0.00115 and 1.438 presumably encode the band-10
 // wavelength and h*c/k (1.438e-2 m·K) in scaled units — confirm against a
 // single-channel LST reference before reusing.
 var LST = thermalband.expression(
   '(Tb/(1 + (0.00115*(Tb/1.438))*log(Ep)))-273.15',{
     'Tb':thermalband.select('B10'),
     'Ep':EM.select('EMM')
     
   }).rename('Land surface Temperature');
   var viz =  {min: 22.298638087799763, max:29.22115281845691, palette: [
'040274', '040281', '0502a3', '0502b8', '0502ce', '0502e6',
'0602ff', '235cb1', '307ef3', '269db1', '30c8e2', '32d3ef',
'3be285', '3ff38f', '86e26f', '3ae237', 'b5e22e', 'd6e21f',
'fff705', 'ffd611', 'ffb613', 'ff8b13', 'ff6e08', 'ff500d',
'ff0000', 'de0101', 'c21301', 'a71001', '911003'
 ]}
   Map.addLayer(LST, viz, 'LST')
   
   
//TIMESSERIES


//print(image_drycollection)

//print(ui.Chart.image.series(LST, ROI, ee.Reducer.mean(), 1000, 'system:time_start'))

// Create the title label.
var title = ui.Label('Shama LandSurface Temperature Map Year of 2022 ');
title.style().set('position', 'top-center');
Map.add(title);

// Credit label. NOTE: 'title' is re-declared here — harmless with var,
// because the first label has already been added to the map.
var title = ui.Label('Prepared by Kangah Desmond');
title.style().set('position', 'bottom-center');
Map.add(title);

// set position of panel
var legend = ui.Panel({
style: {
position: 'bottom-left',
padding: '8px 15px'
}
});

// Create the title label.
//var title = ui.Label('Dhaka City LandSurface Temperature Map Year of 2020 ');
//title.style().set('position', 'top-center');
//Map.add(title);
 
// Create legend title
var legendTitle = ui.Label({
value: 'LandSurface Temperature (degC)',
style: {
fontWeight: 'bold',
fontSize: '18px',
margin: '0 0 4px 0',
padding: '0'
}
});
 
// Add the title to the panel
legend.add(legendTitle);
 
// create the legend image
// NOTE: despite the name, 'lon' holds the LATITUDE band — it is used as a
// vertical 0-100 ramp that is rescaled into the [viz.min, viz.max] range.
var lon = ee.Image.pixelLonLat().select('latitude');
var gradient = lon.multiply((viz.max-viz.min)/100.0).add(viz.min);
var legendImage = gradient.visualize(viz);
 
// create text on top of legend (the max value)
var panel = ui.Panel({
widgets: [
ui.Label(viz['max'])
],
});
 
legend.add(panel);
 
// create thumbnail from the image
var thumbnail = ui.Thumbnail({
image: legendImage,
params: {bbox:'0,0,10,100', dimensions:'10x200'},
style: {padding: '1px', position: 'bottom-center'}
});
 
// add the thumbnail to the legend
legend.add(thumbnail);
 
// create text below the legend (the min value); 'panel' is re-declared
var panel = ui.Panel({
widgets: [
ui.Label(viz['min'])
],
});
 
legend.add(panel);
 
Map.add(legend);

// Create the title label.
//var title = ui.Label('Prepared By: Mijanur Rahman');
//title.style().set('position', 'bottom-right');
//Map.add(title);

Also, Mirza Waleed has a great publication on LST, and a YouTube explanation of the same publication, that you should check out.


Thank you for reading.

Wednesday, January 1, 2025

How to load shapefile into Google Earth Engine (GEE)

 To load a shapefile into Google Earth Engine (GEE), you can use the Code Editor, the Asset Manager, or the command line interface (CLI):

In this guide, I will use the 'Asset Manager' tab and access the shapefile from the Code Editor. Let's get started...

Step 1: Go to code.earthengine.google.com and sign in. Click the “Assets” tab at the top left of the screen, and then click on the red “New” button. Select the “Shape files” option from the dropdown menu.


Step 2: In the pop-up window, click the red “Select” button and navigate to where your shapefile is stored. Select all of the files that make up your shapefile (.shp, .shx, .prj, .dbf, .cpg). You can hold the control key to select multiple files at once. Then click “Open” to add the selected files to the upload list. Update the Asset Name in the Asset ID section if needed. Then, click the “Upload” button at the bottom.



Step 3: Navigate to the 'Tasks' tab; there you will see the submitted task in action. You don't have to do anything — just watch: the task will turn blue when the upload is complete.


Step 4: Click on the name of the newly uploaded shapefile under the “Assets” tab to open its details window, where you will find all the metadata related to it, including its "Table ID". The "Table ID" is what we will copy for use in the JavaScript code editor.



That is it!

Wednesday, December 25, 2024

Preparing Flight Plan for Drone Survey using MissionPlanner

 To conduct a 'Drone Survey' of a site/location, it is required that a proper flight plan be prepared for the success of the survey mission. Some drones do come with their own software for making the flight plan; however, there exist universal tools like 'Mission Planner' that support many types of cameras on drones, thanks to Michael Oborne of ArduPilot.


The 'Mission Planner' software is not only useful for drone surveying, it has many other uses including GNSS RTK (Real-Time Kinematic, Global Navigation Satellite System) processing among others.


Creating the Flight Plan

You will work under the 'PLAN' tab to prepare your 'Flight Plan' for drone surveying.


Switch to the "Plan" tab, then zoom to your study area and position the home point at a suitable location.


Right click and draw polygon over the study area. You may also use the polygon icon to the left for the drawing.



Next, right click and select 'Auto WP' and 'Survey (Grid)'. This will launch a new window where you can select various setting for the drone, camera, forward and side overlaps, images,  etc.

Check the 'Advanced Options' for more configuration settings. When you are satisfied with your settings, click on the 'Accept' button to apply the settings.

Based on the selected study area, type of camera, flying altitude of 120m, camera angle of 360° and flying speed of 20m/s the flight mission has the following statistics:-



Happy Flying!

Sunday, December 22, 2024

Different ways for running python script in QGIS

QGIS was written in  C++, Python and Qt framework.

In this post, I will share with you the common places where you can use or run python script within QGIS. If you already know python and some basics of QGIS, then this will be great exposure to know how to automate different parts of the open source GIS software.

Python in QGIS can be used to interact and automate GIS data in:-

1) Python Console

Plugins >> Python Console (Ctrl+Alt+P)




2) Label Expression



3) Field Calculator



4) Processing Toolbox



5) Plugins



6) Stand alone QGIS Apps



7) Script Runner Plugin



8) Project Event - Open, Close and Save






Thursday, December 12, 2024

Preparing a Flag map of Africa 24 Teams Set For AFCON 2025 In Morocco

 This task consists of two parts, the GIS and Graphic Design parts. To complete the task, I will use two software packages, namely QGIS for the GIS part of the project and GIMP for the graphic design part of the project.

Data Sources:

  • The Map is the world map from QGIS resource data
  • The Flags were made by Hampus Nilsson

Procedures
Step 1: Using the QGIS software, query the world map for African countries and the 24 teams that qualified for the AFCON 2025 as seen below.



Now we know the countries to be designed with flags respectively.

Step 2: Let's make sure the flags we downloaded are available for the 24 countries. The flag source uses two-letter codes to name the flags; this could be challenging, but fortunately there is a JSON file in the directory that associates each two-letter code with the full name of the country.


'BW, 'EG', 'ZR', 'SD', 'ZM', 'NG', 'BJ', 'UG', 'TZ', 'GA', 'CI', 'DZ', 'SN', 'CM', 'GQ', 'BF', 'MZ', 'ZA', 'MA', 'TN', 'AO', 'KM', 'ML', 'ZW'

The 'WB_A2' attribute column matches the names of the flags, so that is what we will use to get the name of the country for each flag.

Saturday, December 7, 2024

How to make country flag map in QGIS

Using graphics software like GIMP/Photoshop, Inkscape/Illustrator, we could fill polygons shapes with an image easily, now how do we do the same in QGIS?


Yes, QGIS is not graphics software, yet it has the capability to fill polygon shapes with an image. Let's make use of a flag image to fill a country's polygon shape. I will use the Nigerian flag and its boundary polygon shape for this demonstration; however, you can pretty much use any image with any polygon shape of your choosing.

In other words, all we are doing is to insert image in shape in QGIS. Lets see how we can achieve it.

First get the image flag and the vector polygon of Nigeria administrative boundaries.

Now, from the polygon's property window, open the Symbology tab and select 'Raster Image Fill' as seen below. You can now set the path to the image, set the size of the image, set its opacity, etc. as you see fit.


One important setting that determines whether the image fills the entire polygon layer or each polygon shape in the layer is the "Coord mode", which can either be set to 'object' or 'viewport'.


With some tweaks and tricks, we can achieve what the graphics tool can do in QGIS. Though this kind of map designs are effectively done in the graphics software.

Happy mapping.

Sunday, December 1, 2024

50 Globe Projections Types in QGIS

 A globe projection, also known as a map projection, is a method for representing the Earth's surface on a flat surface, such as a map or computer screen.

Lets see the following 50 map projections in QGIS software;-
(1) Natural Earth Projection - EPSG: 54077
(2) Sphere Aitoff Projection - EPSG: 53043
(3) World Azimuthal Equidistant Projection - EPSG: 54032
(4) Sphere Behrmann Projection - EPSG: 53017
(5) Sphere Bonne Projection - EPSG: 53024
(6) World Cassini Projection - EPSG: 54028
(7) Sphere Compact Miller Projection - EPSG: 53080
(8) Sphere Craster Parabolic Projection - EPSG: 53046
(9) Equal Area Cylindrical Projection - EPSG: 54034
(10) Eckert-1 Projection - EPSG: 53015
(11) Eckert-2 Projection - EPSG: 53014
(12) Eckert-3 Projection - EPSG: 53013
(13) Eckert-4 Projection - EPSG: 53012
(14) Eckert-5 Projection - EPSG: 53011
(15) Eckert-6 Projection - EPSG: 53010
(16) Sphere Equal Earth Americas Projection - EPSG: 53036
(17) Africa Equidistant Conic Projection - EPSG: 102023
(18) World Equidistant Cylindrical Projection - EPSG: 4088
(19) Sphere Flat Polar Quartic Projection - EPSG: 53045
(20) Gall Stereographic Projection - EPSG: 53016
(21) Gall and Peters Projection - EPSG: 100000
(22) Goode Homolosine Land Projection - EPSG: 54052
(23) Google Maps Global Mercator Projection Projection - EPSG: 900913
(24) MILLER GEOPORTAIL Projection - EPSG: MILLER
(25) North Pole LAEA Alaska Projection Projection - EPSG: 3572
(26) American Samoa Lambert Projection - EPSG: 2155
(27) South Pole Orthographic Projection - EPSG: 102037
(28) North Pole Orthographic Projection - EPSG: 102035
(29) NSIDC EASE-Grid Global Projection - EPSG: 3410
(30) Patterson Projection - EPSG: 53079
(31) Plate Caree Projection - EPSG: 32662
(32) Panama Polyconic Projection - EPSG: 5472
(33) Polyconic Projection - EPSG: 53021
(34) Quartic Authalic Projection - EPSG: 53022
(35) ICS Robinson Projection - EPSG: 102926
(36) Africa Sinusoidal Projection - EPSG: 102011
(37) JAXA Snow Depth Polar Stereographic North Projection - EPSG: 5890
(38) Times Projection - EPSG: 53048
(39) Vanua Levu 1915 Projection - EPSG: 4748
(40) Vertical Perspective Projection - EPSG: 53049
(41) Wagner IV Projection - EPSG: 53074
(42) Winkel I Projection - EPSG: 53018
(43) Winkel II Projection - EPSG: 53019
(44) Winkel Tripel NGS Projection - EPSG: 53042
(45) MAGNA Leticia Amazonas 1994 Projection - EPSG: 102767
(46) Mexico ITRF2008 Projection - EPSG: 6363
(47) Equal Earth Asia Pacific Projection - EPSG: 53037
(48) France Metropolitaine projection Geoportail Projection - EPSG: GEOPORTALFXX
(49) RRAF 1991 Projection - EPSG: 4640
(50) S-JTSK Krovak East North  Projection - EPSG: 102067





Wednesday, September 25, 2024

Fantasy Maps, Fantasy Cartography, Fictional Map-making, or Geofiction

Over the years, I have encountered clients who needed fantasy maps for books or novels which they are writing. These authors usually contract me to develop an imaginative map to complement the characters or world they are building in their storyline.

I will like to call these type of fantasy maps as imaginative map or mystic maps or mythical maps or adventure maps or fictional maps or map of nowhere. Whatever you decide to call such maps, it is basically a map that doesn't represent any location in the real world.

In this post, let's take a look at: What is a fantasy map? What professionals are responsible for making fantasy map? How to make a fantasy map? What technical skills are required for making fantasy maps? What elements are required to be shown on a fantasy map?


What is a fantasy map?

According to Wikipedia, fantasy map is a type of map design that visually presents an imaginary world or concept, or represents a real-world geography in a fantastic style.


Where are fantasy maps use?

Fantasy maps are used in a variety of contexts, primarily where imagination and creativity play a significant role such as in novel/book, Role-Playing Game (RPG), film/drama, or just for fun story telling.


What professionals are responsible for making fantasy map?

Cartographers are the professionals primarily responsible for creating fantasy maps. While the term "cartographer" is often associated with real-world geography, it can also extend to the imaginative realms of fantasy.

In addition to cartographers, artists, writers, and game designers also contribute to the creation of fantasy maps. Artists visualize the map, bringing it to life with illustrations and colors. Writers develop the backstory and lore of the fantasy world, providing context for the map. Game designers incorporate the map into the gameplay, ensuring that it is both visually appealing and functional.


What technical skills are required for making fantasy maps?

Technical Skills for Making Fantasy Maps are;-

  • Drawing and Illustration skills
  • Design and Layout skills
  • Geography and Cartography skills


What elements are required to be shown on a fantasy map?

The Essential Elements for a Fantasy Map include: Landmasses, Bodies of water, Terrain, Borders, Roads, Enchanted forests, Compass rose, Legend or key.


How to make a fantasy map?

Since fantasy maps are not really representing a geographical location in reality, a GIS software won't be necessary. There are tools/software that specializes in making fantasy maps such as:

These tools mainly focus on bringing imagination into maps instead of enforcing geographic location representation.

Also if you are skilled in graphic design software such as Photoshop, GIMP, Inkscape, Coreldraw etc you could make fantasy maps.


Examples of fantasy maps

A fantastic examples of fantasy maps I found is that in the portfolio by Deranged Doctor Design (DDD). On the DDD portfolio page, you will see several examples of how a digital fantasy map was made out of hand drawn imagination.

Another source is the MapEffects.co by Josh Stolarz

Another example is "The Edge Chronicles started with the map"

Also meet Jessica Khoury with beautiful fictional maps in her portfolio.

Monday, September 2, 2024

Extracting image Geographic metadata in Python

 Here I got hundreds of pictures captured during a field trip. Each picture has its GPS or geographical coordinates embedded in its metadata. These pictures' metadata are stored in EXIF standard (read about other standards from this link). EXIF stands for Exchangeable Image File Format. It stores technical information about an image and its capture method, such as exposure settings, capture time, GPS location information and camera model.



The EXIF metadata can contain many attributes (approximately 270 attributes), as listed below. Which attributes are present will depend on the camera's capabilities and the purpose of the picture captured.

InteropIndex, ProcessingSoftware, NewSubfileType, SubfileType, ImageWidth, ImageLength, BitsPerSample, Compression, PhotometricInterpretation, Thresholding, CellWidth, CellLength, FillOrder, DocumentName, ImageDescription, Make, Model, StripOffsets, Orientation, SamplesPerPixel, RowsPerStrip, StripByteCounts, MinSampleValue, MaxSampleValue, XResolution, YResolution, PlanarConfiguration, PageName, FreeOffsets, FreeByteCounts, GrayResponseUnit, GrayResponseCurve, T4Options, T6Options, ResolutionUnit, PageNumber, TransferFunction, Software, DateTime, Artist, HostComputer, Predictor, WhitePoint, PrimaryChromaticities, ColorMap, HalftoneHints, TileWidth, TileLength, TileOffsets, TileByteCounts, SubIFDs, InkSet, InkNames, NumberOfInks, DotRange, TargetPrinter, ExtraSamples, SampleFormat, SMinSampleValue, SMaxSampleValue, TransferRange, ClipPath, XClipPathUnits, YClipPathUnits, Indexed, JPEGTables, OPIProxy, JPEGProc, JpegIFOffset, JpegIFByteCount, JpegRestartInterval, JpegLosslessPredictors, JpegPointTransforms, JpegQTables, JpegDCTables, JpegACTables, YCbCrCoefficients, YCbCrSubSampling, YCbCrPositioning, ReferenceBlackWhite, XMLPacket, RelatedImageFileFormat, RelatedImageWidth, RelatedImageLength, Rating, RatingPercent, ImageID, CFARepeatPatternDim, BatteryLevel, Copyright, ExposureTime, FNumber, IPTCNAA, ImageResources, ExifOffset, InterColorProfile, ExposureProgram, SpectralSensitivity, GPSInfo, ISOSpeedRatings, OECF, Interlace, TimeZoneOffset, SelfTimerMode, SensitivityType, StandardOutputSensitivity, RecommendedExposureIndex, ISOSpeed, ISOSpeedLatitudeyyy, ISOSpeedLatitudezzz, ExifVersion, DateTimeOriginal, DateTimeDigitized, OffsetTime, OffsetTimeOriginal, OffsetTimeDigitized, ComponentsConfiguration, CompressedBitsPerPixel, ShutterSpeedValue, ApertureValue, BrightnessValue, ExposureBiasValue, MaxApertureValue, SubjectDistance, MeteringMode, LightSource, Flash, FocalLength, Noise, ImageNumber, SecurityClassification, ImageHistory, TIFF/EPStandardID, MakerNote, 
UserComment, SubsecTime, SubsecTimeOriginal, SubsecTimeDigitized, AmbientTemperature, Humidity, Pressure, WaterDepth, Acceleration, CameraElevationAngle, XPTitle, XPComment, XPAuthor, XPKeywords, XPSubject, FlashPixVersion, ColorSpace, ExifImageWidth, ExifImageHeight, RelatedSoundFile, ExifInteroperabilityOffset, FlashEnergy, SpatialFrequencyResponse, FocalPlaneXResolution, FocalPlaneYResolution, FocalPlaneResolutionUnit, SubjectLocation, ExposureIndex, SensingMethod, FileSource, SceneType, CFAPattern, CustomRendered, ExposureMode, WhiteBalance, DigitalZoomRatio, FocalLengthIn35mmFilm, SceneCaptureType, GainControl, Contrast, Saturation, Sharpness, DeviceSettingDescription, SubjectDistanceRange, ImageUniqueID, CameraOwnerName, BodySerialNumber, LensSpecification, LensMake, LensModel, LensSerialNumber, CompositeImage, CompositeImageCount, CompositeImageExposureTimes, Gamma, PrintImageMatching, DNGVersion, DNGBackwardVersion, UniqueCameraModel, LocalizedCameraModel, CFAPlaneColor, CFALayout, LinearizationTable, BlackLevelRepeatDim, BlackLevel, BlackLevelDeltaH, BlackLevelDeltaV, WhiteLevel, DefaultScale, DefaultCropOrigin, DefaultCropSize, ColorMatrix1, ColorMatrix2, CameraCalibration1, CameraCalibration2, ReductionMatrix1, ReductionMatrix2, AnalogBalance, AsShotNeutral, AsShotWhiteXY, BaselineExposure, BaselineNoise, BaselineSharpness, BayerGreenSplit, LinearResponseLimit, CameraSerialNumber, LensInfo, ChromaBlurRadius, AntiAliasStrength, ShadowScale, DNGPrivateData, MakerNoteSafety, CalibrationIlluminant1, CalibrationIlluminant2, BestQualityScale, RawDataUniqueID, OriginalRawFileName, OriginalRawFileData, ActiveArea, MaskedAreas, AsShotICCProfile, AsShotPreProfileMatrix, CurrentICCProfile, CurrentPreProfileMatrix, ColorimetricReference, CameraCalibrationSignature, ProfileCalibrationSignature, AsShotProfileName, NoiseReductionApplied, ProfileName, ProfileHueSatMapDims, ProfileHueSatMapData1, ProfileHueSatMapData2, ProfileToneCurve, ProfileEmbedPolicy, 
ProfileCopyright, ForwardMatrix1, ForwardMatrix2, PreviewApplicationName, PreviewApplicationVersion, PreviewSettingsName, PreviewSettingsDigest, PreviewColorSpace, PreviewDateTime, RawImageDigest, OriginalRawFileDigest, SubTileBlockSize, RowInterleaveFactor, ProfileLookTableDims, ProfileLookTableData, OpcodeList1, OpcodeList2, OpcodeList3, NoiseProfile, SpatialFrequencyResponse, SubjectLocation, ExposureIndex, CFAPattern, FlashEnergy

In our case here, the EXIF metadata attribute we are interested in is the GPSInfo. As seen above, it contains the latitude, longitude and altitude of the picture.

Lets extract this attribute information using python.

We are going to use the PIL library to read the EXIF metadata into pandas dataframe. Lets import the required modules.

import glob
import pandas as pd
from PIL import Image, ExifTags
from PIL.ExifTags import TAGS


The dictionary of all available EXIF metadata attributes can be accessed using ExifTags.TAGS as follows:

{1: 'InteropIndex', 11: 'ProcessingSoftware', 254: 'NewSubfileType', 255: 'SubfileType', 256: 'ImageWidth', 257: 'ImageLength', 258: 'BitsPerSample', 259: 'Compression', 262: 'PhotometricInterpretation', 263: 'Thresholding', 264: 'CellWidth', 265: 'CellLength', 266: 'FillOrder', 269: 'DocumentName', 270: 'ImageDescription', 271: 'Make', 272: 'Model', 273: 'StripOffsets', 274: 'Orientation', 277: 'SamplesPerPixel', 278: 'RowsPerStrip', 279: 'StripByteCounts', 280: 'MinSampleValue', 281: 'MaxSampleValue', 282: 'XResolution', 283: 'YResolution', 284: 'PlanarConfiguration', 285: 'PageName', 288: 'FreeOffsets', 289: 'FreeByteCounts', 290: 'GrayResponseUnit', 291: 'GrayResponseCurve', 292: 'T4Options', 293: 'T6Options', 296: 'ResolutionUnit', 297: 'PageNumber', 301: 'TransferFunction', 305: 'Software', 306: 'DateTime', 315: 'Artist', 316: 'HostComputer', 317: 'Predictor', 318: 'WhitePoint', 319: 'PrimaryChromaticities', 320: 'ColorMap', 321: 'HalftoneHints', 322: 'TileWidth', 323: 'TileLength', 324: 'TileOffsets', 325: 'TileByteCounts', 330: 'SubIFDs', 332: 'InkSet', 333: 'InkNames', 334: 'NumberOfInks', 336: 'DotRange', 337: 'TargetPrinter', 338: 'ExtraSamples', 339: 'SampleFormat', 340: 'SMinSampleValue', 341: 'SMaxSampleValue', 342: 'TransferRange', 343: 'ClipPath', 344: 'XClipPathUnits', 345: 'YClipPathUnits', 346: 'Indexed', 347: 'JPEGTables', 351: 'OPIProxy', 512: 'JPEGProc', 513: 'JpegIFOffset', 514: 'JpegIFByteCount', 515: 'JpegRestartInterval', 517: 'JpegLosslessPredictors', 518: 'JpegPointTransforms', 519: 'JpegQTables', 520: 'JpegDCTables', 521: 'JpegACTables', 529: 'YCbCrCoefficients', 530: 'YCbCrSubSampling', 531: 'YCbCrPositioning', 532: 'ReferenceBlackWhite', 700: 'XMLPacket', 4096: 'RelatedImageFileFormat', 4097: 'RelatedImageWidth', 4098: 'RelatedImageLength', 18246: 'Rating', 18249: 'RatingPercent', 32781: 'ImageID', 33421: 'CFARepeatPatternDim', 33423: 'BatteryLevel', 33432: 'Copyright', 33434: 'ExposureTime', 33437: 'FNumber', 33723: 'IPTCNAA', 34377: 
'ImageResources', 34665: 'ExifOffset', 34675: 'InterColorProfile', 34850: 'ExposureProgram', 34852: 'SpectralSensitivity', 34853: 'GPSInfo', 34855: 'ISOSpeedRatings', 34856: 'OECF', 34857: 'Interlace', 34858: 'TimeZoneOffset', 34859: 'SelfTimerMode', 34864: 'SensitivityType', 34865: 'StandardOutputSensitivity', 34866: 'RecommendedExposureIndex', 34867: 'ISOSpeed', 34868: 'ISOSpeedLatitudeyyy', 34869: 'ISOSpeedLatitudezzz', 36864: 'ExifVersion', 36867: 'DateTimeOriginal', 36868: 'DateTimeDigitized', 36880: 'OffsetTime', 36881: 'OffsetTimeOriginal', 36882: 'OffsetTimeDigitized', 37121: 'ComponentsConfiguration', 37122: 'CompressedBitsPerPixel', 37377: 'ShutterSpeedValue', 37378: 'ApertureValue', 37379: 'BrightnessValue', 37380: 'ExposureBiasValue', 37381: 'MaxApertureValue', 37382: 'SubjectDistance', 37383: 'MeteringMode', 37384: 'LightSource', 37385: 'Flash', 37386: 'FocalLength', 37389: 'Noise', 37393: 'ImageNumber', 37394: 'SecurityClassification', 37395: 'ImageHistory', 37398: 'TIFF/EPStandardID', 37500: 'MakerNote', 37510: 'UserComment', 37520: 'SubsecTime', 37521: 'SubsecTimeOriginal', 37522: 'SubsecTimeDigitized', 37888: 'AmbientTemperature', 37889: 'Humidity', 37890: 'Pressure', 37891: 'WaterDepth', 37892: 'Acceleration', 37893: 'CameraElevationAngle', 40091: 'XPTitle', 40092: 'XPComment', 40093: 'XPAuthor', 40094: 'XPKeywords', 40095: 'XPSubject', 40960: 'FlashPixVersion', 40961: 'ColorSpace', 40962: 'ExifImageWidth', 40963: 'ExifImageHeight', 40964: 'RelatedSoundFile', 40965: 'ExifInteroperabilityOffset', 41483: 'FlashEnergy', 41484: 'SpatialFrequencyResponse', 41486: 'FocalPlaneXResolution', 41487: 'FocalPlaneYResolution', 41488: 'FocalPlaneResolutionUnit', 41492: 'SubjectLocation', 41493: 'ExposureIndex', 41495: 'SensingMethod', 41728: 'FileSource', 41729: 'SceneType', 41730: 'CFAPattern', 41985: 'CustomRendered', 41986: 'ExposureMode', 41987: 'WhiteBalance', 41988: 'DigitalZoomRatio', 41989: 'FocalLengthIn35mmFilm', 41990: 'SceneCaptureType', 41991: 
'GainControl', 41992: 'Contrast', 41993: 'Saturation', 41994: 'Sharpness', 41995: 'DeviceSettingDescription', 41996: 'SubjectDistanceRange', 42016: 'ImageUniqueID', 42032: 'CameraOwnerName', 42033: 'BodySerialNumber', 42034: 'LensSpecification', 42035: 'LensMake', 42036: 'LensModel', 42037: 'LensSerialNumber', 42080: 'CompositeImage', 42081: 'CompositeImageCount', 42082: 'CompositeImageExposureTimes', 42240: 'Gamma', 50341: 'PrintImageMatching', 50706: 'DNGVersion', 50707: 'DNGBackwardVersion', 50708: 'UniqueCameraModel', 50709: 'LocalizedCameraModel', 50710: 'CFAPlaneColor', 50711: 'CFALayout', 50712: 'LinearizationTable', 50713: 'BlackLevelRepeatDim', 50714: 'BlackLevel', 50715: 'BlackLevelDeltaH', 50716: 'BlackLevelDeltaV', 50717: 'WhiteLevel', 50718: 'DefaultScale', 50719: 'DefaultCropOrigin', 50720: 'DefaultCropSize', 50721: 'ColorMatrix1', 50722: 'ColorMatrix2', 50723: 'CameraCalibration1', 50724: 'CameraCalibration2', 50725: 'ReductionMatrix1', 50726: 'ReductionMatrix2', 50727: 'AnalogBalance', 50728: 'AsShotNeutral', 50729: 'AsShotWhiteXY', 50730: 'BaselineExposure', 50731: 'BaselineNoise', 50732: 'BaselineSharpness', 50733: 'BayerGreenSplit', 50734: 'LinearResponseLimit', 50735: 'CameraSerialNumber', 50736: 'LensInfo', 50737: 'ChromaBlurRadius', 50738: 'AntiAliasStrength', 50739: 'ShadowScale', 50740: 'DNGPrivateData', 50741: 'MakerNoteSafety', 50778: 'CalibrationIlluminant1', 50779: 'CalibrationIlluminant2', 50780: 'BestQualityScale', 50781: 'RawDataUniqueID', 50827: 'OriginalRawFileName', 50828: 'OriginalRawFileData', 50829: 'ActiveArea', 50830: 'MaskedAreas', 50831: 'AsShotICCProfile', 50832: 'AsShotPreProfileMatrix', 50833: 'CurrentICCProfile', 50834: 'CurrentPreProfileMatrix', 50879: 'ColorimetricReference', 50931: 'CameraCalibrationSignature', 50932: 'ProfileCalibrationSignature', 50934: 'AsShotProfileName', 50935: 'NoiseReductionApplied', 50936: 'ProfileName', 50937: 'ProfileHueSatMapDims', 50938: 'ProfileHueSatMapData1', 50939: 
'ProfileHueSatMapData2', 50940: 'ProfileToneCurve', 50941: 'ProfileEmbedPolicy', 50942: 'ProfileCopyright', 50964: 'ForwardMatrix1', 50965: 'ForwardMatrix2', 50966: 'PreviewApplicationName', 50967: 'PreviewApplicationVersion', 50968: 'PreviewSettingsName', 50969: 'PreviewSettingsDigest', 50970: 'PreviewColorSpace', 50971: 'PreviewDateTime', 50972: 'RawImageDigest', 50973: 'OriginalRawFileDigest', 50974: 'SubTileBlockSize', 50975: 'RowInterleaveFactor', 50981: 'ProfileLookTableDims', 50982: 'ProfileLookTableData', 51008: 'OpcodeList1', 51009: 'OpcodeList2', 51022: 'OpcodeList3', 51041: 'NoiseProfile', 37388: 'SpatialFrequencyResponse', 37396: 'SubjectLocation', 37397: 'ExposureIndex', 33422: 'CFAPattern', 37387: 'FlashEnergy'}

You can export this into a user-friendly spreadsheet format using the following lines of code:

# Dump the full EXIF tag dictionary (numeric tag ID -> tag name) to a
# spreadsheet. Transposing puts the tag IDs on the row index, giving one
# row per tag with its name in a single column.
exif_tag_table = pd.DataFrame([ExifTags.TAGS])
df_exif_tags = exif_tag_table.T
df_exif_tags.to_excel('EXIF.xlsx', index=True)

We can use img._getexif().items() to access the specific EXIF metadata attributes available in a picture after reading it, like so:

# Collect all JPEGs from the field-trip folder and inspect the EXIF
# metadata of the first one.
images = glob.glob(r'Field Trip Map\30-08-2024_05-00-34_7579\*.jpg')
if not images:
    # Fail loudly instead of raising an opaque IndexError below.
    raise FileNotFoundError('No .jpg files found in the folder')

img = Image.open(images[0])
# Use the public Pillow API getexif() rather than the private _getexif():
# it returns an empty Exif mapping (instead of None) when the image
# carries no EXIF data, so .items() is always safe to call.
print(img.getexif().items())

Friday, August 9, 2024

Geo visualization of the 114 oldest secondary schools in Nigeria

The list is given below;-



Let's geocode the schools to obtain their latitude and longitude coordinates. That is what we need for the geo visualization. There are many ways to do the geocoding, including using tools like the Google Maps API, the OpenStreetMap API, etc. The updated table should look like the one below:


Now, you can import the table into any GIS software; here I will use QGIS. If you used an automated process to obtain the coordinates, there will almost certainly be outliers in the result. In my case, I found two outlier results: one fell somewhere in the Republic of Niger while the other was in the Central African Republic. So, I manually adjusted them to their correct locations within Nigeria.


At this point the 'Geo visualization' has been successfully completed, and we can clearly see the concentration of the schools in the south-western part of the country.

The next thing to do is to add more context to the visualization and conduct analysis based on the objective you have in mind. For example, we could use the 'Age' column to create a proportional point symbol map.


We can also add labels from the attribute table, add state boundaries or state capital locations. Statistical plots could also be added as well as other cartographic elements to enrich the visualization.


That is it!