diff --git a/docs/_toc.yml b/docs/_toc.yml
index 8000614..209f5d8 100755
--- a/docs/_toc.yml
+++ b/docs/_toc.yml
@@ -15,4 +15,3 @@ parts:
chapters:
- file: notebooks/Tutorials/UrbanAreas_tutorials.ipynb
- file: notebooks/Tutorials/LEI_Example.ipynb
-
diff --git a/docs/conf.py b/docs/conf.py
index 08d061d..5867762 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -24,7 +24,7 @@
"sphinx_jupyterbook_latex",
"sphinx.ext.napoleon",
"sphinxcontrib.apidoc",
- #"nbsphinx"
+ # "nbsphinx"
]
external_toc_exclude_missing = True
external_toc_path = "_toc.yml"
diff --git a/notebooks/Implementations/MENA_Benchmarking/NTL_zonal_stats.ipynb b/notebooks/Implementations/MENA_Benchmarking/NTL_zonal_stats.ipynb
new file mode 100644
index 0000000..0b0a63c
--- /dev/null
+++ b/notebooks/Implementations/MENA_Benchmarking/NTL_zonal_stats.ipynb
@@ -0,0 +1,271 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Benchmarking cities in MENA\n",
+ "\n",
+ "In support of an upcoming Urban flagship report, the MENA team is looking for a series of zonal statistics:\n",
+ "\n",
+ "- Nighttime Lights, Population, and built-area: \n",
+ " - Entire FUA \n",
+ " - Its associated urban center / “core” \n",
+ " - Associated “periphery” \n",
+ "\n",
+    "The unit of analysis is the Functional Urban Areas (FUAs) from the [UCDB Database](https://human-settlement.emergency.copernicus.eu/ghs_stat_ucdb2015mt_r2019a.php). For each FUA, we need to grab the associated urban periphery (lower-threshold urban areas)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys\n",
+ "import os\n",
+ "\n",
+ "import geopandas as gpd\n",
+ "\n",
+ "\n",
+ "sys.path.append(\"C:/WBG/Work/Code/GOSTrocks/src\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 83,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "data_folder = \"C:/WBG/Work/data\"\n",
+ "ucdb_file = os.path.join(\n",
+ " data_folder,\n",
+ " \"URBAN\",\n",
+ " \"GHS_STAT_UCDB2015MT_GLOBE_R2019A\",\n",
+ " \"GHS_STAT_UCDB2015MT_GLOBE_R2019A_V1_2.gpkg\",\n",
+ ")\n",
+ "fua_file = os.path.join(\n",
+ " data_folder, \"URBAN\", \"GHS_FUA_UCDB2015_GLOBE_R2019A_54009_1K_V1_0.gpkg\"\n",
+ ")\n",
+ "\n",
+ "out_folder = \"C:/WBG/Work/MENA_Urban/\"\n",
+ "urban_res_folder = os.path.join(out_folder, \"urban_data\")\n",
+ "\n",
+ "for out_folder in [urban_res_folder]:\n",
+ " if not os.path.exists(out_folder):\n",
+ " os.makedirs(out_folder)\n",
+ "\n",
+ "urban_periphary_file = os.path.join(urban_res_folder, \"urban_periphary.gpkg\")\n",
+ "urban_core_file = os.path.join(urban_res_folder, \"urban_core.gpkg\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 108,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "inD = gpd.read_file(ucdb_file)\n",
+ "inF = gpd.read_file(fua_file)\n",
+ "inD = inD.to_crs(inF.crs)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 85,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " eFUA_ID | \n",
+ " UC_num | \n",
+ " UC_IDs | \n",
+ " eFUA_name | \n",
+ " Commuting | \n",
+ " Cntry_ISO | \n",
+ " Cntry_name | \n",
+ " FUA_area | \n",
+ " UC_area | \n",
+ " FUA_p_2015 | \n",
+ " UC_p_2015 | \n",
+ " Com_p_2015 | \n",
+ " geometry | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " 1.0 | \n",
+ " 1.0 | \n",
+ " 5959 | \n",
+ " Herat | \n",
+ " 1.0 | \n",
+ " AFG | \n",
+ " Afghanistan | \n",
+ " 139.0 | \n",
+ " 60.0 | \n",
+ " 888811.175807 | \n",
+ " 844574.564331 | \n",
+ " 44236.611476 | \n",
+ " MULTIPOLYGON (((5529000.000 4155000.000, 55310... | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " 192.0 | \n",
+ " 1.0 | \n",
+ " 5964 | \n",
+ " Guzarah | \n",
+ " 1.0 | \n",
+ " AFG | \n",
+ " Afghanistan | \n",
+ " 32.0 | \n",
+ " 16.0 | \n",
+ " 169489.573231 | \n",
+ " 160204.260864 | \n",
+ " 9285.312366 | \n",
+ " MULTIPOLYGON (((5541000.000 4139000.000, 55420... | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " 354.0 | \n",
+ " 1.0 | \n",
+ " 5968 | \n",
+ " Shindand | \n",
+ " 0.0 | \n",
+ " AFG | \n",
+ " Afghanistan | \n",
+ " 12.0 | \n",
+ " 12.0 | \n",
+ " 147553.403870 | \n",
+ " 147553.403870 | \n",
+ " 0.000000 | \n",
+ " MULTIPOLYGON (((5573000.000 4030000.000, 55740... | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " 505.0 | \n",
+ " 1.0 | \n",
+ " 5970 | \n",
+ " Qala i Naw | \n",
+ " 0.0 | \n",
+ " AFG | \n",
+ " Afghanistan | \n",
+ " 3.0 | \n",
+ " 3.0 | \n",
+ " 79809.722656 | \n",
+ " 79809.722656 | \n",
+ " 0.000000 | \n",
+ " MULTIPOLYGON (((5592000.000 4221000.000, 55900... | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " 648.0 | \n",
+ " 1.0 | \n",
+ " 5973 | \n",
+ " Farah | \n",
+ " 1.0 | \n",
+ " AFG | \n",
+ " Afghanistan | \n",
+ " 32.0 | \n",
+ " 16.0 | \n",
+ " 131508.797060 | \n",
+ " 122843.460327 | \n",
+ " 8665.336733 | \n",
+ " MULTIPOLYGON (((5607000.000 3923000.000, 56080... | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " eFUA_ID UC_num UC_IDs eFUA_name Commuting Cntry_ISO Cntry_name \\\n",
+ "0 1.0 1.0 5959 Herat 1.0 AFG Afghanistan \n",
+ "1 192.0 1.0 5964 Guzarah 1.0 AFG Afghanistan \n",
+ "2 354.0 1.0 5968 Shindand 0.0 AFG Afghanistan \n",
+ "3 505.0 1.0 5970 Qala i Naw 0.0 AFG Afghanistan \n",
+ "4 648.0 1.0 5973 Farah 1.0 AFG Afghanistan \n",
+ "\n",
+ " FUA_area UC_area FUA_p_2015 UC_p_2015 Com_p_2015 \\\n",
+ "0 139.0 60.0 888811.175807 844574.564331 44236.611476 \n",
+ "1 32.0 16.0 169489.573231 160204.260864 9285.312366 \n",
+ "2 12.0 12.0 147553.403870 147553.403870 0.000000 \n",
+ "3 3.0 3.0 79809.722656 79809.722656 0.000000 \n",
+ "4 32.0 16.0 131508.797060 122843.460327 8665.336733 \n",
+ "\n",
+ " geometry \n",
+ "0 MULTIPOLYGON (((5529000.000 4155000.000, 55310... \n",
+ "1 MULTIPOLYGON (((5541000.000 4139000.000, 55420... \n",
+ "2 MULTIPOLYGON (((5573000.000 4030000.000, 55740... \n",
+ "3 MULTIPOLYGON (((5592000.000 4221000.000, 55900... \n",
+ "4 MULTIPOLYGON (((5607000.000 3923000.000, 56080... "
+ ]
+ },
+ "execution_count": 85,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "inF.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 113,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "fua_peripheries = inF.copy()\n",
+ "for idx, row in inF.iterrows():\n",
+ " # grab the related UCDBs\n",
+ " ucdb_ids = row[\"UC_IDs\"].split(\";\")\n",
+ " ucdb_ids = [int(x) for x in ucdb_ids]\n",
+ " sel_cores = inD.loc[inD[\"ID_HDC_G0\"].isin(ucdb_ids)]\n",
+ " periphery_geom = row[\"geometry\"].difference(sel_cores.unary_union)\n",
+ " fua_peripheries.loc[idx, \"geometry\"] = periphery_geom\n",
+ "\n",
+ "fua_peripheries.to_file(os.path.join(out_folder, \"FUA_peripheries.gpkg\"), driver=\"GPKG\")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "urban_test",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.2"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/notebooks/Implementations/URB_LKA_CCDR_Support/LKA_Urban_Comp.ipynb b/notebooks/Implementations/URB_LKA_CCDR_Support/LKA_Urban_Comp.ipynb
new file mode 100644
index 0000000..41fd046
--- /dev/null
+++ b/notebooks/Implementations/URB_LKA_CCDR_Support/LKA_Urban_Comp.ipynb
@@ -0,0 +1,155 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "9fd5e3dd",
+ "metadata": {},
+ "source": [
+ "# Urbanization comparison\n",
+ "\n",
+ "Compare the multiple urban extents in the project\n",
+ "\n",
+ "1. Official administrative boundaries provided by the project team. \n",
+ " a. Need to get metadata on these boundaries from Swati \n",
+ "2. Official administrative boundaries with buffers \n",
+ " a. Why these specific buffer distances? \n",
+ "3. DoU boundaries \n",
+ " a. Which population source \n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9c6da02f",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "import rasterio\n",
+ "\n",
+ "import pandas as pd\n",
+ "import geopandas as gpd\n",
+ "\n",
+ "import GOSTurban.UrbanRaster as urban"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "df1bf2ed",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "base_folder = \"C:/WBG/Work/LKA_URB_CCDR/Data\"\n",
+ "urban_folder = os.path.join(base_folder, \"Urban_extents\")\n",
+ "population_folder = os.path.join(base_folder, \"Population\")\n",
+ "\n",
+ "worldpop_file = os.path.join(population_folder, \"lka_ppp_2020_UNadj.tif\")\n",
+ "combo_extents = os.path.join(urban_folder, \"combo_extents.shp\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b135cd57",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Combine all the urban extents into a single file\n",
+ "if not os.path.exists(combo_extents):\n",
+ " urban_extents = [\n",
+ " x\n",
+ " for x in os.listdir(os.path.join(urban_folder, \"Final_AoI\"))\n",
+ " if x.endswith(\".shp\")\n",
+ " ]\n",
+ " all_extents = []\n",
+ " for urban_extent in urban_extents:\n",
+ " urban_extent_gdf = gpd.read_file(\n",
+ " os.path.join(urban_folder, \"Final_AoI\", urban_extent)\n",
+ " )\n",
+ " type = urban_extent.split(\"_\")[1].replace(\".shp\", \"\").lower()\n",
+ " if type == \"colombo\":\n",
+ " type = \"city\"\n",
+ " if \"buffer\" in urban_extent:\n",
+ " type = \"20km\"\n",
+ " urban_extent_gdf[\"Type\"] = type\n",
+ " all_extents.append(urban_extent_gdf)\n",
+ "\n",
+ " all_extents_gdf = gpd.GeoDataFrame(pd.concat(all_extents, ignore_index=True))\n",
+ " all_extents_gdf.to_file(combo_extents)\n",
+ "else:\n",
+ " all_extents_gdf = gpd.read_file(combo_extents)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "97add9be",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# extract urban extents from population grids\n",
+ "for pop_layer, name in [[worldpop_file, \"WorldPop\"]]:\n",
+ " urb_out_file = os.path.join(urban_folder, \"DOU_urb_extent_%s.shp\" % name)\n",
+ " hd_out_file = os.path.join(urban_folder, \"DOU_hd_extent_%s.shp\" % name)\n",
+ " if not os.path.exists(hd_out_file):\n",
+ " curPop = rasterio.open(pop_layer)\n",
+ " urban_calculator = urban.urbanGriddedPop(curPop)\n",
+ " urban_extents = urban_calculator.calculateUrban(\n",
+ " densVal=3, totalPopThresh=5000, smooth=False, queen=False, verbose=True\n",
+ " )\n",
+ " urban_extents[\"Type\"] = name\n",
+ " urban_extents.to_file(urb_out_file)\n",
+ "\n",
+ " hd_urban_extents = urban_calculator.calculateUrban(\n",
+ " densVal=15,\n",
+ " totalPopThresh=50000,\n",
+ " smooth=True,\n",
+ " queen=True, # high density extents use queen's case contiguity, and\n",
+ " verbose=True,\n",
+ " ) # High density extents have hole smoothing applied.\n",
+ " hd_urban_extents[\"Type\"] = name\n",
+ " hd_urban_extents.to_file(hd_out_file)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "34d495aa",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "dir(GOSTurban)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "50115a0d",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/Implementations/URB_LKA_CCDR_Support/README.md b/notebooks/Implementations/URB_LKA_CCDR_Support/README.md
new file mode 100644
index 0000000..3d58bb8
--- /dev/null
+++ b/notebooks/Implementations/URB_LKA_CCDR_Support/README.md
@@ -0,0 +1,11 @@
+# Urbanization for CCDR
+The Sri Lanka CCDR team is looking for support in urban deep dives on 9 provincial capitals. The following analysis will be completed:
+
+| Analysis | Phase | Notebook | Plan |
+| --- | --- | --- | --- |
+| Urbanization analysis | 1 | | Compare official and buffered boundaries to quantified boundaries (DoU); summarize population within all boundaries |
+| Urban expansion | 1 | | Summarize GHSL within urban extents |
+| Urban Poverty | 3 | | Analyze high-resolution satellite imagery to identify informality in residential settlements |
+| Urban economics | 2 | | Summarize monthly nighttime lights |
+| Status of infrastructure | 2 | | TBD |
+| GHG emissions | 1 | | Summarize Brian B's GHG emissions data |
diff --git a/notebooks/Replications/URB_NovelUrbanization.py b/notebooks/Replications/URB_NovelUrbanization.py
index a31150b..4897bf6 100644
--- a/notebooks/Replications/URB_NovelUrbanization.py
+++ b/notebooks/Replications/URB_NovelUrbanization.py
@@ -1,39 +1,43 @@
-import sys, os, shutil, requests
+import sys
+import os
+import shutil
+import requests
import rasterio
-import pandas as pd
-import geopandas as gpd
-import numpy as np
import GOSTurban.UrbanRaster as urban
+
def download_pop_file(url, filename):
# Open the url
r = requests.get(url)
# Set decode_content value to True, otherwise the downloaded image file's size will be zero.
r.raw.decode_content = True
# Open a local file with wb ( write binary ) permission.
- with open(filename,'wb') as f:
+ with open(filename, "wb") as f:
shutil.copyfileobj(r.raw, f)
+
def main(iso3, out_folder):
# download the population data
- wp_url = f'https://data.worldpop.org/GIS/Population/Global_2000_2020_1km/2020/{iso3.upper()}/{iso3.lower()}_ppp_2020_1km_Aggregated.tif'
- print (wp_url)
+ wp_url = f"https://data.worldpop.org/GIS/Population/Global_2000_2020_1km/2020/{iso3.upper()}/{iso3.lower()}_ppp_2020_1km_Aggregated.tif"
+ print(wp_url)
if not os.path.exists(out_folder):
os.makedirs(out_folder)
- out_file = os.path.join(out_folder, f'{iso3}_ppp_2020_1km_Aggregated.tif')
+ out_file = os.path.join(out_folder, f"{iso3}_ppp_2020_1km_Aggregated.tif")
out_urban = os.path.join(out_folder, "urban_extents.geojson")
out_hd_urban = os.path.join(out_folder, "hd_urban_extents.geojson")
-
+
try:
if not os.path.exists(out_file):
download_pop_file(wp_url, out_file)
except:
print(f"Could not download national population data for {iso3} from {wp_url}")
- print("If you can manually download to the defined out_folder, the script will run")
+ print(
+ "If you can manually download to the defined out_folder, the script will run"
+ )
if os.path.exists(out_file):
- inR = rasterio.open(out_file)
+ inR = rasterio.open(out_file)
urban_calculator = urban.urbanGriddedPop(inR)
urban_extents = urban_calculator.calculateUrban(
densVal=300, totalPopThresh=5000, smooth=False, queen=False
@@ -43,14 +47,14 @@ def main(iso3, out_folder):
densVal=1500,
totalPopThresh=50000,
smooth=True,
- queen=True, # high density extents use queen's case contiguity, and are smoothed
+ queen=True, # high density extents use queen's case contiguity, and are smoothed
)
urban_extents.to_file(out_urban, driver="GeoJSON")
hd_urban_extents.to_file(out_hd_urban, driver="GeoJSON")
+
if __name__ == "__main__":
iso3 = sys.argv[1]
out_folder = sys.argv[2]
main(iso3, out_folder)
-
diff --git a/notebooks/Tutorials/LEI_Example.ipynb b/notebooks/Tutorials/LEI_Example.ipynb
index 2a78b22..b9f0bea 100644
--- a/notebooks/Tutorials/LEI_Example.ipynb
+++ b/notebooks/Tutorials/LEI_Example.ipynb
@@ -24,20 +24,16 @@
"outputs": [],
"source": [
"import os\n",
- "import sys\n",
- "import importlib\n",
"import rasterio\n",
"import rasterio.features\n",
"\n",
"import geopandas as gpd\n",
"import pandas as pd\n",
- "import numpy as np\n",
"\n",
"import GOSTrocks.rasterMisc as rMisc\n",
"import GOSTrocks.ghslMisc as ghslMisc\n",
"import GOSTurban.LEI as lei\n",
"import GOSTrocks.mapMisc as mapMisc\n",
- "import GOSTrocks.rasterMisc as rMisc\n",
"\n",
"%load_ext autoreload\n",
"%autoreload 2"
@@ -71,18 +67,22 @@
" temp_folder = \"C:/Temp\"\n",
" # clip from global GHSL file\n",
" ghsl_folder = \"J:/Data/GLOBAL/GHSL/built\"\n",
- " ghsl_files = [os.path.join(ghsl_folder, x) for x in os.listdir(ghsl_folder) if x.endswith(\".tif\")] \n",
+ " ghsl_files = [\n",
+ " os.path.join(ghsl_folder, x)\n",
+ " for x in os.listdir(ghsl_folder)\n",
+ " if x.endswith(\".tif\")\n",
+ " ]\n",
" inA = gpd.read_file(aoi_file)\n",
- " \n",
+ "\n",
" temp_ghsl_files = []\n",
" for ghsl_file in ghsl_files:\n",
" temp_file = os.path.join(temp_folder, os.path.basename(ghsl_file))\n",
" temp_ghsl_files.append(temp_file)\n",
" if not os.path.exists(temp_file):\n",
" rMisc.clipRaster(rasterio.open(ghsl_file), inA, temp_file)\n",
- " \n",
+ "\n",
" ghsl_res, ghsl_profile = ghslMisc.combine_ghsl_annual(temp_ghsl_files)\n",
- " with rasterio.open(input_ghsl, 'w', **ghsl_profile) as outR:\n",
+ " with rasterio.open(input_ghsl, \"w\", **ghsl_profile) as outR:\n",
" outR.write_band(1, ghsl_res)"
]
},
@@ -116,7 +116,7 @@
"source": [
"ghsl_r = rasterio.open(input_ghsl)\n",
"ghsl_d = ghsl_r.read()\n",
- "ghsl_d[ghsl_d == ghsl_r.meta['nodata']] = 0\n",
+ "ghsl_d[ghsl_d == ghsl_r.meta[\"nodata\"]] = 0\n",
"\n",
"thresh = list(range(1975, 2031, 5))\n",
"with rMisc.create_rasterio_inmemory(ghsl_r.profile, ghsl_d) as temp_ghsl:\n",
@@ -218,8 +218,14 @@
],
"source": [
"# This calculates the change from 1990 and 2000\n",
- "lei_raw = lei.calculate_LEI(input_ghsl, old_list=list(range(1975,1991,5)), new_list=list(range(1995,2001,5)))\n",
- "lei_90_00 = gpd.GeoDataFrame(pd.DataFrame(lei_raw, columns=[\"geometry\", \"old\", \"total\"]), geometry='geometry', crs=ghsl_r.crs)\n",
+ "lei_raw = lei.calculate_LEI(\n",
+ " input_ghsl, old_list=list(range(1975, 1991, 5)), new_list=list(range(1995, 2001, 5))\n",
+ ")\n",
+ "lei_90_00 = gpd.GeoDataFrame(\n",
+ " pd.DataFrame(lei_raw, columns=[\"geometry\", \"old\", \"total\"]),\n",
+ " geometry=\"geometry\",\n",
+ " crs=ghsl_r.crs,\n",
+ ")\n",
"lei_90_00[\"LEI\"] = lei_90_00[\"old\"] / lei_90_00[\"total\"]\n",
"\n",
"lei_90_00.head()"
@@ -260,10 +266,12 @@
}
],
"source": [
- "#Map LEI results\n",
- "leap_val=0.30\n",
- "exp_val=0.70\n",
- "lei_90_00['area'] = lei_90_00['geometry'].apply(lambda x: x.area)\n",
+ "# Map LEI results\n",
+ "leap_val = 0.30\n",
+ "exp_val = 0.70\n",
+ "lei_90_00[\"area\"] = lei_90_00[\"geometry\"].apply(lambda x: x.area)\n",
+ "\n",
+ "\n",
"def calculate_LEI(val, leap_val, exp_val):\n",
" if val <= leap_val:\n",
" return 3\n",
@@ -271,8 +279,14 @@
" return 2\n",
" else:\n",
" return 1\n",
- "lei_90_00[\"class\"] = lei_90_00[\"LEI\"].apply(lambda x: calculate_LEI(x, leap_val, exp_val))\n",
- "mapMisc.static_map_vector(lei_90_00, \"class\", edgecolor='match', colormap=\"Dark2\")#, basemap=ctx.providers.CartoDB.Voyager)"
+ "\n",
+ "\n",
+ "lei_90_00[\"class\"] = lei_90_00[\"LEI\"].apply(\n",
+ " lambda x: calculate_LEI(x, leap_val, exp_val)\n",
+ ")\n",
+ "mapMisc.static_map_vector(\n",
+ " lei_90_00, \"class\", edgecolor=\"match\", colormap=\"Dark2\"\n",
+ ") # , basemap=ctx.providers.CartoDB.Voyager)"
]
},
{
@@ -395,7 +409,9 @@
],
"source": [
"# This calculates the change from 2000 and 2014\n",
- "lei_raw = lei.calculate_LEI(input_ghsl, old_list=list(range(1975,2011,5)), new_list=list(range(2015,2030,5)))\n",
+ "lei_raw = lei.calculate_LEI(\n",
+ " input_ghsl, old_list=list(range(1975, 2011, 5)), new_list=list(range(2015, 2030, 5))\n",
+ ")\n",
"lei_00_14 = pd.DataFrame(lei_raw, columns=[\"geometry\", \"old\", \"total\"])\n",
"lei_00_14[\"LEI\"] = lei_00_14[\"old\"] / lei_00_14[\"total\"]\n",
"lei_00_14.head()"
@@ -436,10 +452,12 @@
}
],
"source": [
- "#Map LEI results\n",
- "leap_val=0.30\n",
- "exp_val=0.70\n",
- "lei_90_00['area'] = lei_90_00['geometry'].apply(lambda x: x.area)\n",
+ "# Map LEI results\n",
+ "leap_val = 0.30\n",
+ "exp_val = 0.70\n",
+ "lei_90_00[\"area\"] = lei_90_00[\"geometry\"].apply(lambda x: x.area)\n",
+ "\n",
+ "\n",
"def calculate_LEI(val, leap_val, exp_val):\n",
" if val <= leap_val:\n",
" return 3\n",
@@ -447,8 +465,14 @@
" return 2\n",
" else:\n",
" return 1\n",
- "lei_90_00[\"class\"] = lei_90_00[\"LEI\"].apply(lambda x: calculate_LEI(x, leap_val, exp_val))\n",
- "mapMisc.static_map_vector(lei_90_00, \"class\", edgecolor='match', colormap=\"Dark2\")#, basemap=ctx.providers.CartoDB.Voyager)"
+ "\n",
+ "\n",
+ "lei_90_00[\"class\"] = lei_90_00[\"LEI\"].apply(\n",
+ " lambda x: calculate_LEI(x, leap_val, exp_val)\n",
+ ")\n",
+ "mapMisc.static_map_vector(\n",
+ " lei_90_00, \"class\", edgecolor=\"match\", colormap=\"Dark2\"\n",
+ ") # , basemap=ctx.providers.CartoDB.Voyager)"
]
},
{
diff --git a/notebooks/Tutorials/UrbanAreas_tutorials.ipynb b/notebooks/Tutorials/UrbanAreas_tutorials.ipynb
index 1d20c25..8ba47e3 100644
--- a/notebooks/Tutorials/UrbanAreas_tutorials.ipynb
+++ b/notebooks/Tutorials/UrbanAreas_tutorials.ipynb
@@ -100,7 +100,7 @@
}
],
"source": [
- "mapMisc.static_map_raster(inR, thresh=[1,5,50,100,300,1000,3000])"
+ "mapMisc.static_map_raster(inR, thresh=[1, 5, 50, 100, 300, 1000, 3000])"
]
},
{
@@ -223,7 +223,7 @@
"urban_extents = urban_calculator.calculateUrban(\n",
" densVal=300, totalPopThresh=5000, smooth=False, queen=False, verbose=True\n",
")\n",
- "urban_extents['Type'] = 1\n",
+ "urban_extents[\"Type\"] = 1\n",
"urban_extents.head()"
]
},
@@ -406,7 +406,7 @@
" queen=True, # high density extents use queen's case contiguity, and\n",
" verbose=True,\n",
") # High density extents have hole smoothing applied.\n",
- "hd_urban_extents['Type'] = 2\n",
+ "hd_urban_extents[\"Type\"] = 2\n",
"hd_urban_extents.head()"
]
},
@@ -491,7 +491,7 @@
],
"source": [
"combo_extents = pd.concat([urban_extents, hd_urban_extents])\n",
- "mapMisc.static_map_vector(combo_extents, \"Type\", colormap='magma')"
+ "mapMisc.static_map_vector(combo_extents, \"Type\", colormap=\"magma\")"
]
},
{
diff --git a/requirements.txt b/requirements.txt
index abbc506..f1a28c7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,5 +10,3 @@ elevation
geojson
hatch
git
-
-
diff --git a/src/GOSTurban/LEI.py b/src/GOSTurban/LEI.py
index 5ee1cf0..5293a34 100755
--- a/src/GOSTurban/LEI.py
+++ b/src/GOSTurban/LEI.py
@@ -195,4 +195,4 @@ def calculate_LEI(val, leap_val, exp_val):
res["class"] = res["LEI"].apply(lambda x: calculate_LEI(x, leap_val, exp_val))
xx = res.groupby("class")
- return xx['area'].sum()
+ return xx["area"].sum()