
initial import 0.7.4-dev

tags/v0.7.4
Olivier Courtin 1 month ago
commit c2e7fcb39c
57 changed files with 4767 additions and 2 deletions
1. MANIFEST.in (+5 -0)
2. Makefile (+161 -0)
3. README.md (+192 -2)
4. config.toml (+64 -0)
5. docs/101.md (+114 -0)
6. docs/config.md (+67 -0)
7. docs/img/101/images.png (BIN)
8. docs/img/101/labels.png (BIN)
9. docs/img/101/predict_compare.png (BIN)
10. docs/img/101/predict_compare_side.png (BIN)
11. docs/img/101/predict_masks.png (BIN)
12. docs/img/readme/draw_me_neat_eo.png (BIN)
13. docs/img/readme/stacks.png (BIN)
14. docs/makefile.md (+20 -0)
15. docs/tools.md (+374 -0)
16. neat_eo/__init__.py (+1 -0)
17. neat_eo/core.py (+203 -0)
18. neat_eo/da/__init__.py (+0 -0)
19. neat_eo/da/core.py (+43 -0)
20. neat_eo/da/rgb.py (+39 -0)
21. neat_eo/geojson.py (+82 -0)
22. neat_eo/loaders/__init__.py (+0 -0)
23. neat_eo/loaders/semseg.py (+93 -0)
24. neat_eo/losses/__init__.py (+0 -0)
25. neat_eo/losses/lovasz.py (+47 -0)
26. neat_eo/metrics/IoU.py (+16 -0)
27. neat_eo/metrics/MCC.py (+16 -0)
28. neat_eo/metrics/QoD.py (+23 -0)
29. neat_eo/metrics/__init__.py (+0 -0)
30. neat_eo/metrics/core.py (+59 -0)
31. neat_eo/nn/__init__.py (+0 -0)
32. neat_eo/nn/albunet.py (+105 -0)
33. neat_eo/osm/__init__.py (+0 -0)
34. neat_eo/osm/building.py (+42 -0)
35. neat_eo/osm/road.py (+117 -0)
36. neat_eo/tiles.py (+340 -0)
37. neat_eo/tools/__init__.py (+0 -0)
38. neat_eo/tools/__main__.py (+50 -0)
39. neat_eo/tools/_sat.py (+196 -0)
40. neat_eo/tools/compare.py (+224 -0)
41. neat_eo/tools/cover.py (+213 -0)
42. neat_eo/tools/dataset.py (+72 -0)
43. neat_eo/tools/download.py (+119 -0)
44. neat_eo/tools/eval.py (+152 -0)
45. neat_eo/tools/export.py (+131 -0)
46. neat_eo/tools/extract.py (+35 -0)
47. neat_eo/tools/info.py (+129 -0)
48. neat_eo/tools/predict.py (+143 -0)
49. neat_eo/tools/rasterize.py (+195 -0)
50. neat_eo/tools/subset.py (+83 -0)
51. neat_eo/tools/tile.py (+274 -0)
52. neat_eo/tools/train.py (+253 -0)
53. neat_eo/tools/vectorize.py (+63 -0)
54. neat_eo/web_ui/compare.html (+66 -0)
55. neat_eo/web_ui/leaflet.html (+73 -0)
56. requirements.txt (+18 -0)
57. setup.py (+55 -0)

MANIFEST.in (+5 -0)

@@ -0,0 +1,5 @@
include LICENSE
include requirements.txt
include neat_eo/web_ui/compare.html
include neat_eo/web_ui/leaflet.html
recursive-include data *

Makefile (+161 -0)

@@ -0,0 +1,161 @@
help:
@echo "This Makefile rules are designed for Neat-EO.pink devs and power-users."
@echo "For plain user installation follow README.md instructions, instead."
@echo ""
@echo ""
@echo " make install To install, few Python dev tools and Neat-EO.pink in editable mode."
@echo " So any further Neat-EO.pink Python code modification will be usable at once,"
@echo " throught either neo tools commands or neat_eo.* modules."
@echo ""
@echo " make check Launchs code tests, and tools doc updating."
@echo " Do it, at least, before sending a Pull Request."
@echo ""
@echo " make check_tuto Launchs neo commands embeded in tutorials, to be sure everything still up to date."
@echo " Do it, at least, on each CLI modifications, and before a release."
@echo " NOTA: It takes a while."
@echo ""
@echo " make pink Python code beautifier,"
@echo " as Pink is the new Black ^^"



# Dev install
install:
pip3 install pytest black flake8 twine
pip3 install -e .


# Launch all tests
check: ut it doc
@echo "==================================================================================="
@echo "All tests passed !"
@echo "==================================================================================="


# Python code beautifier
pink:
black -l 125 *.py neat_eo/*.py neat_eo/*/*.py tests/*py tests/*/*.py


# Perform unit tests and linter checks
ut:
@echo "==================================================================================="
black -l 125 --check *.py neat_eo/*.py neat_eo/*/*.py
@echo "==================================================================================="
flake8 --max-line-length 125 --ignore=E203,E241,E226,E272,E261,E221,W503,E722
@echo "==================================================================================="
pytest tests -W ignore::UserWarning


# Launch Integration Tests
it: it_pre it_train it_post


# Integration Tests: Data Preparation
it_pre:
@echo "==================================================================================="
rm -rf it
neo info
neo cover --zoom 18 --bbox 4.8,45.7,4.82,45.72 --out it/cover
neo download --rate 20 --type WMS --url "https://download.data.grandlyon.com/wms/grandlyon?SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&LAYERS=Ortho2015_vue_ensemble_16cm_CC46&WIDTH=512&HEIGHT=512&CRS=EPSG:3857&BBOX={xmin},{ymin},{xmax},{ymax}&FORMAT=image/jpeg" --cover it/cover --out it/images
echo "Download Buildings GeoJSON" && wget --show-progress -q -nc -O it/lyon_roofprint.json "https://download.data.grandlyon.com/wfs/grandlyon?SERVICE=WFS&REQUEST=GetFeature&TYPENAME=ms:fpc_fond_plan_communaut.fpctoit&VERSION=1.1.0&srsName=EPSG:4326&BBOX=4.79,45.69,4.83,45.73&outputFormat=application/json; subtype=geojson" | true
echo "Download Roads GeoJSON" && wget --show-progress -q -nc -O it/lyon_road.json "https://download.data.grandlyon.com/wfs/grandlyon?SERVICE=WFS&VERSION=1.1.0&request=GetFeature&typename=pvo_patrimoine_voirie.pvochausseetrottoir&outputFormat=application/json; subtype=geojson&SRSNAME=EPSG:4326&bbox=`neo cover --dir it/images --type extent`" | true
ogr2ogr -f SQLite it/lyon_road.sqlite it/lyon_road.json -dsco SPATIALITE=YES -t_srs EPSG:3857 -nln roads -lco GEOMETRY_NAME=geom
ogr2ogr -f GeoJSON it/lyon_road_poly.json it/lyon_road.sqlite -dialect sqlite -sql "SELECT Buffer(geom, IFNULL(largeurchaussee, 5.0) / 2.0) AS geom FROM roads"
neo rasterize --type Building --geojson it/lyon_roofprint.json --config config.toml --cover it/cover --out it/labels
neo rasterize --type Road --geojson it/lyon_road_poly.json --config config.toml --cover it/cover --append --out it/labels
neo rasterize --type Building --geojson it/lyon_roofprint.json --config config.toml --cover it/cover --out it/labels_osm
neo cover --dir it/images --splits 80/20 --out it/train/cover it/eval/cover
neo subset --dir it/images --cover it/train/cover --out it/train/images
neo subset --dir it/labels --cover it/train/cover --out it/train/labels
neo subset --dir it/images --cover it/eval/cover --out it/eval/images
neo subset --dir it/labels --cover it/eval/cover --out it/eval/labels
mkdir --parents it/predict/tiff
wget -nc -O it/predict/tiff/1841_5174_08_CC46.tif "https://download.data.grandlyon.com/files/grandlyon/imagerie/ortho2018/ortho/GeoTiff_YcBcR/1km_8cm_CC46/1841_5174_08_CC46.tif"
wget -nc -O it/predict/tiff/1842_5174_08_CC46.tif "https://download.data.grandlyon.com/files/grandlyon/imagerie/ortho2018/ortho/GeoTiff_YcBcR/1km_8cm_CC46/1842_5174_08_CC46.tif"
neo tile --zoom 18 --rasters it/predict/tiff/*.tif --out it/predict/images
neo cover --zoom 18 --dir it/predict/images --out it/predict/cover
echo "Download PBF" && wget -nc -O it/predict/ra.pbf "http://download.geofabrik.de/europe/france/rhone-alpes-latest.osm.pbf" | true
osmium extract --bbox `neo cover --dir it/predict/images --type extent` -o it/predict/lyon.pbf it/predict/ra.pbf
neo extract --type Building --pbf it/predict/lyon.pbf --out it/predict/osm_building.json
neo extract --type Road --pbf it/predict/lyon.pbf --out it/predict/osm_road.json
neo rasterize --type Building --geojson it/predict/osm_building.json --config config.toml --cover it/predict/cover --out it/predict/labels
neo rasterize --type Road --geojson it/predict/osm_road.json --config config.toml --cover it/predict/cover --append --out it/predict/labels



# Integration Tests: Training
it_train:
@echo "==================================================================================="
export CUDA_VISIBLE_DEVICES=0 && neo train --config config.toml --bs 4 --lr 0.00025 --epochs 2 --dataset it/train --classes_weights `neo dataset --mode weights --dataset it/train --config config.toml` --out it/pth
export CUDA_VISIBLE_DEVICES=0,1 && neo train --config config.toml --bs 4 --lr 0.00025 --epochs 4 --resume --checkpoint it/pth/checkpoint-00002.pth --classes_weights auto --dataset it/train --out it/pth
export CUDA_VISIBLE_DEVICES=0,1 && neo train --config config.toml --bs 4 --optimizer AdamW --lr 0.00025 --epochs 6 --resume --checkpoint it/pth/checkpoint-00004.pth --classes_weights auto --dataset it/train --out it/pth
neo eval --config config.toml --bs 4 --checkpoint it/pth/checkpoint-00006.pth --classes_weights auto --dataset it/eval
neo info --checkpoint it/pth/checkpoint-00006.pth


# Integration Tests: Post Training
it_post:
@echo "==================================================================================="
neo export --checkpoint it/pth/checkpoint-00006.pth --type jit --out it/pth/export.jit
neo export --checkpoint it/pth/checkpoint-00006.pth --type onnx --out it/pth/export.onnx
neo predict --config config.toml --checkpoint it/pth/checkpoint-00006.pth --dataset it/predict --out it/predict/masks
neo predict --metatiles --config config.toml --checkpoint it/pth/checkpoint-00006.pth --dataset it/predict --out it/predict/masks_meta
neo predict --metatiles --keep_borders --config config.toml --checkpoint it/pth/checkpoint-00006.pth --dataset it/predict --out it/predict/masks_keep
neo cover --dir it/predict/masks_meta --out it/predict/cover
neo compare --cover it/predict/cover --config config.toml --images it/predict/images it/predict/labels it/predict/masks --mode stack --labels it/predict/labels --masks it/predict/masks_meta --out it/predict/compare
neo compare --cover it/predict/cover --images it/predict/images it/predict/compare --mode side --out it/predict/compare_side
neo compare --cover it/predict/cover --config config.toml --mode list --labels it/predict/labels --max Building QoD 0.50 --masks it/predict/masks_meta --geojson --out it/predict/compare/tiles.json
cp it/predict/compare/tiles.json it/predict/compare_side/tiles.json
neo vectorize --type Building --config config.toml --masks it/predict/masks_meta --out it/predict/building.json
neo vectorize --type Road --config config.toml --masks it/predict/masks_meta --out it/predict/road.json


# Documentation generation (tools and config file)
doc:
@echo "==================================================================================="
@echo "# Neat-EO.pink tools documentation" > docs/tools.md
@for tool in `ls neat_eo/tools/[^_]*py | sed -e 's#.*/##g' -e 's#.py##'`; do \
echo "Doc generation: $$tool"; \
echo "## neo $$tool" >> docs/tools.md; \
echo '```' >> docs/tools.md; \
neo $$tool -h >> docs/tools.md; \
echo '```' >> docs/tools.md; \
done
@echo "Doc generation: config.toml"
@echo "## config.toml" > docs/config.md; \
echo '```' >> docs/config.md; \
cat config.toml >> docs/config.md; \
echo '```' >> docs/config.md;
@echo "Doc generation: Makefile"
@echo "## Makefile" > docs/makefile.md; \
echo '```' >> docs/makefile.md; \
make --no-print-directory >> docs/makefile.md; \
echo '```' >> docs/makefile.md;


# Check neo commands embedded in Documentation
check_doc:
@echo "==================================================================================="
@echo "Checking README:"
@echo "==================================================================================="
@rm -rf ds && sed -n -e '/```bash/,/```/ p' README.md | sed -e '/```/d' > .CHECK && sh .CHECK
@echo "==================================================================================="


# Check neo commands embedded in Tutorials
check_tuto: check_101

check_101:
@echo "==================================================================================="
@echo "Checking 101"
@mkdir -p tuto && cd tuto && mkdir -p 101 && sed -n -e '/```bash/,/```/ p' ../docs/101.md | sed -e '/```/d' > 101/.CHECK && cd 101 && sh .CHECK && cd ..
@cd tuto/101 && tar cf 101.tar train/images train/labels predict/masks predict/compare predict/compare_side



# Send a release on PyPI
pypi:
rm -rf dist Neat-EO.pink.egg-info
python3 setup.py sdist
twine upload dist/* -r pypi

README.md (+192 -2)

@@ -1,3 +1,193 @@
# neat-EO
<a href="https://twitter.com/neat_eo"><img src="https://img.shields.io/badge/Follow-neat_eo-ff69b4.svg" /></a> <a href="https://gitter.im/Neat-EO-pink/community"><img src="https://img.shields.io/gitter/room/Neat-EO-pink/community.svg?color=ff69b4&style=popout" /></a>
<a href="https://pepy.tech/project/neat-eo/week"><img align="right" src="https://pepy.tech/badge/neat-eo/week" /></a>


Efficient AI4EO OpenSource framework
<h1 align='center'>neat-EO</h1>
<h2 align='center'>Efficient AI4EO OpenSource framework</h2>

<p align=center>
<img src="https://pbs.twimg.com/media/DpjonykWwAANpPr.jpg" alt="Neat-EO.pink buildings segmentation from Imagery" />
</p>



Purposes:
---------
- DataSet Quality Analysis
- Change Detection highlighter
- Feature extraction and completion


Main Features:
--------------
- Provides several command line tools that you can combine to build your own workflow
- Follows geospatial standards to ease interoperability and performs fast data preparation
- Built-in implementations of cutting-edge Computer Vision papers
- Supports both RGB and multiband imagery, and allows Data Fusion
- Web UI tools to easily display, highlight or select results (and allow you to use your own templates)
- High performance
- Extensible by design




<img alt="Draw me neat-EO" src="https://raw.githubusercontent.com/datapink/neat-eo.pink/master/docs/img/readme/draw_me_neat_eo.png" />


Documentation:
--------------
### Tutorial:
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/101.md">Learn to use neat-EO, in a couple of hours</a>

### Config file:
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/config.md">neat-EO configuration file</a>

### Tools:

- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-cover">`neo cover`</a> Generates a tile cover, in csv format: X,Y,Z
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-download">`neo download`</a> Downloads tiles from a Web Server (XYZ or WMS)
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-extract">`neo extract`</a> Extracts GeoJSON features from OpenStreetMap .pbf
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-rasterize">`neo rasterize`</a> Rasterize vector features (GeoJSON or PostGIS), to raster tiles
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-subset">`neo subset`</a> Filter images in a slippy map dir using a csv tiles cover
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-tile">`neo tile`</a> Tile a raster coverage
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-train">`neo train`</a> Trains a model on a dataset
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-eval">`neo eval`</a> Evaluates a model on a dataset
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-export">`neo export`</a> Export a model to ONNX or Torch JIT
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-predict">`neo predict`</a> Predict masks, from a dataset, with an already trained model
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-compare">`neo compare`</a> Compute composite images and/or metrics to compare several slippy map dirs
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-vectorize">`neo vectorize`</a> Extract GeoJSON features from predicted masks
- <a href="https://github.com/datapink/neat-eo/blob/master/docs/tools.md#neo-info">`neo info`</a> Prints neat-EO version information
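
As a purely illustrative sketch of how these tools chain together (the `ds/` layout, bbox, zoom level and imagery endpoint below are placeholders, not project defaults):

```bash
neo cover --zoom 18 --bbox 4.8,45.7,4.82,45.72 --out ds/cover
neo download --type XYZ --url "https://tiles.example.com/{z}/{x}/{y}.webp" --cover ds/cover --out ds/images
neo rasterize --type Building --geojson buildings.json --config config.toml --cover ds/cover --out ds/labels
neo train --config config.toml --epochs 10 --dataset ds --out ds/pth
neo predict --config config.toml --checkpoint ds/pth/checkpoint-00010.pth --dataset ds --out ds/masks
neo vectorize --type Building --config config.toml --masks ds/masks --out ds/building.json
```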

### Presentation slides:
- <a href="http://www.datapink.com/presentations/2020-fosdem.pdf">@FOSDEM 2020</a>





Installs:
--------

### With PIP:
```
pip3 install neat-EO
```

### With Ubuntu 19.10, from scratch:

```
# neat-EO [mandatory]
sudo sh -c "apt update && apt install -y build-essential python3-pip"
pip3 install neat-EO && export PATH=$PATH:~/.local/bin

# NVIDIA GPU Drivers [mandatory for train and predict]
wget http://us.download.nvidia.com/XFree86/Linux-x86_64/435.21/NVIDIA-Linux-x86_64-435.21.run
sudo sh NVIDIA-Linux-x86_64-435.21.run -a -q --ui=none

# HTTP Server [for WebUI rendering]
sudo apt install -y apache2 && sudo ln -s ~ /var/www/html/neo
```


### NOTES:
- Requires: Python 3.6 or 3.7
- GPU with VRAM >= 8 GB is mandatory
- To test neat-EO install, launch in a new terminal: `neo info`
- If needed, to remove pre-existing Nouveau driver: ```sudo sh -c "echo blacklist nouveau > /etc/modprobe.d/blacklist-nvidia-nouveau.conf && update-initramfs -u && reboot"```




Architecture:
------------

neat-EO uses cherry-picked Open Source libraries from the Deep Learning, Computer Vision and GIS stacks.

<img alt="Stacks" src="https://raw.githubusercontent.com/datapink/neat-EO/master/docs/img/readme/stacks.png" />



GeoSpatial OpenDataSets:
------------------------
- <a href="https://github.com/chrieke/awesome-satellite-imagery-datasets">Christoph Rieke's Awesome Satellite Imagery Datasets</a>
- <a href="https://zhangbin0917.github.io/2018/06/12/%E9%81%A5%E6%84%9F%E6%95%B0%E6%8D%AE%E9%9B%86/">Zhang Bin, Earth Observation OpenDataset blog</a>

Bibliography:
-------------

- <a href="https://arxiv.org/abs/1912.01703">PyTorch: An Imperative Style, High-Performance Deep Learning Library</a>
- <a href="https://arxiv.org/abs/1505.04597">U-Net: Convolutional Networks for Biomedical Image Segmentation</a>
- <a href="https://arxiv.org/abs/1512.03385">Deep Residual Learning for Image Recognition</a>
- <a href="https://arxiv.org/abs/1806.00844">TernausNetV2: Fully Convolutional Network for Instance Segmentation</a>
- <a href="https://arxiv.org/abs/1705.08790">The Lovász-Softmax loss: A tractable surrogate for the optimization of the IoU measure in neural networks</a>
- <a href="https://arxiv.org/abs/1809.06839">Albumentations: fast and flexible image augmentations</a>










Contributions and Services:
---------------------------

- Pull Requests are welcome! Feel free to send code...
Don't hesitate to start a prior discussion via <a href="https://gitter.im/Neat-EO-pink/community">gitter</a> or a ticket on any implementation question.
Also take a look at the <a href="https://github.com/datapink/neat-EO/blob/master/docs/makefile.md">Makefile rules</a>.

- If you want to collaborate on code production and maintenance on a long-term basis, please get in touch; co-editing with an ad hoc governance model can be considered.

- If you want a new feature, but don't want to implement it, <a href="http://datapink.com">DataPink</a> provides core-dev services.

- Expertise, assistance and training on neat-EO are also provided by <a href="http://datapink.com">DataPink</a>.

- And if you want to support the whole project, because it matters to your own business, funding is also welcome.


### Requests for funding:

We've already identified several new features and research papers that could further improve neat-EO;
your funding would make a difference in implementing them in a coming release:

- Further increase accuracy:
  - on low resolution imagery
  - even with few labels (aka Weakly Supervised)
- Topology handling
- Instance Segmentation
- Further improve performance

- Add support for:
  - Time Series Imagery
  - StreetView Imagery
  - Multi-host scaling
  - Vector post-processing
  - ...






Authors:
--------
- Olivier Courtin <https://github.com/ocourtin>
- Daniel J. Hofmann <https://github.com/daniel-j-h>



Citing:
-------
```
@Manual{,
title = {neat-EO: Efficient AI4EO OpenSource framework},
author = {Olivier Courtin and Daniel J. Hofmann},
organization = {DataPink},
year = {2020},
url = {http://neat-EO.pink},
}
```

config.toml (+64 -0)

@@ -0,0 +1,64 @@
# Neat-EO.pink Configuration


# Input channels configuration
# You can add several channels blocks to compose your input Tensor. Order is meaningful.
#
# name: dataset subdirectory name
# bands: bands to keep from this source. Order is meaningful

[[channels]]
name = "images"
bands = [1, 2, 3]


# Output Classes configuration
# Note: available colors are either CSS3 color names or a #RRGGBB hexadecimal representation.
# Note: the special color name "transparent" can be used on a single class to apply transparency.
# Note: default weight is 1.0 for each class, or 0.0 for a class with the transparent color.

[[classes]]
title = "Background"
color = "transparent"

[[classes]]
title = "Building"
color = "deeppink"

[[classes]]
title = "Road"
color = "deepskyblue"


[model]
# Neural Network name
nn = "Albunet"

# Encoder name
encoder = "resnet50"

# Dataset loader name
loader = "SemSeg"

# Model internal input tile size [W, H]
#ts = [512, 512]


[train]
# Pretrained Encoder
#pretrained = true

# Batch size
#bs = 4

# Data Augmentation to apply to the whole input tensor, with associated probability
da = {name="RGB", p=1.0}

# Loss function name
loss = "Lovasz"

# Eval Metrics
metrics = ["IoU", "MCC", "QoD"]

# Optimizer, cf https://pytorch.org/docs/stable/optim.html
#optimizer = {name="Adam", lr=0.0001}
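
For reference, the neo tools can pick this file up in three ways, mirroring `load_config` in `neat_eo/core.py` further below: an explicit `--config` flag, the `NEO_CONFIG` environment variable, or a `~/.neo_config` fallback. A quick sketch with illustrative paths:

```bash
neo train --config config.toml --dataset ds --out ds/pth    # explicit flag

export NEO_CONFIG=config.toml                               # per-session environment variable
neo train --dataset ds --out ds/pth

cp config.toml ~/.neo_config                                # per-user default file
neo train --dataset ds --out ds/pth
```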

docs/101.md (+114 -0)

@@ -0,0 +1,114 @@
# Neat-EO.pink 101

This tutorial allows you, in about 2 hours, to experiment with basic neat-EO usage.


Check Neat-EO.pink installation and GPU
---------------------------------------
```bash
neo info
```


Retrieve DataSet (Subset of <a href="https://www.drivendata.org/competitions/60/building-segmentation-disaster-resilience">Open Cities AI Challenge 2020</a>)
---------------------------------
```bash
wget -nc http://www.datapink.net/neo/101/ds.tar
tar xf ds.tar
```

Configuration file:
-------------------

```bash
echo '
# Inputs
[[channels]]
name = "images"
bands = [1, 2, 3]

# Outputs
[[classes]]
title = "Background"
color = "transparent"

[[classes]]
title = "Building"
color = "deeppink"

# AI stuff
[model]
nn = "Albunet"
loader = "SemSeg"
encoder = "resnet50"

[train]
bs = 4
loss = "Lovasz"
da = {name="RGB", p=1.0}
optimizer = {name="Adam", lr=0.000025}
metrics = ["QoD"]

' > 101.toml


export NEO_CONFIG=101.toml
```


Tile Imagery:
-------------
```bash
neo tile --zoom 19 --bands 1,2,3 --nodata_threshold 25 --rasters train/*/*[^-]/*tif --out train/images
```
<a href="http://www.datapink.net/neo/101/train/images/"><img src="img/101/images.png" /></a>


Retrieve and tile labels accordingly:
-------------------------------------

```bash
neo cover --dir train/images --out train/cover.csv
neo rasterize --geojson train/*/*-labels/*.geojson --type Building --cover train/cover.csv --out train/labels
```
<a href="http://www.datapink.net/neo/101/train/labels/"><img src="img/101/labels.png" /></a>



Launch training:
-----------------

```bash
neo train --dataset train --epochs 5 --out model
neo eval --checkpoint model/checkpoint-00005.pth --dataset train
```


Retrieve, prepare and predict on a new imagery:
-----------------------------------------------
```bash
neo tile --zoom 19 --bands 1,2,3 --nodata_threshold 25 --rasters predict/*/*[^-]/*tif --out predict/images
neo predict --checkpoint model/checkpoint-00005.pth --dataset predict --metatiles --out predict/masks
```
<a href="http://www.datapink.net/neo/101/predict/masks/leaflet.html"><img src="img/101/predict_masks.png" /></a>

Compare our trained model prediction against labels:
----------------------------------------------------
```bash
neo cover --dir predict/masks --out predict/cover.csv
neo rasterize --geojson predict/*/*-labels/*.geojson --type Building --cover predict/cover.csv --out predict/labels
neo compare --mode stack --images predict/images predict/labels predict/masks --cover predict/cover.csv --out predict/compare
neo compare --mode list --labels predict/labels --masks predict/masks --max Building QoD 0.80 --cover predict/cover.csv --geojson --out predict/compare/tiles.json
```
<a href="http://www.datapink.net/neo/101/predict/compare/"><img src="img/101/predict_compare.png" /></a>

```bash
neo compare --mode side --images predict/images predict/compare --cover predict/cover.csv --out predict/compare_side
```
<a href="http://www.datapink.net/neo/101/predict/compare_side/"><img src="img/101/predict_compare_side.png" /></a>

Vectorize results:
------------------
```bash
neo vectorize --masks predict/masks --type Building --out predict/building.json
```

docs/config.md (+67 -0)

@@ -0,0 +1,67 @@
## config.toml
```
# Neat-EO.pink Configuration


# Input channels configuration
# You can add several channels blocks to compose your input Tensor. Order is meaningful.
#
# name: dataset subdirectory name
# bands: bands to keep from this source. Order is meaningful

[[channels]]
name = "images"
bands = [1, 2, 3]


# Output Classes configuration
# Note: available colors are either CSS3 color names or a #RRGGBB hexadecimal representation.
# Note: the special color name "transparent" can be used on a single class to apply transparency.
# Note: default weight is 1.0 for each class, or 0.0 for a class with the transparent color.

[[classes]]
title = "Background"
color = "transparent"

[[classes]]
title = "Building"
color = "deeppink"

[[classes]]
title = "Road"
color = "deepskyblue"


[model]
# Neural Network name
nn = "Albunet"

# Encoder name
encoder = "resnet50"

# Dataset loader name
loader = "SemSeg"

# Model internal input tile size [W, H]
#ts = [512, 512]


[train]
# Pretrained Encoder
#pretrained = true

# Batch size
#bs = 4

# Data Augmentation to apply to the whole input tensor, with associated probability
da = {name="RGB", p=1.0}

# Loss function name
loss = "Lovasz"

# Eval Metrics
metrics = ["IoU", "MCC", "QoD"]

# Optimizer, cf https://pytorch.org/docs/stable/optim.html
#optimizer = {name="Adam", lr=0.0001}
```

docs/img/101/images.png (BIN)

Width: 1337  |  Height: 729  |  Size: 2.3MB

docs/img/101/labels.png (BIN)

Width: 939  |  Height: 701  |  Size: 79KB

docs/img/101/predict_compare.png (BIN)

Width: 1129  |  Height: 735  |  Size: 1.7MB

docs/img/101/predict_compare_side.png (BIN)

Width: 1047  |  Height: 526  |  Size: 617KB

docs/img/101/predict_masks.png (BIN)

Width: 1028  |  Height: 682  |  Size: 178KB

docs/img/readme/draw_me_neat_eo.png (BIN)

Width: 870  |  Height: 533  |  Size: 75KB

docs/img/readme/stacks.png (BIN)

Width: 901  |  Height: 621  |  Size: 124KB

docs/makefile.md (+20 -0)

@@ -0,0 +1,20 @@
## Makefile
```
These Makefile rules are designed for Neat-EO.pink devs and power-users.
For a plain user installation, follow the README.md instructions instead.


 make install      Installs a few Python dev tools and Neat-EO.pink in editable mode.
                   So any further Neat-EO.pink Python code modification will be usable at once,
                   through either neo tool commands or neat_eo.* modules.

 make check        Launches code tests, and tools doc updating.
                   Do it, at least, before sending a Pull Request.

 make check_tuto   Launches neo commands embedded in tutorials, to be sure everything is still up to date.
                   Do it, at least, on each CLI modification, and before a release.
                   NOTE: It takes a while.

 make pink         Python code beautifier,
                   as Pink is the new Black ^^
```

docs/tools.md (+374 -0)

@@ -0,0 +1,374 @@
# Neat-EO.pink tools documentation
## neo compare
```
usage: neo compare [-h] [--mode {side,stack,list}] [--labels LABELS]
[--masks MASKS] [--config CONFIG]
[--images IMAGES [IMAGES ...]] [--cover COVER]
[--workers WORKERS] [--min MIN MIN MIN] [--max MAX MAX MAX]
[--vertical] [--geojson] [--format FORMAT] [--out OUT]
[--web_ui_base_url WEB_UI_BASE_URL]
[--web_ui_template WEB_UI_TEMPLATE] [--no_web_ui]

optional arguments:
-h, --help show this help message and exit

Inputs:
--mode {side,stack,list} compare mode [default: side]
--labels LABELS path to tiles labels directory [required for metrics filtering]
--masks MASKS path to tiles masks directory [required for metrics filtering]
--config CONFIG path to config file [required for metrics filtering, if no global config setting]
--images IMAGES [IMAGES ...] path to images directories [required for stack or side modes]
--cover COVER path to csv tiles cover file, to filter tiles to compare [optional]
--workers WORKERS number of workers [default: CPU]

Metrics Filtering:
--min MIN MIN MIN skip tile if class below metric value [0-1] (e.g --min Building QoD 0.7)
--max MAX MAX MAX skip tile if class above metric value [0-1] (e.g --max Building IoU 0.85)

Outputs:
--vertical output vertical image aggregate [optional for side mode]
--geojson output results as GeoJSON [optional for list mode]
--format FORMAT output images file format [default: webp]
--out OUT output path

Web UI:
--web_ui_base_url WEB_UI_BASE_URL alternate Web UI base URL
--web_ui_template WEB_UI_TEMPLATE alternate Web UI template path
--no_web_ui deactivate Web UI output
```
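
For instance, along the lines of the project's integration tests (the `predict/` paths are placeholders), composite tiles and a filtered tile list could be produced with:

```bash
# Stack images, labels and masks into composite tiles, then list tiles whose Building QoD falls below 0.8
neo compare --mode stack --config config.toml --cover predict/cover --images predict/images predict/labels predict/masks --labels predict/labels --masks predict/masks --out predict/compare
neo compare --mode list --config config.toml --cover predict/cover --labels predict/labels --masks predict/masks --max Building QoD 0.80 --geojson --out predict/compare/tiles.json
```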
## neo cover
```
usage: neo cover [-h] [--dir DIR] [--bbox BBOX]
[--geojson GEOJSON [GEOJSON ...]] [--cover COVER]
[--raster RASTER [RASTER ...]] [--sql SQL] [--pg PG]
[--no_xyz] [--zoom ZOOM] [--type {cover,extent,geojson}]
[--union] [--splits SPLITS] [--out [OUT [OUT ...]]]

optional arguments:
-h, --help show this help message and exit

Input [one among the following is required]:
--dir DIR plain tiles dir path
--bbox BBOX a lat/lon bbox: xmin,ymin,xmax,ymax or a bbox: xmin,ymin,xmax,ymax,EPSG:xxxx
--geojson GEOJSON [GEOJSON ...] path to GeoJSON features files
--cover COVER a cover file path
--raster RASTER [RASTER ...] a raster file path
--sql SQL SQL to retrieve geometry features (e.g SELECT geom FROM a_table)

Spatial DataBase [required with --sql input]:
--pg PG PostgreSQL dsn using psycopg2 syntax (e.g 'dbname=db user=postgres')

Tiles:
--no_xyz if set, tiles are not expected to be XYZ based.

Outputs:
--zoom ZOOM zoom level of tiles [required, except with --dir or --cover inputs]
--type {cover,extent,geojson} Output type (default: cover)
--union if set, union adjacent tiles, imply --type geojson
--splits SPLITS if set, shuffle and split in several cover subpieces (e.g 50/15/35)
--out [OUT [OUT ...]] cover output paths [required except with --type extent]
```
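
As an illustration (bbox, zoom and paths are placeholders), a cover can be generated from a bbox, or derived from an existing tile directory and split into train/eval subsets:

```bash
neo cover --zoom 18 --bbox 4.8,45.7,4.82,45.72 --out ds/cover
neo cover --dir ds/images --splits 80/20 --out ds/train/cover ds/eval/cover
```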
## neo dataset
```
usage: neo dataset [-h] [--config CONFIG] --dataset DATASET [--cover COVER]
[--workers WORKERS] [--mode {check,weights}]

optional arguments:
-h, --help show this help message and exit
--config CONFIG path to config file [required, if no global config setting]
--dataset DATASET dataset path [required]
--cover COVER path to csv tiles cover file, to filter tiles dataset on [optional]
--workers WORKERS number of workers [default: CPU]
--mode {check,weights} dataset mode [default: check]
```
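
A possible usage sketch (the `ds/train` path is a placeholder): check a dataset's consistency, then compute per-class weights to pass to `neo train --classes_weights`:

```bash
neo dataset --config config.toml --dataset ds/train --mode check
neo dataset --config config.toml --dataset ds/train --mode weights
```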
## neo download
```
usage: neo download [-h] --url URL [--type {XYZ,WMS}] [--rate RATE]
[--timeout TIMEOUT] [--workers WORKERS] --cover COVER
[--format FORMAT] --out OUT
[--web_ui_base_url WEB_UI_BASE_URL]
[--web_ui_template WEB_UI_TEMPLATE] [--no_web_ui]

optional arguments:
-h, --help show this help message and exit

Web Server:
--url URL URL server endpoint, with: {z}/{x}/{y} or {xmin},{ymin},{xmax},{ymax} [required]
--type {XYZ,WMS} service type [default: XYZ]
--rate RATE download rate limit in max requests/seconds [default: 10]
--timeout TIMEOUT download request timeout (in seconds) [default: 10]
--workers WORKERS number of workers [default: same as --rate value]

Coverage to download:
--cover COVER path to .csv tiles list [required]

Output:
--format FORMAT file format to save images in [default: webp]
--out OUT output directory path [required]

Web UI:
--web_ui_base_url WEB_UI_BASE_URL alternate Web UI base URL
--web_ui_template WEB_UI_TEMPLATE alternate Web UI template path
--no_web_ui deactivate Web UI output
```
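
A hedged sketch with a placeholder XYZ endpoint (substitute a real tile server URL and your own cover file):

```bash
neo download --type XYZ --url "https://tiles.example.com/{z}/{x}/{y}.webp" --rate 10 --cover ds/cover --out ds/images
```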
## neo eval
```
usage: neo eval [-h] [--config CONFIG] --dataset DATASET [--cover COVER]
[--classes_weights CLASSES_WEIGHTS]
[--tiles_weights TILES_WEIGHTS] [--loader LOADER] [--bs BS]
[--metrics METRICS [METRICS ...]] --checkpoint CHECKPOINT
[--workers WORKERS]

optional arguments:
-h, --help show this help message and exit
--config CONFIG path to config file [required, if no global config setting]

Dataset:
--dataset DATASET dataset path [required]
--cover COVER path to csv tiles cover file, to filter tiles dataset on [optional]
--classes_weights CLASSES_WEIGHTS classes weights separated with comma or 'auto' [optional]
--tiles_weights TILES_WEIGHTS path to csv tiles cover file, with special weights on [optional]
--loader LOADER dataset loader name [if set override config file value]

Eval:
--bs BS batch size
--metrics METRICS [METRICS ...] metric name (e.g QoD IoU MCC)
--checkpoint CHECKPOINT path to model checkpoint.
--workers WORKERS number of pre-processing images workers, per GPU [default: batch size]
```
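
For example, mirroring the integration tests (checkpoint and dataset paths are placeholders):

```bash
neo eval --config config.toml --bs 4 --checkpoint ds/pth/checkpoint-00010.pth --classes_weights auto --dataset ds/eval
```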
## neo export
```
usage: neo export [-h] --checkpoint CHECKPOINT [--type {onnx,jit,pth}]
[--nn NN] [--loader LOADER] [--doc_string DOC_STRING]
[--shape_in SHAPE_IN] [--shape_out SHAPE_OUT]
[--encoder ENCODER] --out OUT

optional arguments:
-h, --help show this help message and exit

Inputs:
--checkpoint CHECKPOINT model checkpoint to load [required]
--type {onnx,jit,pth} output type [default: onnx]

To set or override metadata pth parameters:
--nn NN nn name
--loader LOADER nn loader
--doc_string DOC_STRING nn documentation abstract
--shape_in SHAPE_IN nn shape in (e.g 3,512,512)
--shape_out SHAPE_OUT nn shape_out (e.g 2,512,512)
--encoder ENCODER nn encoder (e.g resnet50)

Output:
--out OUT path to save export model to [required]
```
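
For example (the checkpoint path is a placeholder), the same checkpoint can be exported to both supported runtime formats:

```bash
neo export --checkpoint ds/pth/checkpoint-00010.pth --type onnx --out ds/pth/export.onnx
neo export --checkpoint ds/pth/checkpoint-00010.pth --type jit --out ds/pth/export.jit
```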
## neo extract
```
usage: neo extract [-h] --type TYPE --pbf PBF --out OUT

optional arguments:
-h, --help show this help message and exit

Inputs:
--type TYPE OSM feature type to extract (e.g Building, Road) [required]
--pbf PBF path to .osm.pbf file [required]

Output:
--out OUT GeoJSON output file path [required]
```
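
For example (the `.pbf` path is a placeholder for any OpenStreetMap extract):

```bash
neo extract --type Building --pbf region.osm.pbf --out osm_building.json
neo extract --type Road --pbf region.osm.pbf --out osm_road.json
```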
## neo info
```
usage: neo info [-h] [--version] [--processes] [--checkpoint CHECKPOINT]

optional arguments:
-h, --help show this help message and exit
--version if set, output Neat-EO.pink version only
--processes if set, output GPU processes list
--checkpoint CHECKPOINT if set with a .pth path, output related model metadata

Usages:
To kill GPU processes: neo info --processes | xargs sudo kill -9
```
## neo predict
```
usage: neo predict [-h] [--dataset DATASET] --checkpoint CHECKPOINT
[--config CONFIG] [--cover COVER] --out OUT [--metatiles]
[--keep_borders] [--bs BS] [--workers WORKERS]
[--web_ui_base_url WEB_UI_BASE_URL]
[--web_ui_template WEB_UI_TEMPLATE] [--no_web_ui]

optional arguments:
-h, --help show this help message and exit

Inputs:
--dataset DATASET predict dataset directory path [required]
--checkpoint CHECKPOINT path to the trained model to use [required]
--config CONFIG path to config file [required, if no global config setting]
--cover COVER path to csv tiles cover file, to filter tiles to predict [optional]

Outputs:
--out OUT output directory path [required]
--metatiles if set, use surrounding tiles to avoid margin effects
--keep_borders if set, with --metatiles, force borders tiles to be kept

Performances:
--bs BS batch size [default: CPU/GPU]
--workers WORKERS number of pre-processing images workers, per GPU [default: batch_size]

Web UI:
--web_ui_base_url WEB_UI_BASE_URL alternate Web UI base URL
--web_ui_template WEB_UI_TEMPLATE alternate Web UI template path
--no_web_ui deactivate Web UI output
```
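
For example, in line with the integration tests (checkpoint and dataset paths are placeholders), predicting with metatiles to avoid margin effects:

```bash
neo predict --metatiles --config config.toml --checkpoint ds/pth/checkpoint-00010.pth --dataset ds/predict --out ds/predict/masks
```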
## neo rasterize
```
usage: neo rasterize [-h] --cover COVER [--config CONFIG] --type TYPE
[--geojson GEOJSON [GEOJSON ...]] [--sql SQL] [--pg PG]
[--buffer BUFFER] --out OUT [--append] [--ts TS]
[--workers WORKERS] [--web_ui_base_url WEB_UI_BASE_URL]
[--web_ui_template WEB_UI_TEMPLATE] [--no_web_ui]

optional arguments:
-h, --help show this help message and exit

Inputs [either --sql or --geojson is required]:
--cover COVER path to csv tiles cover file [required]
--config CONFIG path to config file [required, if no global config setting]
--type TYPE type of features to rasterize (i.e class title) [required]
--geojson GEOJSON [GEOJSON ...] path to GeoJSON features files
--sql SQL SQL to retrieve geometry features [e.g SELECT geom FROM table WHERE ST_Intersects(TILE_GEOM, geom)]
--pg PG If set, override config PostgreSQL dsn.
--buffer BUFFER Add a Geometrical Buffer around each Features (distance in meter)

Outputs:
--out OUT output directory path [required]
--append Append to existing tile if any, useful for multiclass labels
--ts TS output tile size [default: 512,512]

Performances:
--workers WORKERS number of workers [default: CPU]

Web UI:
--web_ui_base_url WEB_UI_BASE_URL alternate Web UI base URL
--web_ui_template WEB_UI_TEMPLATE alternate Web UI template path
--no_web_ui deactivate Web UI output
```
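
For example (GeoJSON and output paths are placeholders), rasterizing buildings and then appending roads onto the same label tiles:

```bash
neo rasterize --type Building --geojson buildings.json --config config.toml --cover ds/cover --out ds/labels
neo rasterize --type Road --geojson roads.json --config config.toml --cover ds/cover --append --out ds/labels
```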
## neo subset
```
usage: neo subset [-h] --dir DIR --cover COVER [--copy] [--delete] [--quiet]
[--out [OUT]] [--web_ui_base_url WEB_UI_BASE_URL]
[--web_ui_template WEB_UI_TEMPLATE] [--no_web_ui]

optional arguments:
-h, --help show this help message and exit

Inputs:
--dir DIR XYZ tiles input dir path [required]
--cover COVER path to csv cover file to filter dir by [required]

Alternate modes, as default is to create relative symlinks:
--copy copy tiles from input to output
--delete delete tiles listed in cover

Output:
--quiet if set, suppress warning output
--out [OUT] output dir path [required for copy]

Web UI:
--web_ui_base_url WEB_UI_BASE_URL alternate Web UI base URL
--web_ui_template WEB_UI_TEMPLATE alternate Web UI template path
--no_web_ui deactivate Web UI output
```
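
For example (paths are placeholders), materializing a train split from a cover produced by `neo cover --splits`:

```bash
neo subset --dir ds/images --cover ds/train/cover --out ds/train/images
neo subset --dir ds/labels --cover ds/train/cover --out ds/train/labels
```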
## neo tile
```
usage: neo tile [-h] --rasters RASTERS [RASTERS ...] [--cover COVER]
[--bands BANDS] --zoom ZOOM [--ts TS] [--nodata [0-255]]
[--nodata_threshold [0-100]] [--keep_borders]
[--format FORMAT] --out OUT [--label] [--config CONFIG]
[--workers WORKERS] [--web_ui_base_url WEB_UI_BASE_URL]
[--web_ui_template WEB_UI_TEMPLATE] [--no_web_ui]

optional arguments:
-h, --help show this help message and exit

Inputs:
--rasters RASTERS [RASTERS ...] path to raster files to tile [required]
--cover COVER path to csv tiles cover file, to filter tiles to tile [optional]
--bands BANDS list of 1-n index bands to select (e.g 1,2,3) [optional]

Output:
--zoom ZOOM zoom level of tiles [required]
--ts TS tile size in pixels [default: 512,512]
--nodata [0-255] nodata pixel value, used by default to remove tiles on the coverage border [default: 0]
--nodata_threshold [0-100] Skip tile if nodata pixel ratio > threshold. [default: 100]
--keep_borders keep tiles even if borders are empty (nodata)
--format FORMAT file format to save images in (e.g jpeg)
--out OUT output directory path [required]

Labels:
--label if set, generate label tiles
--config CONFIG path to config file [required with --label, if no global config setting]

Performances:
--workers WORKERS number of workers [default: raster files]

Web UI:
--web_ui_base_url WEB_UI_BASE_URL alternate Web UI base URL
--web_ui_template WEB_UI_TEMPLATE alternate Web UI template path
--no_web_ui deactivate Web UI output
```
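
For example (raster paths are placeholders), tiling GeoTIFFs at zoom 18, keeping the first three bands and skipping tiles that are mostly nodata:

```bash
neo tile --zoom 18 --bands 1,2,3 --nodata_threshold 25 --rasters ds/raw/*.tif --out ds/images
```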
## neo train
```
usage: neo train [-h] [--config CONFIG] --dataset DATASET [--cover COVER]
[--classes_weights CLASSES_WEIGHTS]
[--tiles_weights TILES_WEIGHTS] [--loader LOADER] [--bs BS]
[--lr LR] [--ts TS] [--nn NN] [--encoder ENCODER]
[--optimizer OPTIMIZER] [--loss LOSS] [--epochs EPOCHS]
[--resume] [--checkpoint CHECKPOINT] [--workers WORKERS]
[--saving SAVING] --out OUT

optional arguments:
-h, --help show this help message and exit
--config CONFIG path to config file [required, if no global config setting]

Dataset:
--dataset DATASET train dataset path [required]
--cover COVER path to csv tiles cover file, to filter tiles dataset on [optional]
--classes_weights CLASSES_WEIGHTS classes weights separated with comma or 'auto' [optional]
--tiles_weights TILES_WEIGHTS path to csv tiles cover file, to apply weights on [optional]
--loader LOADER dataset loader name [if set override config file value]

Hyper Parameters [if set override config file value]:
--bs BS batch size
--lr LR learning rate
--ts TS tile size
--nn NN neural network name
--encoder ENCODER encoder name
--optimizer OPTIMIZER optimizer name
--loss LOSS model loss

Training:
--epochs EPOCHS number of epochs to train
--resume resume model training; if set, a checkpoint must be provided
--checkpoint CHECKPOINT path to a model checkpoint. To fine tune or resume a training
--workers WORKERS number of pre-processing images workers, per GPU [default: batch size]

Output:
--saving SAVING number of epochs between checkpoint saving [default: 1]
--out OUT output directory path to save checkpoint and logs [required]
```
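
For example, echoing the integration tests (paths and hyper-parameters are placeholders): a short first run, then resuming from its checkpoint with automatic class weights:

```bash
neo train --config config.toml --bs 4 --lr 0.00025 --epochs 2 --dataset ds/train --out ds/pth
neo train --config config.toml --bs 4 --lr 0.00025 --epochs 4 --resume --checkpoint ds/pth/checkpoint-00002.pth --classes_weights auto --dataset ds/train --out ds/pth
```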
## neo vectorize
```
usage: neo vectorize [-h] --masks MASKS --type TYPE [--config CONFIG] --out
OUT

optional arguments:
-h, --help show this help message and exit

Inputs:
--masks MASKS input masks directory path [required]
--type TYPE type of features to extract (i.e class title) [required]
--config CONFIG path to config file [required, if no global config setting]

Outputs:
--out OUT path to output file to store features in [required]
```
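
For example (mask and output paths are placeholders), turning predicted masks back into GeoJSON features:

```bash
neo vectorize --type Building --config config.toml --masks ds/predict/masks --out ds/predict/building.json
```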

neat_eo/__init__.py (+1 -0)

@@ -0,0 +1 @@
__version__ = "0.7.4-dev"

neat_eo/core.py (+203 -0)

@@ -0,0 +1,203 @@
import os
import sys
import glob
import toml
from importlib import import_module

import re
import colorsys
import webcolors
from pathlib import Path

from neat_eo.tiles import tile_pixel_to_location, tiles_to_geojson


#
# Import module
#
def load_module(module):
    module = import_module(module)
    assert module, "Unable to import module {}".format(module)
    return module


#
# Config
#
def load_config(path):
    """Loads a dictionary from configuration file."""

    if not path and "NEO_CONFIG" in os.environ:
        path = os.environ["NEO_CONFIG"]
    if not path and os.path.isfile(os.path.expanduser("~/.neo_config")):
        path = "~/.neo_config"
    assert path, "Either ~/.neo_config or NEO_CONFIG env var or --config parameter, is required."

    config = toml.load(os.path.expanduser(path))
    assert config, "Unable to parse config file"

    # Set default values

    if "model" not in config.keys():
        config["model"] = {}

    if "ts" not in config["model"].keys():
        config["model"]["ts"] = (512, 512)

    if "train" not in config.keys():
        config["train"] = {}

    if "pretrained" not in config["train"].keys():
        config["train"]["pretrained"] = True

    if "bs" not in config["train"].keys():
        config["train"]["bs"] = 4

    if "auth" not in config.keys():
        config["auth"] = {}

    if "da" in config["train"].keys():
        config["train"]["da"] = dict(config["train"]["da"])  # dict is serializable

    if "optimizer" in config["train"].keys():
        config["train"]["optimizer"] = dict(config["train"]["optimizer"])  # dict is serializable
    else:
        config["train"]["optimizer"] = {"name": "Adam", "lr": 0.0001}

    assert "classes" in config.keys(), "CONFIG: Classes are mandatory"
    for c, classe in enumerate(config["classes"]):
        config["classes"][c]["weight"] = config["classes"][c]["weight"] if "weight" in config["classes"][c].keys() else 1.0
        if config["classes"][c]["color"] == "transparent" and "weight" not in config["classes"][c].keys():
            config["classes"][c]["weight"] = 0.0

    return config


def check_channels(config):
    assert "channels" in config.keys(), "CONFIG: At least one Channel is mandatory"

    # TODO


def check_classes(config):
    """Check if config file classes subpart is consistent. Exit on error if not."""

    assert "classes" in config.keys() and len(config["classes"]) >= 2, "CONFIG: At least 2 Classes are mandatory"

    for classe in config["classes"]:
        assert "title" in classe.keys() and len(classe["title"]), "CONFIG: Missing or Empty classes.title value"
        assert "color" in classe.keys() and check_color(classe["color"]), "CONFIG: Missing or Invalid classes.color value"


def check_model(config):

    assert "model" in config.keys(), "CONFIG: Missing or Invalid model"

    # TODO


#
# Logs
#
class Logs:
    def __init__(self, path, out=sys.stderr):
        """Create a logs instance on a logs file."""

        self.fp = None
        self.out = out
        if path:
            if not os.path.isdir(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path), exist_ok=True)
            self.fp = open(path, mode="a")

    def log(self, msg):
        """Log a new message to the opened logs file, and optionally on stdout or stderr too."""
        if self.fp:
            self.fp.write(msg + os.linesep)
            self.fp.flush()

        if self.out:
            print(msg, file=self.out)


#
# Colors
#
def make_palette(colors, complementary=False):
    """Builds a PNG PIL color palette from Classes CSS3 color names, or hex value patterns as #RRGGBB."""

    assert 0 < len(colors) < 256

    try:
        transparency = [key for key, color in enumerate(colors) if color == "transparent"][0]
    except:
        transparency = None

    colors = ["white" if color.lower() == "transparent" else color for color in colors]
    hex_colors = [webcolors.CSS3_NAMES_TO_HEX[color.lower()] if color[0] != "#" else color for color in colors]
    rgb_colors = [(int(h[1:3], 16), int(h[3:5], 16), int(h[5:7], 16)) for h in hex_colors]

    palette = list(sum(rgb_colors, ()))  # flatten
    palette = palette if not complementary else complementary_palette(palette)

    return palette, transparency


def complementary_palette(palette):
    """Creates a PNG PIL complementary colors palette based on an initial PNG PIL palette."""

    comp_palette = []
    colors = [palette[i : i + 3] for i in range(0, len(palette), 3)]

    for color in colors:
        r, g, b = [v for v in color]
        h, s, v = colorsys.rgb_to_hsv(r, g, b)
        comp_palette.extend(map(int, colorsys.hsv_to_rgb((h + 0.5) % 1, s, v)))