Skip to content

Commit

Permalink
Use deepforest config file and deepforest's boxes_to_shapefile
Browse files Browse the repository at this point in the history
  • Loading branch information
henrykironde committed Apr 1, 2024
1 parent 214fc17 commit a43369a
Show file tree
Hide file tree
Showing 4 changed files with 44 additions and 35 deletions.
40 changes: 40 additions & 0 deletions deepforest_config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Config file for DeepForest pytorch module

# Cpu workers for data loaders
# Dataloaders
workers: 1
devices: 1
accelerator: 'gpu'
batch_size: 1

# Model Architecture
architecture: 'retinanet'
num_classes: 1
nms_thresh: 0.05

# Architecture specific params
retinanet:
  # Non-max suppression of overlapping predictions
score_thresh: 0.1

train:
csv_file:
root_dir:

# Optimizer initial learning rate
lr: 0.001

# Number of training epochs
epochs: 1
# Useful debugging flag in pytorch lightning, set to True to get a single batch of training to test settings.
fast_dev_run: False
# pin images to GPU memory for fast training. This depends on GPU size and number of images.
preload_images: False

validation:
# callback args
csv_file:
root_dir:
# Intersection over union evaluation
iou_threshold: 0.4
val_accuracy_interval: 20
6 changes: 3 additions & 3 deletions everglades_dryrun_workflow.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#SBATCH [email protected]
#SBATCH --mail-type=FAIL
#SBATCH --gpus=a100:1
#SBATCH --cpus-per-task=10
#SBATCH --cpus-per-task=3
#SBATCH --mem=200gb
#SBATCH --time=01:30:00
#SBATCH --partition=gpu
Expand All @@ -23,5 +23,5 @@ cd /blue/ewhite/everglades/EvergladesTools/Zooniverse

snakemake --unlock
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] Starting Snakemake pipeline"
snakemake --printshellcmds --keep-going --cores 10 --resources gpu=1 --rerun-incomplete --latency-wait 1 --use-conda
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] End"
snakemake --printshellcmds --keep-going --cores 3 --resources gpu=1 --rerun-incomplete --latency-wait 1 --use-conda
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] End"
2 changes: 1 addition & 1 deletion everglades_workflow.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,4 @@ cd /blue/ewhite/everglades/EvergladesTools/Zooniverse
snakemake --unlock
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] Starting Snakemake pipeline"
snakemake --printshellcmds --keep-going --cores 30 --resources gpu=1 --rerun-incomplete --latency-wait 10 --use-conda
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] End"
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] End"
31 changes: 0 additions & 31 deletions predict.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,37 +11,6 @@
from deepforest.utilities import boxes_to_shapefile


def project(raster_path, boxes):
    """
    Convert image (pixel) coordinates into a geospatial object overlapping the input raster.

    Args:
        raster_path: path to the raster .tif on disk. Assumed to have a valid spatial projection.
        boxes: a prediction pandas dataframe from deepforest.predict_tile() with
            "xmin", "ymin", "xmax", "ymax" pixel-coordinate columns. NOTE: the
            dataframe is modified in place.

    Returns:
        a geopandas GeoDataFrame with box geometries in the raster's CRS.
    """
    with rasterio.open(raster_path) as dataset:
        bounds = dataset.bounds
        pixelSizeX, pixelSizeY = dataset.res
        # Capture the CRS while the dataset is still open so the output
        # GeoDataFrame can carry the raster's projection.
        crs = dataset.crs

    # Shift pixel coordinates into geographic coordinates.
    # Recall that the numpy/image origin is top-left, not bottom-left,
    # so y values are subtracted from the top bound.
    boxes["xmin"] = (boxes["xmin"] * pixelSizeX) + bounds.left
    boxes["xmax"] = (boxes["xmax"] * pixelSizeX) + bounds.left
    boxes["ymin"] = bounds.top - (boxes["ymin"] * pixelSizeY)
    boxes["ymax"] = bounds.top - (boxes["ymax"] * pixelSizeY)

    # Build one shapely box geometry per prediction row.
    boxes['geometry'] = boxes.apply(lambda x: shapely.geometry.box(x.xmin, x.ymin, x.xmax, x.ymax), axis=1)

    # Drop the now-redundant pixel-coordinate columns.
    boxes = boxes.drop(columns=['xmin', 'ymin', 'xmax', 'ymax'])

    # Fix: attach the raster's CRS so downstream writes (e.g. to shapefile)
    # are properly projected; previously the GeoDataFrame had no CRS at all.
    gdf = geopandas.GeoDataFrame(boxes, geometry='geometry', crs=crs)

    return gdf


def run(proj_tile_path, checkpoint_path, savedir="."):
"""Apply trained model to a drone tile"""

Expand Down

0 comments on commit a43369a

Please sign in to comment.