Skip to content

Commit

Permalink
Merge pull request #9 from mundialis/add_retry_option
Browse files Browse the repository at this point in the history
Add retry option
  • Loading branch information
linakrisztian authored Sep 4, 2024
2 parents 4532972 + 0eda23d commit a35e922
Show file tree
Hide file tree
Showing 2 changed files with 71 additions and 6 deletions.
67 changes: 61 additions & 6 deletions r.in.wcs.worker/r.in.wcs.worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@
############################################################################
#
# MODULE: r.in.wcs.worker
# AUTHOR(S): Anika Weinmann
# AUTHOR(S): Anika Weinmann, Lina Krisztian
#
# PURPOSE: Worker addon of r.in.wcs which imports GetCoverage from a WCS
# server via requests
# COPYRIGHT: (C) 2023 by Anika Weinmann, mundialis GmbH & Co. KG and the
# COPYRIGHT: (C) 2023-2024 by Anika Weinmann, mundialis GmbH & Co. KG and the
# GRASS Development Team
#
# This program is free software; you can redistribute it and/or modify
Expand Down Expand Up @@ -83,12 +83,22 @@
# % label: Type for subset settings e.g. "Lat Long"
# %end

# %option
# % key: num_retry
# % type: integer
# % required: no
# % answer: 0
# % multiple: no
# % label: Maximum number of download retries
# %end

# %rules
# % collective: username,password
# %end

import atexit
import os
from time import sleep
import sys

from urllib.request import urlretrieve
Expand All @@ -99,6 +109,7 @@

try:
from grass_gis_helpers.mapset import switch_to_new_mapset
from grass_gis_helpers.validation import get_gdalinfo_returncodes
except ImportError:
grass.fatal(
_(
Expand Down Expand Up @@ -144,6 +155,7 @@ def main():
wcs_url = options["url"]
coverageid = options["coverageid"]
area = f"{options['area']}@{old_mapset}"
num_retry_max = int(options["num_retry"])

# setting region to area
grass.run_command("g.region", vector=area, res=res)
Expand All @@ -161,10 +173,53 @@ def main():
os.remove(tif)
tif = tif.replace(".0", ".tif")
RM_FILES.append(tif)
try:
urlretrieve(url, tif)
except URLError:
grass.fatal(_(f"Failed to reach the server.\nURL: {url}"))

num_retry = 0
while num_retry <= num_retry_max:
try:
urlretrieve(url, tif)
gdalinfo_err, gdalinfo_returncode = get_gdalinfo_returncodes(tif)
if (
gdalinfo_returncode != 0
or ("TIFFReadEncodedStrip" in gdalinfo_err)
or ("TIFFReadEncodedTile" in gdalinfo_err)
):
if num_retry == num_retry_max:
grass.fatal(
_(
"Failed to download tif after "
f"{num_retry_max} retries."
)
)
grass.warning(
_(
f"Broken tif downloaded, with error {gdalinfo_err}."
" Try to re-download. Retry "
f"{num_retry}/{num_retry_max} ..."
)
)
sleep(5)
os.remove(tif)
num_retry += 1
else:
break
except URLError as e:
if num_retry == num_retry_max:
grass.fatal(
_(
f"Failed to reach the server.\nURL: {url} "
f"after {num_retry_max} retries."
)
)
grass.warning(
_(
f"Failed to reach the server.\nURL: {url}. With Error {e}. "
f"Retry {num_retry}/{num_retry_max} ..."
)
)
sleep(5)
num_retry += 1

grass.run_command("r.import", input=tif, output=options["output"])
grass.message(
_(f"WCS Coverage {coverageid} is imported as {options['output']}")
Expand Down
10 changes: 10 additions & 0 deletions r.in.wcs/r.in.wcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,15 @@
# % description: If it is too large the download can fail; The download depends on the tile_size and the resolution of the coverage.
# %end

# %option
# % key: num_retry
# % type: integer
# % required: no
# % answer: 0
# % multiple: no
# % label: Maximum number of download retries
# %end

# %option G_OPT_M_NPROCS
# % description: Number of cores for multiprocessing, -2 is the number of available cores - 1
# % answer: -2
Expand Down Expand Up @@ -188,6 +197,7 @@ def main():
"subset_type": axis_label,
"username": options["username"],
"password": options["password"],
"num_retry": options["num_retry"],
}
# create tiles
tmp_id = grass.tempname(12)
Expand Down

0 comments on commit a35e922

Please sign in to comment.