Skip to content

Commit

Permalink
JH review
Browse files Browse the repository at this point in the history
  • Loading branch information
anikaweinmann committed Dec 15, 2023
1 parent 291ae1c commit 32eceef
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 7 deletions.
16 changes: 12 additions & 4 deletions DSM/BE/BE_DSM_tindex.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,9 @@
# Usage:
# Set Parameter `GDALTINDEX`
# `GDALTINDEX=True`: using `gdaltindex` (more correct limits of the tiles)
# `GDALTINDEX=False`: creating the tiles out of the file names (only small improvement in runtime, because you still have to check with `gdalinfo` whether the tiles are valid files at all )
# `GDALTINDEX=False`: creating the tiles out of the file names (only small
# improvement in runtime, because you still have to check
# with `gdalinfo` whether the tiles are valid files at all)
# Then call script like this:
# python3 DSM/BE/BE_DSM_tindex.py
# Output:
Expand All @@ -39,10 +41,14 @@
import json

from osgeo import gdal
from remotezip import RemoteZip


# Parameter for Berlin DOM txt files
URL = "https://fbinter.stadt-berlin.de/fb/berlin/service_intern.jsp?id=a_dom1@senstadt&type=FEED"
URL = (
"https://fbinter.stadt-berlin.de/fb/berlin/service_intern.jsp?"
"id=a_dom1@senstadt&type=FEED"
)
GREP_STR = "https://fbinter.stadt-berlin.de/fb/atom/DOM/DOM1"
EPSG_CODE = 25833
FILE_EXTENSION = ".txt"
Expand Down Expand Up @@ -131,11 +137,13 @@ def create_tindex_by_gdaltindex(data_list):
raise Exception("lynx required, please install lynx first")

    # full tile index of the Berlin DOM1 zip tiles
get_data_cmd = f"lynx -dump -nonumbers -listonly '{URL}' | grep {GREP_STR} | grep 'zip$' | sed 's+^+{VSI_PART}+g'"
get_data_cmd = (
f"lynx -dump -nonumbers -listonly '{URL}' | grep {GREP_STR} | grep 'zip$' "
f"| sed 's+^+{VSI_PART}+g'"
)
stream = os.popen(get_data_cmd)
data_str = stream.read()
data_list = []
from remotezip import RemoteZip
for data in data_str.split():
with RemoteZip(data.replace(VSI_PART, "")) as zip:
for zip_info in zip.infolist():
Expand Down
15 changes: 12 additions & 3 deletions DTM/BE/BE_DTM_tindex.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,9 @@
# Usage:
# Set Parameter `GDALTINDEX`
# `GDALTINDEX=True`: using `gdaltindex` (more correct limits of the tiles)
# `GDALTINDEX=False`: creating the tiles out of the file names (only small improvement in runtime, because you still have to check with `gdalinfo` whether the tiles are valid files at all )
# `GDALTINDEX=False`: creating the tiles out of the file names (only small
# improvement in runtime, because you still have to check
# with `gdalinfo` whether the tiles are valid files at all)
# Then call script like this:
# python3 DTM/BE/BE_DTM_tindex.py
# Output:
Expand All @@ -42,13 +44,17 @@


# Parameter for Berlin DGM XYZ files
URL = "https://fbinter.stadt-berlin.de/fb/berlin/service_intern.jsp?id=a_dgm@senstadt&type=FEED"
URL = (
"https://fbinter.stadt-berlin.de/fb/berlin/service_intern.jsp?"
"id=a_dgm@senstadt&type=FEED"
)
GREP_STR = "https://fbinter.stadt-berlin.de/fb/atom/DGM1/DGM1_"
EPSG_CODE = 25833
FILE_EXTENSION = ".xyz"
TILE_SIZE = 2000
OUTPUT_FILE = "be_dgm_tindex_proj.gpkg.gz"
GDALTINDEX = True
VSI_PART = "/vsizip/vsicurl/"
os.chdir("DTM/BE/")


Expand Down Expand Up @@ -130,7 +136,10 @@ def create_tindex_by_gdaltindex(data_list):
raise Exception("lynx required, please install lynx first")

    # full tile index of the Berlin DGM1 zip tiles
get_data_cmd = f"lynx -dump -nonumbers -listonly '{URL}' | grep {GREP_STR} | grep 'zip$' | sed 's+^+/vsizip/vsicurl/+g'"
get_data_cmd = (
f"lynx -dump -nonumbers -listonly '{URL}' | grep {GREP_STR} | grep 'zip$' "
f"| sed 's+^+{VSI_PART}+g'"
)
stream = os.popen(get_data_cmd)
data_str = stream.read()
data_list = [
Expand Down

0 comments on commit 32eceef

Please sign in to comment.