Index: doc/conf.py =================================================================== diff -u -r1d60052392dea0a4f8c90e7619b869627cc35a35 -re24525b8bd3b87f5d4d432f6c704e7fe4dc74c8b --- doc/conf.py (.../conf.py) (revision 1d60052392dea0a4f8c90e7619b869627cc35a35) +++ doc/conf.py (.../conf.py) (revision e24525b8bd3b87f5d4d432f6c704e7fe4dc74c8b) @@ -210,22 +210,6 @@ # -- Options for LaTeX output -------------------------------------------------- -latex_elements = { - 'papersize': 'a4paper', - 'fontpkg': '', - 'maketitle': '\maketitle', - 'fncychap': '', - 'pointsize': '10pt', - 'preamble': '', - 'releasename': "", - 'babel': '\usepackage[english]{babel}', - 'printindex': '', - 'fontenc': '', - 'docclass': '', - 'inputenc': '', - 'classoptions': '', - 'utf8extra': '', -} # Grouping the document tree into LaTeX files. List of tuples Index: wflow-py/wflow/wtools_py/CatchRiver.py =================================================================== diff -u --- wflow-py/wflow/wtools_py/CatchRiver.py (revision 0) +++ wflow-py/wflow/wtools_py/CatchRiver.py (revision e24525b8bd3b87f5d4d432f6c704e7fe4dc74c8b) @@ -0,0 +1,330 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Mar 17 09:22:23 2015 + +@author: tollena +""" + +from subprocess import call +from osgeo import osr +import os +import sys +import pcraster as pcr +import getopt +import ConfigParser +import shutil +import numpy as np +import glob +try: + from osgeo import ogr +except ImportError: + import ogr +try: + from osgeo import gdal + from osgeo.gdalconst import * +except ImportError: + import gdal + from gdalconst import * + +import wflowtools_lib as wt + +Driver = ogr.GetDriverByName("ESRI Shapefile") + +def Usage(): + print('') + print('Usage: CatchRiver [-d dem (raster)] [-l burn_line (shape)] [-p burn_point (shape)] [-a burn_area (shape)] [-R riverout (shape)] [-C catchmentout (shape)] [-O min strahler order (integer)] -B -S -K') + print '-d digital elevation model (GeoTiff)' + print '-l polylines (rivers) to be 
burned in the DEM (optional) (ESRI Shapefile)' + print '-p points (outlets) to be burned in the DEM (optional) (ESRI Shapefile)' + print '-F factor by which the cell-size should be scaled (default=1)' + print '-O minimal strahler order in river shapefile (optional, default=3) (integer)' + print '-s option to snap points (-p) to lines (-l) (default=no)' + print '-R name of output river (optional, default = river.shp) (ESRI Shapefile)' + print '-C name of output catchment (optional, default = catchment.shp) (ESRI Shapefile)' + print '-B burn value by which DEM will be lowered if burned (optional, default=1000) (integer)' + print '-S option to skip generation of LDD (default=no)' + print '-K option to keep all catchments and river networks (default=no)' + print '-I option to force "lddin" (default=no)' + + print '' + +def removeshp(shapein,directory): + if os.path.exists(directory+shapein): + print shapein + ' exists and will be deleted' + Driver.DeleteDataSource(directory+shapein) + shapein = directory+shapein + else: shapein = directory+shapein + if os.path.exists(directory+shapein): + print 'failed to remove ' + directory+shapein + counter = 1 + stopcounting = False + shp_att = os.path.splitext(os.path.basename(shapein))[0] + while not stopcounting: + filename = shp_att + '(' + str(counter) + ')' + '.shp' + if not os.path.exists(directory+filename): + shapein = directory+filename + stopcounting = True + else: counter += 1 + print 'filename used: ' + shapein + return shapein + +#def main(): +workdir = "work\\" +resultdir = "CatchRiver\\" + +''' read commandline arguments ''' +argv = sys.argv +#argv = ['x','-d', '..\input\DEM_5M_filled.tif','-F','100','-R','river_test.shp',] + + +try: + opts, args = getopt.getopt(argv[1:], 'd:l:p:F:a:R:C:O:B:SsKI') +except getopt.error: + print 'fout' + Usage() + sys.exit(1) + +dem_in = None +rivshp = None +catchshp = None +lineshp = None +areashp = None +pointshp = None +minorder = None +snapgaugestoriver = False +scalefactor = None 
+skipldd = False +EPSG = None +srs = None +keepall = False +lddin = False +burnvalue = 1000 + +for o, a in opts: + if o == '-d': dem_in = a + if o == '-l': lineshp = a + if o == '-F': scalefactor = a + if o == '-p': pointshp = a + if o == '-R': rivshp = a + if o == '-C': catchshp = a + if o == '-O': minorder = int(a) + if o == '-S': skipldd = True + if o == '-s': snapgaugestoriver = True + if o == '-B': burnvalue = float(a) + if o == '-K': keepall = True + if o == '-I': lddin = True + +''' check if files exist ''' +if dem_in == None: + if not skipldd: + print 'please provide dem' + Usage() + sys.exit(1) +else: + if not os.path.exists(dem_in): + print 'file ' + dem_in + print 'Your DEM does not exist in the file-system' + print '' + sys.exit(1) +if not pointshp == None: + if not os.path.exists(pointshp): + print 'file ' + pointshp + print 'Your point-shape does not exist in the file-system' + print '' + sys.exit(1) +if not lineshp == None: + if not os.path.exists(lineshp): + print 'file ' + lineshp + print 'Your line-shape does not exist in the file-system' + print '' + sys.exit(1) + +''' set property values ''' +if minorder == None: + print 'no minimum strahler order specified' + print 'default will be used: 5' + minorder = int(5) + +if burnvalue == None: + print 'no value for burning defined' + print 'default will be used: 1000 (map units)' + print 'pits will be filled till 500 (map units)' + burnvalue = float(1000) + +if rivshp == None: + print 'default name for river shape will be used: river.shp' + rivshp = 'river.shp' + +if catchshp == None: + print 'default name for river shape will be used: catchment.shp' + catchshp = 'catchments.shp' + +if not dem_in == None: + ds = gdal.Open(dem_in) + if ds == None: + print 'Input file specified not available or not a raster' + sys.exit(1) + else: + spatialref = ds.GetProjection() + srs = osr.SpatialReference() + if (srs == None) or (spatialref == ''): + print 'Your DEM is not projected' + sys.exit(1) + print srs + 
srs.ImportFromWkt(spatialref) + srs.AutoIdentifyEPSG() + EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) + cellsize = ds.GetGeoTransform()[1] + #transform = ds.GetGeoTransform() + #ds = None +else: + print 'no DEM provided, no projection will be assigned to output' + +''' create directories ''' +if not skipldd: + if os.path.isdir(workdir): + try: + shutil.rmtree(workdir) + except: + print 'cannot remove work directory' + print 'probably blocked by other process' + sys.exit(1) + os.makedirs(workdir) + +if not os.path.isdir(resultdir): + os.makedirs(resultdir) + +rivshp = removeshp(rivshp,resultdir) +catchshp = removeshp(catchshp,resultdir) + + +''' convert and read DEM ''' +if not skipldd: + dem_map = workdir + 'dem.map' + if not scalefactor == None: + cellsizescaled = float(cellsize)*float(scalefactor) + dem_scaled = workdir + 'dem_scaled.tif' + call(('gdalwarp','-overwrite','-s_srs',EPSG,'-t_srs',EPSG,'-tr',str(cellsizescaled),str(-cellsizescaled), '-dstnodata', str(-9999),'-r','cubic',dem_in, dem_scaled)) + dem_in = dem_scaled + call(('gdal_translate','-of','PCRaster','-a_srs',EPSG,'-ot','Float32',dem_in,dem_map)) + dem = pcr.readmap(dem_map) + lines = dem * 0 + points = dem * 0 + #create mask (if needed) + burndem = False + if not (lineshp == None and areashp == None and pointshp == None): + clone_map = workdir + 'clone.map' + clone = dem * 0 + burn = pcr.cover(dem * 0,pcr.scalar(0)) + #pcr.report(burn,'burn1.map') + pcr.report(clone,clone_map) + burndem = True + #burn lines + if not lineshp == None: + file_att = os.path.splitext(os.path.basename(lineshp))[0] + line_tif = workdir + 'line.tif' + line_map = workdir + 'line.map' + call(('gdal_translate','-of','GTiff','-a_srs',EPSG,'-ot','Float32',clone_map,line_tif)) + call(('gdal_rasterize','-burn','1','-l',file_att,lineshp,line_tif)) + call(('gdal_translate','-of','PCRaster','-a_srs',EPSG,'-ot','Float32',line_tif,line_map)) + lines = pcr.scalar(pcr.readmap(line_map)) + burn = burn - (pcr.scalar(lines) * 
pcr.scalar(burnvalue)) + #pcr.report(burn,'burn2.map') + #burn points + if not pointshp == None: + file_att = os.path.splitext(os.path.basename(pointshp))[0] + point_tif = workdir + 'point.tif' + point_map = workdir + 'point.map' + call(('gdal_translate','-of','GTiff','-a_srs',EPSG,'-ot','Float32',clone_map,point_tif)) + call(('gdal_rasterize','-burn','1','-l',file_att,pointshp,point_tif)) + call(('gdal_translate','-of','PCRaster','-a_srs',EPSG,'-ot','Float32',point_tif,point_map)) + points = pcr.scalar(pcr.readmap(point_map)) + if snapgaugestoriver: + print "Snapping points to line" + points= wt.snaptomap(pcr.ordinal(points),pcr.boolean(lines)) + points= pcr.cover(pcr.scalar(points),pcr.scalar(0)) + points = pcr.cover(points, pcr.scalar(0)) + #pcr.report(points,'points.map') + burn = burn - (points * pcr.scalar(burnvalue)*2) + #pcr.report(burn,'burn3.map') + +''' create ldd ''' +pcr.setglobaloption("lddout") +if lddin: + pcr.setglobaloption("lddin") +ldd_map = workdir + 'ldd.map' +streamorder_map = workdir + 'streamorder.map' +river_map = workdir + 'river.map' +catchments_map = workdir + 'catchments.map' +catchments_tif = workdir + 'catchments.tif' +#catchments_shp = resultdir + 'catchments.shp' + +generateldd = True + +if skipldd: + print 'Option -S is set' + print 'ldd will be read from ' + ldd_map + if os.path.exists(ldd_map): + ldd = pcr.ldd(pcr.readmap(ldd_map)) + generateldd = False + else: + print 'file ' + ldd_map + ' does not exist' + print 'new ldd will be generated' + +if generateldd: + print 'Generating ldd...' 
+ if burndem: + linescover = pcr.ifthen(lines==1,pcr.scalar(0)) + pointscover = pcr.ifthen(pcr.scalar(points)==1,pcr.scalar(0)) + #pcr.report(linescover,'lines.map') + #pcr.report(pointscover,'points.map') + dem = pcr.cover(dem,linescover,pointscover) + #pcr.report(dem,'dem1.map') + dem = dem + burn + #pcr.report(dem,'dem2.map') + ldd = pcr.lddcreate(dem,float("1E35"),float("1E35"),float("1E35"),float("1E35")) + else: + ldd = pcr.lddcreate(dem,burnvalue/2,float("1E35"),float("1E35"),float("1E35")) + +streamorder = pcr.ordinal(pcr.streamorder(ldd)) +river = pcr.boolean(pcr.ifthen(streamorder >= int(min(np.max(pcr.pcr2numpy(streamorder,-9999)),minorder)), streamorder)) +outlets = pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.boolean(1)) +outlets = pcr.nominal(pcr.uniqueid(outlets)) +catchments = pcr.nominal(pcr.catchment(ldd, outlets)) + +if not keepall: + catchments = pcr.nominal(pcr.ifthen(pcr.mapmaximum(pcr.areatotal(pcr.scalar(catchments)*0+1,pcr.nominal(catchments))) == pcr.areatotal(pcr.scalar(catchments)*0+1,pcr.nominal(catchments)),catchments)) + +pcr.report(ldd,ldd_map) +pcr.report(streamorder,streamorder_map) +pcr.report(river,river_map) +pcr.report(catchments,catchments_map) +if not EPSG == None: + call(('gdal_translate','-of','GTiff','-stats','-a_srs',EPSG,'-ot','Float32',catchments_map,catchments_tif)) +else: call(('gdal_translate','-of','GTiff','-stats','-ot','Float32',catchments_map,catchments_tif)) +wt.Raster2Pol(catchments_tif,catchshp,srs) + +riversid_map = workdir + 'riverid.map' +drain_map = workdir + 'drain.map' +ldd_mask = pcr.ifthen(river, ldd) +upstream = pcr.upstream(ldd_mask, pcr.scalar(river)) +downstream = pcr.downstream(ldd_mask, upstream) +#pcr.report(downstream,'downstream.map') +confluences = pcr.boolean(pcr.ifthen(downstream >= 2, pcr.boolean(1))) +#pcr.report(confluences,'confluences.map') +boundaries = pcr.boolean(pcr.ifthen(pcr.scalar(ldd_mask) == 5, pcr.boolean(1))) +catch_points = 
pcr.nominal(pcr.uniqueid(pcr.cover(confluences,boundaries))) +catchmentsid = pcr.nominal(pcr.subcatchment(ldd, catch_points)) +drain = pcr.accuflux(ldd_mask, 1) +riversid = pcr.ifthen(river, catchmentsid) + +if not keepall: + riversid = pcr.nominal(pcr.ifthen(pcr.mapmaximum(pcr.areatotal(pcr.scalar(catchments)*0+1,pcr.nominal(catchments))) == pcr.areatotal(pcr.scalar(catchments)*0+1,pcr.nominal(catchments)),riversid)) + +pcr.report(riversid,riversid_map) +pcr.report(drain,drain_map) + +print 'converting river map-file to shape-file...' +wt.PCR_river2Shape(riversid_map, drain_map,streamorder_map,ldd_map,rivshp,catchments_map,srs) +#if __name__ == "__main__": +# main() \ No newline at end of file Index: wflow-py/wflow/wtools_py/CheckInput.py =================================================================== diff -u --- wflow-py/wflow/wtools_py/CheckInput.py (revision 0) +++ wflow-py/wflow/wtools_py/CheckInput.py (revision e24525b8bd3b87f5d4d432f6c704e7fe4dc74c8b) @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Mar 17 19:08:52 2015 + +@author: tollena +""" + +import wflowtools_lib as wt +import getopt +import os +import shutil +import sys + +def Usage(): + print('') + print('Usage: CheckInput -h [-r river] ') + print '-r rivers to be checked for network (optional) (ESRI Shapefile)' + print('') + +def main(): + argv = sys.argv + #argv = ['x','-r', 'river_selec.shp'] + + + rivershp= None + + resultdir = 'check\\' + if os.path.isdir(resultdir): + shutil.rmtree(resultdir) + os.makedirs(resultdir) + + try: + opts, args = getopt.getopt(argv[1:], 'hr:') + except getopt.error: + print 'fout' + Usage() + sys.exit(1) + + for o, a in opts: + if o == '-h': + Usage() + sys.exit() + elif o == '-r': rivershp = a + + if rivershp == None: + print 'input file or extent need to be specified' + Usage() + sys.exit(1) + + shapes = wt.Reach2Nodes(rivershp,4326,0.0001,resultdir) +if __name__ == "__main__": + main() Index: wflow-py/wflow/wtools_py/CreateGrid.py 
=================================================================== diff -u --- wflow-py/wflow/wtools_py/CreateGrid.py (revision 0) +++ wflow-py/wflow/wtools_py/CreateGrid.py (revision e24525b8bd3b87f5d4d432f6c704e7fe4dc74c8b) @@ -0,0 +1,199 @@ +from subprocess import call +import os +import sys +import math +import getopt +import string +import shutil +from osgeo import osr +from osgeo import ogr +from lxml import etree +try: + from osgeo import ogr +except ImportError: + import ogr +try: + from osgeo import gdal + from osgeo.gdalconst import * +except ImportError: + import gdal + from gdalconst import * +import wflowtools_lib as wt + +def Usage(): + print('') + print('Usage: CreateGrid [-f inputfile] [-c cell-size]') + print '-f file of which extent will be read. Most logically the catchment layer' + print ' format: ESRI Shapefile or any gdal supported raster format (preferred GeoTiff)' + print('') + +def main(): + + ''' parse command line arguments ''' + + argv = sys.argv + #argv = ['x','-f', 'Citarum.shp','-c','0.005'] + + inputfile = None + extent = None + cellsize = None + Projection = False + try: + opts, args = getopt.getopt(argv[1:], 'f:e::::c:') + except getopt.error: + print 'fout' + Usage() + sys.exit(1) + + for o, a in opts: + if o == '-h': + Usage() + sys.exit() + elif o == '-f': inputfile = a + elif o == '-e': extent = a + elif o == '-c': cellsize = a + + ''' checks on existence if input ''' + if inputfile == None: + if extent == None: + print 'input file or extent need to be specified' + Usage() + sys.exit(1) + else: + extent_in = [float(x) for x in string.split(extent,',')] + if cellsize == None: + print 'cell-size (-c) is not specified' + Usage() + sys.exit(1) + + ''' delete existing files ''' + if os.path.isdir('mask'): + shutil.rmtree('mask') + os.makedirs('mask') + + if not inputfile == None: + fileext = os.path.splitext(os.path.basename(inputfile))[1] + if fileext == '.shp': + '''ds openen als shape-file''' + file_att = 
os.path.splitext(os.path.basename(inputfile))[0] + ds = ogr.Open(inputfile) + '''read extent''' + lyr = ds.GetLayerByName(file_att) + extent = lyr.GetExtent() + extent_in = [extent[0],extent[2],extent[1],extent[3]] + spatialref = lyr.GetSpatialRef() + if not spatialref == None: + srs = osr.SpatialReference() + srs.ImportFromWkt(spatialref.ExportToWkt()) + proj = spatialref.GetUTMZone() + Projection = True + zone = str(abs(proj)) + if str(proj)[0] == '-': + hemisphere = 'S' + else: hemisphere = 'N' + geodatum = 'UTM' + zone + hemisphere + if proj == 0: + geodatum = 'WGS 1984' + else: + ds = gdal.Open(inputfile) + if ds == None: + print 'Input file specified not a shape-file or supported gdal raster format' + sys.exit(1) + '''read extent''' + geotransform = ds.GetGeoTransform() + raster_cellsize = geotransform[1] + ncols = ds.RasterXSize + nrows =ds.RasterYSize + extent_in = [geotransform[0],geotransform[3]-nrows*raster_cellsize,geotransform[0]+ncols*raster_cellsize,geotransform[3]] + spatialref = ds.GetProjection() + if not spatialref == None: + srs = osr.SpatialReference() + srs.ImportFromWkt(spatialref) + if 'UTM zone' in spatialref: + Projection = True + zone = spatialref[spatialref.find('UTM zone')+9:spatialref.find('UTM zone')+11] + hemisphere = spatialref[spatialref.find('UTM zone')+11:spatialref.find('UTM zone')+12] + geodatum = 'UTM' + zone + hemisphere + elif '"WGS 84"' in spatialref: + Projection = True + geodatum = 'WGS 1984' + + ''' warn if projection is not defined ''' + if not Projection: + print 'No projection defined in input file:' + print ' no projection will be assigned to mask.shp' + print ' no projection will be assigned to geoDatum element in grid.xml' + print '' + geodatum = '' + + ''' create mask.map ''' + snap = len(str(float(cellsize)-math.floor(float(cellsize))))-2 + extent_out = wt.round_extent(list(extent_in),float(cellsize),snap) + columns = int((extent_out[2]-extent_out[0])/float(cellsize)+2) + rows = 
int((extent_out[3]-extent_out[1])/float(cellsize)+2) + cells = rows*columns + xorg = extent_out[0]-float(cellsize) + yorg = extent_out[3]+float(cellsize) + + ## TODO use gdal_writemap + call(('mapattr','-s','-R',str(rows),'-C',str(columns),'-x',str(xorg),'-y',str(yorg),'-l',cellsize,'mask\mask.map')) + print 'mask.map created' + + ''' create grid.xml ''' + root = etree.Element("regular", locationId='wflow_mask') + etree.SubElement(root, 'rows').text = str(rows) + etree.SubElement(root, 'columns').text = str(columns) + etree.SubElement(root, 'geoDatum').text = geodatum + etree.SubElement(root, 'firstCellCenter') + etree.SubElement(root[3], 'x').text = str(xorg+0.5*float(cellsize)) + etree.SubElement(root[3], 'y').text = str(yorg-0.5*float(cellsize)) + etree.SubElement(root, 'xCellSize').text = str(cellsize) + etree.SubElement(root, 'yCellSize').text = str(cellsize) + gridxml = open('mask\grid.xml','w+') + gridxml.write(etree.tostring(root, pretty_print=True)) + gridxml.close() + + print 'grid.xml created' + + ''' Create shape-file ''' + Driver = ogr.GetDriverByName("ESRI Shapefile") + SHP_srs = "mask\mask.shp" + SHP_srs_ATT = os.path.splitext(os.path.basename(SHP_srs))[0] + SHP_srs_out = Driver.CreateDataSource(SHP_srs) + if Projection: + SHP_srs_LYR = SHP_srs_out.CreateLayer(SHP_srs_ATT, srs, geom_type=ogr.wkbPolygon) + else: SHP_srs_LYR = SHP_srs_out.CreateLayer(SHP_srs_ATT, geom_type=ogr.wkbPolygon) + fieldDef = ogr.FieldDefn("ID", ogr.OFTString) + fieldDef.SetWidth(12) + SHP_srs_LYR.CreateField(fieldDef) + ring = ogr.Geometry(ogr.wkbLinearRing) + ring.AddPoint(xorg,yorg) + ring.AddPoint(xorg + columns * float(cellsize),yorg) + ring.AddPoint(xorg + columns * float(cellsize),yorg - rows * float(cellsize)) + ring.AddPoint(xorg,yorg - rows * float(cellsize)) + ring.AddPoint(xorg,yorg) + ring.CloseRings + polygon = ogr.Geometry(ogr.wkbPolygon) + polygon.AddGeometry(ring) + feat_out = ogr.Feature(SHP_srs_LYR.GetLayerDefn()) + feat_out.SetGeometry(polygon) + 
feat_out.SetField("ID", 'wflow_mask') + SHP_srs_LYR.CreateFeature(feat_out) + SHP_srs_out.Destroy() + print 'mask.shp created' + print '' + print 'Number of cells: ' + str(cells) + if cells > 1000000: + print 'With this amount of cells your model will run VERY slow. Consider a larger cell-size' + print 'fast models run with < 100000 cells' + elif cells > 100000: + print 'With this amount of cells your model will run slow. Consider a larger cell-size' + print 'fast models run with < 100000 cells' +if __name__ == "__main__": + main() + +# TODO: remove hard folder mask from this script and staticmaps +# TODO: add extent +# TODO: add logger +# TODO: replace mapattr by gdal +# TODO: add projection code as optional argument and add projection to .shp \ No newline at end of file Index: wflow-py/wflow/wtools_py/StaticMaps.py =================================================================== diff -u --- wflow-py/wflow/wtools_py/StaticMaps.py (revision 0) +++ wflow-py/wflow/wtools_py/StaticMaps.py (revision e24525b8bd3b87f5d4d432f6c704e7fe4dc74c8b) @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- +""" +Created on Thu Mar 12 13:26:48 2015 + +@author: tollena +""" +from subprocess import call +from osgeo import osr +import os +import sys +import pcraster as pcr +import getopt +import shutil +import numpy as np +import glob +try: + from osgeo import ogr +except ImportError: + import ogr +try: + from osgeo import gdal + from osgeo.gdalconst import * +except ImportError: + import gdal + from gdalconst import * + +import wflowtools_lib as wt +import wflow_lib as tr + +Driver = ogr.GetDriverByName("ESRI Shapefile") + +def Usage(): + print('') + print('Usage: Staticmaps [-i ini-file] [-r river] [-g gauges (shape)] [-c catchment (shape)] [-d dem (raster)] [-l landuse (raster)] [-s soiltype (raster)] -C') + print '' + print '-i ini file with settings for staticmaps.exe' + print '-r river network polyline layer (ESRI Shapefile)' + print '-g gauges point layer (ESRI Shapefile)' + print '-c 
catchment polygon layer (ESRI Shapefile)' + print '-d digital elevation model (GeoTiff)' + print '-l land-use land-cover layer (GeoTiff)' + print '-s soil-type layer (GeoTiff)' + print '-C removes the .xml files from the staticmaps directory when finished' + print '-A option to burn catchments "all touching".' + print 'Usuefull when catchment-size is small compared to cellsize' + print '' + print '!!NOTE: PREFERABLY ALL LAYERS SHOULD BE PROJECTED!!' + print '!!IF NOT, THE PROJECTION OF MASK WILL BE USED!!' + print '' + +#def main(): +clone_map = "d:\FEWS\FEWS-Kelantan\Kelantan\Modules\wflow\kelantan\mask\mask.map" +clone_shp = "d:\FEWS\FEWS-Kelantan\Kelantan\Modules\wflow\kelantan\mask\mask.shp" +clone_prj = "d:\FEWS\FEWS-Kelantan\Kelantan\Modules\wflow\kelantan\mask\mask.prj" +workdir = "work\\" +resultdir = "staticmaps\\" + +''' read commandline arguments ''' +argv = sys.argv +argv = ['','-i','d:\FEWS\FEWS-Kelantan\Kelantan\Modules\wflow\kelantan\StaticMaps.ini','-d','d:\FEWS\FEWS-Kelantan\Kelantan\Modules\data\DEM\srtm_kelantan_UTM48N_90m.tif','-r','d:\FEWS\FEWS-Kelantan\Kelantan\Modules\data\River_Network\Kelantan_Rivers_Hydrosheds_UTM48N.shp','-c','d:\FEWS\FEWS-Kelantan\Kelantan\Modules\data\River_Catchment\Kelantan_Catchment_Hydrosheds_UTM48N.shp','-C','-A'] +clone_EPSG = False + + +try: + opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA') +except getopt.error: + print 'fout' + Usage() + sys.exit(1) + +inifile = None +rivshp = None +catchshp = None +dem_in = None +landuse = None +soiltype = None +clean = False +gaugeshp = None +alltouching = False + +for o, a in opts: + if o == '-i': inifile = a + if o == '-p': clone_EPSG = 'EPSG:' + a + if o == '-r': rivshp = a + if o == '-c': catchshp = a + if o == '-d': dem_in = a + if o == '-l': landuse = a + if o == '-s': soiltype = a + if o == '-C': clean = True + if o == '-g': gaugeshp = a + if o == '-A': alltouching = True + +if inifile == None or rivshp == None or catchshp == None or dem_in == None: + print 
'the following files are compulsory:' + print ' - ini-file' + print ' - DEM (raster)' + print ' - river (shape)' + print ' - catchment (shape)' + Usage() + sys.exit(1) + +if landuse == None: + print 'no raster with landuse classifications is specified. 1 class will be applied for the entire domain' + +if soiltype == None: + print 'no raster with soil classifications is specified. 1 class will be applied for the entire domain' + +''' read mask ''' +if not os.path.exists(clone_map): + print 'Mask not found. Make sure the file mask\mask.map exists' + print 'This file is usually created with the CreateGrid script' + sys.exit(1) +else: + pcr.setclone(clone_map) + ds = gdal.Open(clone_map,GA_ReadOnly) + clone_trans = ds.GetGeoTransform() + cellsize = clone_trans[1] + clone_rows = ds.RasterYSize + clone_columns = ds.RasterXSize + extent_mask = [clone_trans[0],clone_trans[3]-ds.RasterYSize*cellsize,clone_trans[0]+ds.RasterXSize*cellsize,clone_trans[3]] + xmin, ymin, xmax, ymax = map(str, extent_mask) + ds = None + ones = pcr.scalar(pcr.readmap(clone_map)) + zeros = ones * 0 + empty = pcr.ifthen(ones == 0, pcr.scalar(0)) + +''' read projection from mask.shp ''' +# TODO: check how to deal with projections (add .prj to mask.shp in creategrid) +if not os.path.exists(clone_prj): + print 'please add prj-file to mask.shp' + sys.exit(1) +if os.path.exists(clone_shp): + ds = ogr.Open(clone_shp) + file_att = os.path.splitext(os.path.basename(clone_shp))[0] + lyr = ds.GetLayerByName(file_att) + spatialref = lyr.GetSpatialRef() + if not spatialref == None: + srs_clone = osr.SpatialReference() + srs_clone.ImportFromWkt(spatialref.ExportToWkt()) + srs_clone.AutoIdentifyEPSG() + unit_clone = False + unit_clone = srs_clone.GetAttrValue('UNIT').lower() + clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1) + # TODO: fix hard EPSG code below + #clone_EPSG = 'EPSG:'+ '4167' + print 'EPSG-code is read from mask.shp: ' + clone_EPSG + spatialref == None +if not clone_EPSG: + print 
'EPSG-code cannot be read from mask.shp' + print 'please add prj-file to mask.shp or specify on command line' + print 'e.g. -p EPSG:4326 (for WGS84 lat lon projection)' + +ds = None +clone_EPSG_int = int(clone_EPSG[5:len(clone_EPSG)]) + +''' open config-file ''' +config=wt.OpenConf(inifile) + +''' read settings ''' +snapgaugestoriver = bool(int(wt.configget(config, + "settings", + "snapgaugestoriver","1"))) +burnalltouching = bool(int(wt.configget(config, + "settings", + "burncatchalltouching", + "1"))) +burninorder = bool(int(wt.configget(config,"settings","burncatchalltouching","0"))) +verticetollerance =float(wt.configget(config,"settings","vertice_tollerance","0.0001")) + +''' read parameters ''' +burn_outlets = int(wt.configget(config,"parameters","burn_outlets",10000)) +burn_rivers = int(wt.configget(config,"parameters","burn_rivers",200)) +burn_connections = int(wt.configget(config,"parameters","burn_connections",100)) +burn_gauges = int(wt.configget(config,"parameters","burn_gauges",100)) +minorder = int(wt.configget(config,"parameters","riverorder_min",3)) +exec "percentile=tr.array(" + wt.configget(config,"parameters","statisticmaps",[0,100]) + ")" +if not unit_clone: + print 'failed to read unit (meter or degree) from mask projection' + unit_clone=str(wt.configget(config,"settings","unit",'meter')) + print 'unit read from settings: ' + unit_clone +if unit_clone == 'degree': + cellsize_hr = float(wt.configget(config,"parameters","highres_degree",0.0005)) +elif (unit_clone == 'metre') or (unit_clone == 'meter'): + cellsize_hr = float(wt.configget(config,"parameters","highres_metre",50)) + +cols_hr = int((float(xmax)-float(xmin))/cellsize_hr + 2) +rows_hr = int((float(ymax)-float(ymin))/cellsize_hr + 2) +hr_trans = (float(xmin),cellsize_hr,float(0),float(ymax),0,-cellsize_hr) + +''' read staticmap locations ''' +catchment_map = wt.configget(config,"staticmaps", "catchment", "wflow_catchment.map") +dem_map = wt.configget(config,"staticmaps", 
"dem","wflow_dem.map") +demmax_map = wt.configget(config,"staticmaps", "demmax","wflow_demmax.map") +demmin_map = wt.configget(config,"staticmaps", "demmin","wflow_demmin.map") +gauges_map = wt.configget(config,"staticmaps", "gauges","wflow_gauges.map") +landuse_map = wt.configget(config,"staticmaps", "landuse","wflow_landuse.map") +ldd_map = wt.configget(config,"staticmaps", "ldd","wflow_ldd.map") +river_map = wt.configget(config,"staticmaps", "river","wflow_river.map") +outlet_map = wt.configget(config,"staticmaps", "outlet", "wflow_outlet.map") +riverlength_fact_map = wt.configget(config,"staticmaps", "riverlength_fact","wflow_riverlength_fact.map") +soil_map = wt.configget(config,"staticmaps", "soil","wflow_soil.map") +streamorder_map = wt.configget(config,"staticmaps", "streamorder","wflow_streamorder.map") +subcatch_map = wt.configget(config,"staticmaps", "subcatch","wflow_subcatch.map") + + +''' read mask location (optional) ''' +masklayer = wt.configget(config,"mask","masklayer",catchshp) + +''' create directories ''' +if os.path.isdir(workdir): + shutil.rmtree(workdir) +os.makedirs(workdir) + +if os.path.isdir(resultdir): + shutil.rmtree(resultdir) +os.makedirs(resultdir) + +''' Preperation steps ''' +zero_map = workdir + "zero.map" +zero_tif = workdir + "zero.tif" +pcr.report(zeros, zero_map) +# TODO: replace gdal_translate call +call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,'-ot','Float32', zero_map, zero_tif)) +pcr.setglobaloption("lddin") + +''' resample DEM ''' +dem_resample = workdir + "dem_resampled.tif" +ds = gdal.Open(dem_in,GA_ReadOnly) +band = ds.GetRasterBand(1) +nodata = band.GetNoDataValue() +proj = ds.GetGeoTransform() +cellsize_dem = proj[1] + +''' read DEM projection ''' +spatialref == None +spatialref = ds.GetProjection() +if not spatialref == None: + srs = osr.SpatialReference() + srs.ImportFromWkt(spatialref) + srs.AutoIdentifyEPSG() + dem_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) + print 'EPSG-code is read from ' + 
os.path.basename(dem_in) + ': ' + dem_EPSG + spatialref == None + dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)]) + srs_DEM = osr.SpatialReference() + srs_DEM.ImportFromEPSG(dem_EPSG_int) + clone2dem_transform = osr.CoordinateTransformation(srs_clone,srs_DEM) +else: + dem_EPSG = clone_EPSG + print 'No projection defined for ' + os.path.basename(dem_in) + print 'Assumed to be the same as model projection (' + clone_EPSG + ')' + +ds = None +print 'Resampling DEM...' +if nodata == None: + call(('gdalwarp','-overwrite','-t_srs',clone_prj,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize), '-dstnodata', str(-9999),'-r','cubic',dem_in, dem_resample)) +else: call(('gdalwarp','-overwrite','-t_srs',clone_prj,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize), '-srcnodata', str(nodata), '-dstnodata', str(nodata),'-r','cubic',dem_in, dem_resample)) + +''' create dem.map and statistic maps ''' +dem_resample_map = resultdir + dem_map +call(('gdal_translate', '-of', 'PCRaster','-a_srs',clone_EPSG,'-ot','Float32',dem_resample,dem_resample_map)) +print 'Computing DEM statistics ....' 
+stats = wt.windowstats(dem_in, clone_rows, clone_columns, + clone_trans, srs_clone, resultdir, percentile) + +''' burn DEM ''' +ds = ogr.Open(rivshp) +file_att = os.path.splitext(os.path.basename(rivshp))[0] +lyr = ds.GetLayerByName(file_att) +spatialref = lyr.GetSpatialRef() +# if not spatialref == None: +# srs = osr.SpatialReference() +# srs.ImportFromWkt(spatialref.ExportToWkt()) +# srs.AutoIdentifyEPSG() +# rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) +# spatialref == None +# else: +rivshp_EPSG = clone_EPSG +print 'No projection defined for ' + file_att + '.shp' +print 'Assumed to be the same as model projection (' + clone_EPSG + ')' + +# strip rivers to nodes +xminc = str(float(xmin)+0.5*cellsize) +yminc = str(float(ymin)+0.5*cellsize) +xmaxc = str(float(xmax)-0.5*cellsize) +ymaxc = str(float(ymax)-0.5*cellsize) +if rivshp_EPSG == clone_EPSG: + rivclipshp = workdir + 'rivshape_clip.shp' + call(('ogr2ogr','-s_srs',clone_EPSG,'-t_srs',clone_EPSG,'-spat',xmin,ymin,xmax,ymax,'-clipsrc',xminc,yminc,xmaxc,ymaxc,rivclipshp,rivshp)) +else: + rivprojshp = workdir + 'rivshape_proj.shp' + rivclipshp = workdir + 'rivshape_clip.shp' + call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_EPSG,'-spat',xmin,ymin,xmax,ymax,rivprojshp,rivshp)) + call(('ogr2ogr','-s_srs',clone_EPSG,'-t_srs',clone_EPSG,'-spat',xmin,ymin,xmax,ymax,'-clipsrc',xminc,yminc,xmaxc,ymaxc,rivclipshp,rivprojshp)) + +rivshp = rivclipshp + + +#### BURNING BELOW #### + +# TODO: check if extraction can be done within memory and retun a burn layer +shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int, + cellsize*verticetollerance, workdir) + +outlets = shapes[1] +connections = shapes[2] +outlets_att = os.path.splitext(os.path.basename(outlets))[0] +connections_att = os.path.splitext(os.path.basename(connections))[0] +dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0] +connections_tif = workdir + connections_att + ".tif" +outlets_tif = workdir + outlets_att + ".tif" +# TODO: make the 
burning in memory +call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,'-ot','Float32',zero_map,connections_tif)) +call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,'-ot','Float32',zero_map,outlets_tif)) +call(('gdal_rasterize','-burn','1','-l',outlets_att,outlets,outlets_tif)) +call(('gdal_rasterize','-burn','1','-l',connections_att,connections,connections_tif)) + +# convert rivers to order +rivshp_att = os.path.splitext(os.path.basename(rivshp))[0] +rivers_tif = workdir + rivshp_att + ".tif" +call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,'-ot','Float32',zero_map,rivers_tif)) +if burninorder: # make river shape with an order attribute + OrderSHPs = wt.ReachOrder(rivshp,clone_EPSG_int,cellsize*verticetollerance,workdir) + wt.Burn2Tif(OrderSHPs,'order',rivers_tif) +else: call(('gdal_rasterize', '-burn', '1', '-l', + rivshp_att, rivshp,rivers_tif)) + +# convert 2 maps +connections_map = workdir + connections_att + ".map" +rivers_map = workdir + rivshp_att + ".map" +outlets_map = workdir + outlets_att + ".map" +call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,'-ot','Float32',connections_tif,connections_map)) +call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,'-ot','Float32',rivers_tif,rivers_map)) +call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,'-ot','Float32',outlets_tif,outlets_map)) + +# burn the layers in DEM +outletsburn = pcr.scalar(pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets) +connectionsburn = pcr.scalar(pcr.readmap(connections_map))*pcr.scalar(burn_connections) +riverburn = pcr.scalar(pcr.readmap(rivers_map))* pcr.scalar(burn_rivers) +ldddem = pcr.cover(dem_resample_map, pcr.ifthen(riverburn>0,pcr.scalar(0))) +ldddem = ldddem - outletsburn - connectionsburn - riverburn +ldddem = pcr.cover(ldddem,pcr.scalar(0)) +pcr.report(ldddem, workdir +"dem_burn.map") + +''' create ldd for multi-catchments ''' +ldd = pcr.ldd(empty) +# reproject catchment shape-file +ds = ogr.Open(catchshp) +file_att = 
os.path.splitext(os.path.basename(catchshp))[0] +lyr = ds.GetLayerByName(file_att) +spatialref = lyr.GetSpatialRef() +# if not spatialref == None: +# srs = osr.SpatialReference() +# srs.ImportFromWkt(spatialref.ExportToWkt()) +# srs.AutoIdentifyEPSG() +# catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) +# spatialref == None +# else: +catchshp_EPSG = clone_EPSG +print 'No projection defined for ' + file_att + '.shp' +print 'Assumed to be the same as model projection (' + clone_EPSG + ')' + +if not rivshp_EPSG == clone_EPSG: + catchprojshp = workdir + 'catchshape_proj.shp' + call(('ogr2ogr','-s_srs',catchshp_EPSG,'-t_srs',clone_ESPG,catchprojshp,catchshp)) + catchshp = catchprojshp +ds.Destroy() + +ds = ogr.Open(catchshp) +file_att = os.path.splitext(os.path.basename(catchshp))[0] +lyr = ds.GetLayerByName(file_att) + +fieldDef = ogr.FieldDefn("ID", ogr.OFTString) +fieldDef.SetWidth(12) +TEMP_out = Driver.CreateDataSource(workdir + "temp.shp") +if not srs == None: + TEMP_LYR = TEMP_out.CreateLayer("temp", srs,geom_type=ogr.wkbMultiPolygon) +else: TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon) +TEMP_LYR.CreateField(fieldDef) + +for i in range(lyr.GetFeatureCount()): + orgfeature = lyr.GetFeature(i) + geometry = orgfeature.geometry() + feature = ogr.Feature(TEMP_LYR.GetLayerDefn()) + feature.SetGeometry(geometry) + feature.SetField("ID",str(i+1)) + TEMP_LYR.CreateFeature(feature) +TEMP_out.Destroy() +ds.Destroy + +# rasterize catchment map +catchments_tif = workdir + "catchments.tif" +catchments_map = workdir + "catchments.map" +call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,catchments_tif)) +if alltouching: + call(('gdal_rasterize','-at','-a','ID','-l',"temp",workdir + 'temp.shp',catchments_tif)) +else: + call(('gdal_rasterize','-a','ID','-l',"temp",workdir + 'temp.shp',catchments_tif)) +call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,catchments_tif,catchments_map)) +catchments = pcr.readmap(catchments_map) 
+riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn))) +rivercatch = pcr.areamajority(pcr.ordinal(catchments),riverunique) +#catchments = pcr.cover(pcr.ordinal(rivercatch),pcr.ordinal(pcr.ifthen(catchments > 0, catchments)),pcr.ordinal(0)) +catchments = pcr.cover(pcr.ifthen(catchments > 0, + pcr.ordinal(catchments)), + pcr.ifthen(riverburn>0, + pcr.ordinal(pcr.spreadzone(pcr.nominal(catchments), + pcr.ifthen(riverburn>0, + pcr.scalar(1)), + 1)))) +rivercatch_map = workdir + "catchments_river.map" +catchclip_map = workdir + "catchments_clip.map" +pcr.report(rivercatch,rivercatch_map) +pcr.report(catchments,catchclip_map) + +ds = ogr.Open(workdir + "temp.shp") +lyr = ds.GetLayerByName("temp") + +print 'calculating ldd' +for i in range(lyr.GetFeatureCount()): + feature = lyr.GetFeature(i) + catch = int(feature.GetField("ID")) + print "calculating ldd for catchment: " + str(i+1) + "/" + str(lyr.GetFeatureCount()) + "...." + ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch, catchments)) * 0 + 1 * ldddem + ldd_select=pcr.lddcreate(ldddem_select,float("1E35"),float("1E35"),float("1E35"),float("1E35")) + ldd=pcr.cover(ldd,ldd_select) +pcr.report(ldd,resultdir + ldd_map) +ds.Destroy() + +''' report stream order, river and dem ''' +streamorder = pcr.ordinal(pcr.streamorder(ldd)) +river = pcr.ifthen(streamorder >= pcr.ordinal(minorder),pcr.boolean(1)) +mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map),9999999))) +dem_resample_map = pcr.cover(dem_resample_map,pcr.scalar(river)*0+mindem) +pcr.report(dem_resample_map, resultdir + dem_map) +pcr.report(streamorder,resultdir+streamorder_map) +pcr.report(river,resultdir+river_map) + +''' deal with your catchments ''' +if gaugeshp == None: + print 'No gauges defined, using outlets instead' + gauges = pcr.ordinal(pcr.uniqueid(pcr.boolean(pcr.ifthen(pcr.scalar(ldd)==5,pcr.boolean(1))))) + pcr.report(gauges,resultdir+gauges_map) +# ds = ogr.Open(gaugeshp) +# file_att = 
os.path.splitext(os.path.basename(gaugeshp))[0] +# lyr = ds.GetLayerByName(file_att) +# spatialref = lyr.GetSpatialRef() +## if not spatialref == None: +## srs = osr.SpatialReference() +## srs.ImportFromWkt(spatialref.ExportToWkt()) +## srs.AutoIdentifyEPSG() +## gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) +## spatialref == None +# #else: +# gaugeshp_EPSG = clone_EPSG +# print 'No projection defined for ' + file_att + '.shp' +# print 'Assumed to be the same as model projection (' + clone_EPSG + ')' +# +# # reproject gauge shape if necesarry +# if not gaugeshp_EPSG == clone_EPSG: +# gaugeprojshp = workdir + 'gaugeshape_proj.shp' +# call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_ESPG,gaugeprojshp,gaugeshp)) +# gaugeshp = gaugeprojshp +# +# file_att = os.path.splitext(os.path.basename(gaugeshp))[0] +# gaugestif = workdir + file_att + '.tif' +# gaugesmap = workdir + file_att + '.map' +# call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,gaugestif)) +# call(('gdal_rasterize','-burn','1','-l',file_att,gaugeshp,gaugestif)) +# call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,gaugestif,gaugesmap)) +# gaugelocs = pcr.readmap(gaugesmap) +# snapgaugestoriver = True +# +# if snapgaugestoriver: +# print "Snapping gauges to river" +# gauges = pcr.uniqueid(pcr.boolean(gaugelocs)) +# gauges= wt.snaptomap(pcr.ordinal(gauges),river) +# +# gaugesmap = pcr.ifthen(gauges > 0, gauges) + + +''' report riverlengthfrac ''' +riv_hr = workdir + 'river_highres.tif' +wt.CreateTif(riv_hr,rows_hr,cols_hr,hr_trans,srs_clone,0) +file_att = os.path.splitext(os.path.basename(rivshp))[0] +call(('gdal_rasterize','-burn','1','-l',file_att,rivshp,riv_hr)) +print 'Computing river length...' 
+#riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform) +riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac') + +''' report outlet map ''' +pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)),resultdir+outlet_map) + +''' report map ''' +catchment = pcr.ifthen(catchments > 0,pcr.ordinal(1)) +pcr.report(catchment,resultdir+catchment_map) + +''' report subcatchment map ''' +subcatchment = pcr.subcatchment(ldd,gauges) +pcr.report(pcr.ordinal(subcatchment),resultdir+subcatch_map) + +''' report landuse map ''' +if landuse == None: + pcr.report(pcr.nominal(ones),resultdir+landuse_map) +else: + landuse_resample = workdir + 'landuse.tif' + landuse_map = resultdir+ landuse_map + transform = wt.GetRasterTranform(landuse,srs_clone) + if not transform[0]: + call(('gdalwarp','-overwrite','-s_srs',clone_EPSG,'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',landuse, landuse_resample)) + else: + call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',landuse, landuse_resample)) + call(('gdal_translate','-of','PCRaster','-ot','Float32', landuse_resample,landuse_map)) + landuse_work = pcr.readmap(landuse_map) + pcr.report(pcr.nominal(landuse_work),landuse_map) + +''' report soil map ''' +if soiltype == None: + pcr.report(pcr.nominal(ones),resultdir+soil_map) +else: + soiltype_resample = workdir + 'soiltype.tif' + soil_map = resultdir+soil_map + #transform = wt.GetRasterTranform(soiltype,srs_clone) +# if not transform[0]: + call(('gdalwarp','-overwrite','-s_srs',clone_EPSG,'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',soiltype, soiltype_resample)) +# else: +# call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, 
# -*- coding: utf-8 -*-
"""
create_grid: build a regular model grid.

Derives the grid either from the extent of a template file (-f, an ESRI
shapefile or any GDAL-supported raster) or from an explicit WGS 1984 lat-lon
extent (-e), and writes to the destination folder:
- mask.map  (PCRaster clone)
- mask.tif  (GeoTiff clone)
- grid.xml  (Delft-FEWS grid definition)
- mask.shp  (bounding-box polygon)

Created on Wed Jul 08 16:12:29 2015

@author: winsemi

$Id: create_grid.py 1126 2015-11-18 16:28:37Z winsemi $
"""

# import sys packages
from hydrotools import gis
import sys
import os
import shutil
# import admin packages
from optparse import OptionParser

# import general packages
import numpy as np
from osgeo import osr, gdal
from osgeo import ogr
from lxml import etree
import pyproj
# import specific packages
import wtools_lib


def main():
    """Parse command-line options, create the grid and write all outputs."""
    ### Read input arguments #####
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q', '--quiet',
                      dest='verbose', default=True, action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-f', '--file', dest='inputfile', nargs=1,
                      help='file of which extent will be read. Most logically the catchment layer\nformat: ESRI Shapefile or any gdal supported raster format (preferred GeoTiff)')
    parser.add_option('-e', '--extent',
                      nargs=4, dest='extent', type='float',
                      help='extent in WGS 1984 lat-lon (xmin, ymin, xmax, ymax)')
    parser.add_option('-l', '--logfile',
                      dest='logfilename', default='wtools_create_grid.log',
                      help='log file name')
    parser.add_option('-p', '--projection',
                      dest='projection', default='EPSG:4326',
                      help='Only used if no file is provided, either of type EPSG:<####> or +proj...')
    # BUGFIX: help text for -c wrongly said 'extent'
    parser.add_option('-c', '--cellsize', type='float',
                      nargs=1, dest='cellsize', default=0.005,
                      help='cell size of the grid')
    parser.add_option('-s', '--snap',
                      dest='snap', default=False, action='store_true',
                      help='Snaps grid extents to a multiple of the resolution')
    parser.add_option('-d', '--destination',
                      dest='destination', default='wflow',
                      help='Destination folder (default=./wflow)')
    (options, args) = parser.parse_args()

    ##### Preprocessing #####
    # Either a template file or an explicit extent is required.
    # BUGFIX: the original fed options.inputfile straight into
    # os.path.isfile, which raises TypeError when -f is omitted, and called
    # parser.error (which already exits) before print_help.
    if ((options.inputfile is None or not os.path.isfile(options.inputfile))
            and options.extent is None):
        parser.print_help()
        parser.error('No file or extent given')  # exits with status 2

    # open a logger, dependent on verbose print to screen or not
    # BUGFIX: honour the -l/--logfile option (a hard-coded name was used)
    logger, ch = wtools_lib.setlogger(options.logfilename, 'WTOOLS',
                                      options.verbose)

    # delete old files
    if os.path.isdir(options.destination):
        shutil.rmtree(options.destination)
    os.makedirs(options.destination)

    ### Get information ####
    if options.inputfile is not None:
        # retrieve extent from input file. Check if projection is provided
        file_ext = os.path.splitext(os.path.basename(options.inputfile))[1]
        if file_ext == '.shp':
            file_att = os.path.splitext(
                os.path.basename(options.inputfile))[0]
            ds = ogr.Open(options.inputfile)
            # read the extent of the shapefile
            lyr = ds.GetLayerByName(file_att)
            extent = lyr.GetExtent()
            # OGR returns (xmin, xmax, ymin, ymax); reorder to x/y min/max
            extent_in = [extent[0], extent[2], extent[1], extent[3]]
            # get spatial reference from shapefile
            srs = lyr.GetSpatialRef()
        else:
            # read extent from a GDAL-compatible raster
            try:
                extent_in = wtools_lib.get_extent(options.inputfile)
            except:
                msg = 'Input file {:s} not a shape or gdal file'.format(options.inputfile)
                wtools_lib.close_with_error(logger, ch, msg)
                sys.exit(1)
            # get spatial reference from grid file
            try:
                srs = wtools_lib.get_projection(options.inputfile)
            except:
                logger.warning('No projection found, assuming WGS 1984 lat long')
                srs = osr.SpatialReference()
                srs.ImportFromEPSG(4326)
    else:
        lonmin, latmin, lonmax, latmax = options.extent
        srs_4326 = osr.SpatialReference()
        srs_4326.ImportFromEPSG(4326)
        srs = osr.SpatialReference()
        if options.projection is not None:
            # import projection as an srs object
            if options.projection.lower()[0:4] == 'epsg':
                srs.ImportFromEPSG(int(options.projection[5:]))
            elif options.projection.lower()[0:5] == '+proj':
                srs.ImportFromProj4(options.projection)
            else:
                msg = 'Projection "{:s}" is not a valid projection'.format(options.projection)
                wtools_lib.close_with_error(logger, ch, msg)
                # BUGFIX: do not continue with an empty srs (consistent with
                # the other fatal-error path above)
                sys.exit(1)
        else:
            logger.warning('No projection found, assuming WGS 1984 lat long')
            srs.ImportFromEPSG(4326)
        # project the extent corners to the selected projection
        xmin, ymin = pyproj.transform(
            pyproj.Proj(srs_4326.ExportToProj4()),
            pyproj.Proj(srs.ExportToProj4()), lonmin, latmin)
        xmax, ymax = pyproj.transform(
            pyproj.Proj(srs_4326.ExportToProj4()),
            pyproj.Proj(srs.ExportToProj4()), lonmax, latmax)
        extent_in = [xmin, ymin, xmax, ymax]

    # srs known, extent known, prepare UTM or WGS string for grid.xml
    logger.info('Projection "{:s}" used'.format(srs.ExportToProj4()))
    if srs.IsProjected():
        utm = srs.GetUTMZone()
        if utm < 0:
            hemisphere = 'S'
        else:
            hemisphere = 'N'
        geodatum = 'UTM{:d}{:s}'.format(np.abs(utm), hemisphere)
    else:
        geodatum = 'WGS 1984'

    if options.snap:
        logger.info('Snapping raster')
        # number of decimals of the cell size, used as snapping precision
        snap = len(str(options.cellsize - np.floor(options.cellsize))) - 2
        extent_out = wtools_lib.round_extent(extent_in, options.cellsize,
                                             snap)
    else:
        extent_out = extent_in
    cols = int((extent_out[2] - extent_out[0]) / options.cellsize)
    rows = int((extent_out[3] - extent_out[1]) / options.cellsize)
    cells = rows * cols
    xorg = extent_out[0]
    yorg = extent_out[3]

    # create clone raster (filled with the -9999 missing value)
    print('rows: {0} cols: {1}'.format(rows, cols))
    dummy_raster = np.zeros((rows, cols)) - 9999.
    clone_file_map = os.path.abspath(
        os.path.join(options.destination, 'mask.map'))
    clone_file_tif = os.path.abspath(
        os.path.join(options.destination, 'mask.tif'))
    logger.info('Writing PCRaster clone to {:s}'.format(clone_file_map))
    gis.gdal_writemap(clone_file_map, 'PCRaster',
                      xorg, yorg, dummy_raster,
                      -9999., resolution=options.cellsize,
                      srs=srs)
    logger.info('Writing Geotiff clone to {:s}'.format(clone_file_tif))
    gis.gdal_writemap(clone_file_tif, 'GTiff',
                      xorg, yorg, dummy_raster,
                      -9999., resolution=options.cellsize,
                      zlib=True, srs=srs)

    # create grid.xml (Delft-FEWS grid definition)
    root = etree.Element('regular', locationId='wflow_mask')
    etree.SubElement(root, 'rows').text = str(rows)
    etree.SubElement(root, 'columns').text = str(cols)
    etree.SubElement(root, 'geoDatum').text = geodatum
    etree.SubElement(root, 'firstCellCenter')
    etree.SubElement(root[3], 'x').text = str(xorg + 0.5 * options.cellsize)
    etree.SubElement(root[3], 'y').text = str(yorg - 0.5 * options.cellsize)
    etree.SubElement(root, 'xCellSize').text = str(options.cellsize)
    etree.SubElement(root, 'yCellSize').text = str(options.cellsize)
    xml_file = os.path.abspath(os.path.join(options.destination, 'grid.xml'))
    logger.info('Writing FEWS grid definition to {:s}'.format(xml_file))
    gridxml = open(xml_file, 'w+')
    gridxml.write(etree.tostring(root, pretty_print=True))
    gridxml.close()

    # create the bounding-box shape file
    Driver = ogr.GetDriverByName("ESRI Shapefile")
    shp_file = os.path.abspath(os.path.join(options.destination, 'mask.shp'))
    logger.info('Writing shape of clone to {:s}'.format(shp_file))
    shp_att = os.path.splitext(os.path.basename(shp_file))[0]
    shp = Driver.CreateDataSource(shp_file)
    lyr = shp.CreateLayer(shp_att, srs, geom_type=ogr.wkbPolygon)
    fieldDef = ogr.FieldDefn('ID', ogr.OFTString)
    fieldDef.SetWidth(12)
    lyr.CreateField(fieldDef)
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint(xorg, yorg)
    ring.AddPoint(xorg + cols * options.cellsize, yorg)
    ring.AddPoint(xorg + cols * options.cellsize,
                  yorg - rows * options.cellsize)
    ring.AddPoint(xorg, yorg - rows * options.cellsize)
    ring.AddPoint(xorg, yorg)
    # BUGFIX: was `ring.CloseRings` without parentheses -- a no-op
    ring.CloseRings()
    polygon = ogr.Geometry(ogr.wkbPolygon)
    polygon.AddGeometry(ring)
    feat_out = ogr.Feature(lyr.GetLayerDefn())
    feat_out.SetGeometry(polygon)
    feat_out.SetField('ID', 'wflow_mask')
    lyr.CreateFeature(feat_out)
    shp.Destroy()
    logger.info('Model contains {:d} cells'.format(cells))
    if cells > 5000000:
        logger.warning('With this amount of cells your model will run VERY slow.\nConsider a larger cell-size.\nFast models run with < 1,000,000 cells')
    elif cells > 1000000:
        logger.warning('With this amount of cells your model will run slow.\nConsider a larger cell-size. Fast models run with < 1,000,000 cells')
    logger, ch = wtools_lib.closeLogger(logger, ch)
    del logger, ch
    # BUGFIX: removed the unconditional sys.exit(1), which reported failure
    # to the shell even after a fully successful run


if __name__ == "__main__":
    main()
def clip_catchment_by_cell(cell_geom, catchment_geom):
    """Return the geometric intersection of a grid cell and a catchment.

    Works with any pair of objects whose first member exposes an
    ``intersection`` method (e.g. ``osgeo.ogr.Geometry`` instances).
    """
    clipped = cell_geom.intersection(catchment_geom)
    return clipped
static maps folder when finished') + parser.add_option('-A', '--alltouch', + dest='alltouch', default=False, action='store_true', + help='option to burn catchments "all touching".\nUseful when catchment-size is small compared to cellsize') + (options, args) = parser.parse_args() + # parse other maps into an array + options.other_maps = options.other_maps.replace(' ', '').replace('[', '').replace(']', '').split(',') + + options.source = os.path.abspath(options.source) + clone_map = os.path.join(options.source, 'mask.map') + clone_shp = os.path.join(options.source, 'mask.shp') + clone_prj = os.path.join(options.source, 'mask.prj') + + if None in (options.inifile, + options.rivshp, + options.catchshp, + options.dem_in): + msg = """The following files are compulsory: + - ini file + - DEM (raster) + - river (shape) + - catchment (shape) + """ + print(msg) + parser.print_help() + sys.exit(1) + if not os.path.exists(options.inifile): + print 'path to ini file cannot be found' + sys.exit(1) + if not os.path.exists(options.rivshp): + print 'path to river shape cannot be found' + sys.exit(1) + if not os.path.exists(options.catchshp): + print 'path to catchment shape cannot be found' + sys.exit(1) + if not os.path.exists(options.dem_in): + print 'path to DEM cannot be found' + sys.exit(1) + + + # open a logger, dependent on verbose print to screen or not + logger, ch = wtools_lib.setlogger(logfilename, 'WTOOLS', options.verbose) + + # create directories # TODO: check if workdir is still necessary, try to keep in memory as much as possible + + # delete old files (when the source and destination folder are different) + if np.logical_and(os.path.isdir(options.destination), + options.destination is not options.source): + shutil.rmtree(options.destination) + if options.destination is not options.source: + os.makedirs(options.destination) + + # Read mask + if not(os.path.exists(clone_map)): + logger.error('Clone file {:s} not found. 
Please run create_grid first.'.format(clone_map)) + sys.exit(1) + else: + # set clone + pcr.setclone(clone_map) + # get the extent from clone.tif + xax, yax, clone, fill_value = gis.gdal_readmap(clone_map, 'GTiff') + trans = wtools_lib.get_geotransform(clone_map) + extent = wtools_lib.get_extent(clone_map) + xmin, ymin, xmax, ymax = extent + zeros = np.zeros(clone.shape) + ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999) + # get the projection from clone.tif + srs = wtools_lib.get_projection(clone_map) + unit_clone = srs.GetAttrValue('UNIT').lower() + + ### READ CONFIG FILE + # open config-file + config=wtools_lib.OpenConf(options.inifile) + + # read settings + snapgaugestoriver = wtools_lib.configget(config, 'settings', + 'snapgaugestoriver', + True, datatype='boolean') + burnalltouching = wtools_lib.configget(config, 'settings', + 'burncatchalltouching', + True, datatype='boolean') + burninorder = wtools_lib.configget(config, 'settings', + 'burncatchalltouching', + False, datatype='boolean') + verticetollerance = wtools_lib.configget(config, 'settings', + 'vertice_tollerance', + 0.0001, datatype='float') + + ''' read parameters ''' + burn_outlets = wtools_lib.configget(config, 'parameters', + 'burn_outlets', 10000, + datatype='int') + burn_rivers = wtools_lib.configget(config, 'parameters', + 'burn_rivers', 200, + datatype='int') + burn_connections = wtools_lib.configget(config, 'parameters', + 'burn_connections', 100, + datatype='int') + burn_gauges = wtools_lib.configget(config, 'parameters', + 'burn_gauges', 100, + datatype='int') + minorder = wtools_lib.configget(config, 'parameters', + 'riverorder_min', 3, + datatype='int') + percentiles = np.array( + config.get('parameters', 'statisticmaps', '0, 100').replace( + ' ', '').split(','), dtype='float') + + # read the parameters for generating a temporary very high resolution grid + if unit_clone == 'degree': + cellsize_hr = wtools_lib.configget(config, 'parameters', + 'highres_degree', 0.0005, + 
datatype='float') + elif (unit_clone == 'metre') or (unit_clone == 'meter'): + cellsize_hr = wtools_lib.configget(config, 'parameters', + 'highres_metre', 50, + datatype='float') + + cols_hr = int((float(xmax)-float(xmin))/cellsize_hr + 2) + rows_hr = int((float(ymax)-float(ymin))/cellsize_hr + 2) + hr_trans = (float(xmin), cellsize_hr, float(0), + float(ymax), 0, -cellsize_hr) + clone_hr = os.path.join(options.destination, 'clone_highres.tif') + # make a highres clone as well! + wtools_lib.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0) + + # read staticmap locations + catchment_map = wtools_lib.configget(config, 'staticmaps', + 'catchment', 'wflow_catchment.map') + dem_map = wtools_lib.configget(config, 'staticmaps', + 'dem', 'wflow_dem.map') + demmax_map = wtools_lib.configget(config, 'staticmaps', + 'demmax', 'wflow_demmax.map') + demmin_map = wtools_lib.configget(config, 'staticmaps', + 'demmin', 'wflow_demmin.map') + gauges_map = wtools_lib.configget(config, 'staticmaps', + 'gauges', 'wflow_gauges.map') + landuse_map = wtools_lib.configget(config, 'staticmaps', + 'landuse', 'wflow_landuse.map') + ldd_map = wtools_lib.configget(config, 'staticmaps', + 'ldd', 'wflow_ldd.map') + river_map = wtools_lib.configget(config, 'staticmaps', + 'river', 'wflow_river.map') + outlet_map = wtools_lib.configget(config, 'staticmaps', + 'outlet', 'wflow_outlet.map') + riverlength_fact_map = wtools_lib.configget(config, 'staticmaps', + 'riverlength_fact', + 'wflow_riverlength_fact.map') + soil_map = wtools_lib.configget(config, 'staticmaps', + 'soil', 'wflow_soil.map') + streamorder_map = wtools_lib.configget(config, 'staticmaps', + 'streamorder', + 'wflow_streamorder.map') + subcatch_map = wtools_lib.configget(config, 'staticmaps', + 'subcatch', 'wflow_subcatch.map') + + # read mask location (optional) + masklayer = wtools_lib.configget(config, 'mask', 'masklayer', options.catchshp) + + + # ???? 
empty = pcr.ifthen(ones == 0, pcr.scalar(0)) + + # TODO: check if extents are correct this way + # TODO: check what the role of missing values is in zeros and ones (l. 123 in old code) + + # first add a missing value to dem_in + ds = gdal.Open(options.dem_in, gdal.GA_Update) + RasterBand = ds.GetRasterBand(1) + fill_val = RasterBand.GetNoDataValue() + + if fill_val is None: + RasterBand.SetNoDataValue(-9999) + ds = None + + # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python + # resample DEM + logger.info('Resampling dem from {:s} to {:s}'.format(os.path.abspath(options.dem_in), os.path.join(options.destination, dem_map))) + gis.gdal_warp(options.dem_in, clone_map, os.path.join(options.destination, dem_map), format='PCRaster', gdal_interp=gdalconst.GRA_Average) + # retrieve amount of rows and columns from clone + # TODO: make windowstats applicable to source/target with different projections. This does not work yet. 
+ # retrieve srs from DEM + try: + srs_dem = wtools_lib.get_projection(options.dem_in) + except: + logger.warning('No projection found in DEM, assuming WGS 1984 lat long') + srs_dem = osr.SpatialReference() + srs_dem.ImportFromEPSG(4326) + clone2dem_transform = osr.CoordinateTransformation(srs,srs_dem) + #if srs.ExportToProj4() == srs_dem.ExportToProj4(): + for percentile in percentiles: + if percentile >= 100: + logger.info('computing window maximum') + percentile_dem = os.path.join(options.destination, 'wflow_dem_max.map') + elif percentile <= 0: + logger.info('computing window minimum') + percentile_dem = os.path.join(options.destination, 'wflow_dem_min.map') + else: + logger.info('computing window {:d} percentile'.format(int(percentile))) + percentile_dem = os.path.join(options.destination, 'wflow_dem_{:03d}.map'.format(int(percentile))) + + percentile_dem = os.path.join(options.destination, 'wflow_dem_{:03d}.map'.format(int(percentile))) + stats = wtools_lib.windowstats(options.dem_in, len(yax), len(xax), + trans, srs, percentile_dem, percentile, transform=clone2dem_transform,logger=logger) +# else: +# logger.warning('Projections of DEM and clone are different. 
DEM statistics for different projections is not yet implemented') + + """ + + # burn in rivers + # first convert and clip the river shapefile + # retrieve river shape projection, if not available assume EPSG:4326 + file_att = os.path.splitext(os.path.basename(options.rivshp))[0] + ds = ogr.Open(options.rivshp) + lyr = ds.GetLayerByName(file_att) + extent = lyr.GetExtent() + extent_in = [extent[0], extent[2], extent[1], extent[3]] + try: + # get spatial reference from shapefile + srs_rivshp = lyr.GetSpatialRef() + logger.info('Projection in river shapefile is {:s}'.format(srs_rivshp.ExportToProj4())) + except: + logger.warning('No projection found in {:s}, assuming WGS 1984 lat-lon'.format(options.rivshp)) + srs_rivshp = osr.SpatialReference() + srs_rivshp.ImportFromEPSG(4326) + rivprojshp = os.path.join(options.destination, 'rivshp_proj.shp') + logger.info('Projecting and clipping {:s} to {:s}'.format(options.rivshp, rivprojshp)) + # TODO: Line below takes a very long time to process, the bigger the shapefile, the more time. How do we deal with this? + call(('ogr2ogr','-s_srs', srs_rivshp.ExportToProj4(),'-t_srs', srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin), '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax), rivprojshp, options.rivshp)) + """ + + # TODO: BURNING!! 
+ + + # project catchment layer to projection of clone + file_att = os.path.splitext(os.path.basename(options.catchshp))[0] + print options.catchshp + ds = ogr.Open(options.catchshp) + lyr = ds.GetLayerByName(file_att) + extent = lyr.GetExtent() + extent_in = [extent[0], extent[2], extent[1], extent[3]] + try: + # get spatial reference from shapefile + srs_catchshp = lyr.GetSpatialRef() + logger.info('Projection in catchment shapefile is {:s}'.format(srs_catchshp.ExportToProj4())) + except: + logger.warning('No projection found in {:s}, assuming WGS 1984 lat-lon'.format(options.catchshp)) + srs_catchshp = osr.SpatialReference() + srs_catchshp.ImportFromEPSG(4326) + catchprojshp = os.path.join(options.destination, 'catchshp_proj.shp') + logger.info('Projecting {:s} to {:s}'.format(options.catchshp, catchprojshp)) + call(('ogr2ogr','-s_srs', srs_catchshp.ExportToProj4(),'-t_srs', srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin), '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax), catchprojshp, options.catchshp)) + + # + logger.info('Calculating ldd') + ldddem = pcr.readmap(os.path.join(options.destination, dem_map)) + ldd_select=pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35) + pcr.report(ldd_select, os.path.join(options.destination, 'wflow_ldd.map')) + + # compute stream order, identify river cells + streamorder = pcr.ordinal(pcr.streamorder(ldd_select)) + river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1)) + # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out! 
+ # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(options.destination, dem_map)),9999999))) + # dem_resample_map = pcr.cover(os.path.join(options.destination, dem_map), pcr.scalar(river)*0+mindem) + # pcr.report(dem_resample_map, os.path.join(options.destination, dem_map)) + pcr.report(streamorder, os.path.join(options.destination, streamorder_map)) + pcr.report(river, os.path.join(options.destination, river_map)) + + # deal with your catchments + if options.gaugeshp == None: + logger.info('No gauges defined, using outlets instead') + gauges = pcr.ordinal( + pcr.uniqueid( + pcr.boolean( + pcr.ifthen(pcr.scalar(ldd_select)==5, + pcr.boolean(1) + ) + ) + ) + ) + pcr.report(gauges, os.path.join(options.destination, gauges_map)) + # TODO: Add the gauge shape code from StaticMaps.py (line 454-489) + # TODO: add river length map (see SticMaps.py, line 492-499) + + # report river length + # make a high resolution empty map + dem_hr_file = os.path.join(options.destination, 'dem_highres.tif') + burn_hr_file = os.path.join(options.destination, 'burn_highres.tif') + demburn_hr_file = os.path.join(options.destination, 'demburn_highres.map') + riv_hr_file = os.path.join(options.destination, 'riv_highres.map') + gis.gdal_warp(options.dem_in, clone_hr, dem_hr_file) + # wtools_lib.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0) + file_att = os.path.splitext(os.path.basename(options.rivshp))[0] + # open the shape layer + ds = ogr.Open(options.rivshp) + lyr = ds.GetLayerByName(file_att) + gis.ogr_burn(lyr, clone_hr, -100, file_out=burn_hr_file, + format='GTiff', gdal_type=gdal.GDT_Float32, fill_value=0) + # read dem and burn values and add + xax_hr, yax_hr, burn_hr, fill = gis.gdal_readmap(burn_hr_file, 'GTiff') + burn_hr[burn_hr==fill] = 0 + xax_hr, yax_hr, dem_hr, fill = gis.gdal_readmap(dem_hr_file, 'GTiff') + dem_hr[dem_hr==fill] = np.nan + demburn_hr = dem_hr + burn_hr + demburn_hr[np.isnan(demburn_hr)] = -9999 + gis.gdal_writemap(demburn_hr_file, 'PCRaster', 
xax_hr, yax_hr, demburn_hr, -9999.) + pcr.setclone(demburn_hr_file) + demburn_hr = pcr.readmap(demburn_hr_file) + ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35) + pcr.report(ldd_hr, os.path.join(options.destination, 'ldd_hr.map')) + pcr.setglobaloption('unitcell') + riv_hr = pcr.scalar(pcr.streamorder(ldd_hr) >= minorder)*pcr.downstreamdist(ldd_hr) + pcr.report(riv_hr, riv_hr_file) + pcr.setglobaloption('unittrue') + pcr.setclone(clone_map) + logger.info('Computing river length') + #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform) + riverlength = wtools_lib.windowstats(riv_hr_file, len(yax), len(xax), + trans, srs, os.path.join(options.destination, riverlength_fact_map), stat='fact', logger=logger) + # TODO: nothing happends with the river lengths yet. Need to decide how to use these + + # report outlet map + pcr.report(pcr.ifthen(pcr.ordinal(ldd_select)==5, pcr.ordinal(1)), os.path.join(options.destination, outlet_map)) + + # report subcatchment map + subcatchment = pcr.subcatchment(ldd_select, gauges) + pcr.report(pcr.ordinal(subcatchment), os.path.join(options.destination, subcatch_map)) + + # Report land use map + if options.landuse == None: + logger.info('No land use map used. Preparing {:s} with only ones.'. + format(os.path.join(options.destination, landuse_map))) + pcr.report(pcr.nominal(ones), os.path.join(options.destination, landuse_map)) + else: + logger.info('Resampling land use from {:s} to {:s}'. + format(os.path.abspath(options.landuse), + os.path.join(options.destination, os.path.abspath(landuse_map)))) + gis.gdal_warp(options.landuse, + clone_map, + os.path.join(options.destination, landuse_map), + format='PCRaster', + gdal_interp=gdalconst.GRA_Mode, + gdal_type=gdalconst.GDT_Int32) + + # report soil map + if options.soil == None: + logger.info('No soil map used. Preparing {:s} with only ones.'. 
+ format(os.path.join(options.destination, soil_map))) + pcr.report(pcr.nominal(ones), os.path.join(options.destination, soil_map)) + else: + logger.info('Resampling soil from {:s} to {:s}'. + format(os.path.abspath(options.soil), + os.path.join(options.destination, os.path.abspath(soil_map)))) + gis.gdal_warp(options.soil, + clone_map, + os.path.join(options.destination, soil_map), + format='PCRaster', + gdal_interp=gdalconst.GRA_Mode, + gdal_type=gdalconst.GDT_Int32) + + if options.lai == None: + logger.info('No vegetation LAI maps used. Preparing default maps {:s} with only ones.'. + format(os.path.join(options.destination, soil_map))) + pcr.report(pcr.nominal(ones), os.path.join(options.destination, soil_map)) + else: + dest_lai = os.path.join(options.destination, 'clim') + os.makedirs(dest_lai) + for month in range(12): + lai_in = os.path.join(options.lai, 'LAI00000.{:03d}'.format(month + 1)) + lai_out = os.path.join(dest_lai, 'LAI00000.{:03d}'.format(month + 1)) + logger.info('Resampling vegetation LAI from {:s} to {:s}'. + format(os.path.abspath(lai_in), + os.path.abspath(lai_out))) + gis.gdal_warp(lai_in, + clone_map, + lai_out, + format='PCRaster', + gdal_interp=gdalconst.GRA_Bilinear, + gdal_type=gdalconst.GDT_Float32) + + # report soil map + if options.other_maps == None: + logger.info('No other maps used. Skipping other maps.') + else: + logger.info('Resampling list of other maps...') + for map_file in options.other_maps: + map_name = os.path.split(map_file)[1] + logger.info('Resampling a map from {:s} to {:s}'. 
+ format(os.path.abspath(map_file), + os.path.join(options.destination, map_name))) + gis.gdal_warp(map_file, + clone_map, + os.path.join(options.destination, map_name), + format='PCRaster', + gdal_interp=gdalconst.GRA_Mode, + gdal_type=gdalconst.GDT_Float32) + + + if options.clean: + wtools_lib.DeleteList(glob.glob(os.path.join(options.destination, '*.xml')), + logger=logger) + wtools_lib.DeleteList(glob.glob(os.path.join(options.destination, 'clim', '*.xml')), + logger=logger) + wtools_lib.DeleteList(glob.glob(os.path.join(options.destination, '*highres*')), + logger=logger) + + +if __name__ == "__main__": + main() + Index: wflow-py/wflow/wtools_py/wflowtools_lib.py =================================================================== diff -u --- wflow-py/wflow/wtools_py/wflowtools_lib.py (revision 0) +++ wflow-py/wflow/wtools_py/wflowtools_lib.py (revision e24525b8bd3b87f5d4d432f6c704e7fe4dc74c8b) @@ -0,0 +1,827 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Aug 12 16:07:41 2014 + +@author: tollena +""" + +import numpy as np +import ConfigParser +from subprocess import call +import copy +import pcraster as pcr +import sys +import logging +import logging.handlers + +from osgeo import ogr +from osgeo import gdal +from osgeo.gdalconst import * +from osgeo import osr +import os +import numpy as np +import pdb +#srs = osr.SpatialReference() +#srs.ImportFromEPSG(4326) + +Driver = ogr.GetDriverByName("ESRI Shapefile") + +def setlogger(logfilename, logReference, verbose=True): + """ + Set-up the logging system. 
Exit if this fails + """ + try: + #create logger + logger = logging.getLogger(logReference) + logger.setLevel(logging.DEBUG) + ch = logging.handlers.RotatingFileHandler(logfilename,maxBytes=10*1024*1024, backupCount=5) + console = logging.StreamHandler() + console.setLevel(logging.DEBUG) + ch.setLevel(logging.DEBUG) + #create formatter + formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + #add formatter to ch + ch.setFormatter(formatter) + console.setFormatter(formatter) + #add ch to logger + logger.addHandler(ch) + logger.addHandler(console) + logger.debug("File logging to " + logfilename) + return logger, ch + except IOError: + print "ERROR: Failed to initialize logger with logfile: " + logfilename + sys.exit(1) + +def closeLogger(logger, ch): + logger.removeHandler(ch) + ch.flush() + ch.close() + return logger, ch + +def close_with_error(logger, ch, msg): + logger.error(msg) + logger, ch = closeLogger(logger, ch) + del logger, ch + sys.exit(1) + +def OpenConf(fn): + config = ConfigParser.SafeConfigParser() + config.optionxform = str + + if os.path.exists(fn): + config.read(fn) + else: + print "Cannot open config file: " + fn + sys.exit(1) + + return config + +def configget(config, section, var, default, datatype='str'): + """ + + Gets a string from a config file (.ini) and returns a default value if + the key is not found. 
If the key is not found it also sets the value + with the default in the config-file + + Input: + - config - python ConfigParser object + - section - section in the file + - var - variable (key) to get + - default - default value + - datatype='str' - can be set to 'boolean', 'int', 'float' or 'str' + + Returns: + - value (str, boolean, float or int) - either the value from the config file or the default value + + + """ + Def = False + try: + if datatype == 'int': + ret = config.getint(section, var) + elif datatype == 'float': + ret = config.getfloat(section, var) + elif datatype == 'boolean': + ret = config.getboolean(section, var) + else: + ret = config.get(section, var) + except: + Def = True + ret = default + configset(config, section, var, str(default), overwrite=False) + + default = Def + return ret + +def configset(config, section, var, value, overwrite=False): + """ + Sets a string in the in memory representation of the config object + Deos NOT overwrite existing values if overwrite is set to False (default) + + Input: + - config - python ConfigParser object + - section - section in the file + - var - variable (key) to set + - value - the value to set + - overwrite (optional, default is False) + + Returns: + - nothing + + """ + + if not config.has_section(section): + config.add_section(section) + config.set(section,var,value) + else: + if not config.has_option(section,var): + config.set(section,var,value) + else: + if overwrite: + config.set(section, var, value) + +def get_geotransform(filename): + ''' Return geotransform of dataset''' + ds = gdal.Open(filename, GA_ReadOnly) + gt = ds.GetGeoTransform() + return gt + +def get_extent(filename): + ''' Return list of corner coordinates from a dataset''' + ds = gdal.Open(filename, GA_ReadOnly) + gt = ds.GetGeoTransform() + # 'top left x', 'w-e pixel resolution', '0', 'top left y', '0', 'n-s pixel resolution (negative value)' + nx, ny = ds.RasterXSize, ds.RasterYSize + xmin = np.float64(gt[0]) + ymin = 
np.float64(gt[3]) +np.float64(ny) * np.float64(gt[5]) + xmax = np.float64(gt[0]) + np.float64(nx) * np.float64(gt[1]) + ymax = np.float64(gt[3]) + ds = None + return xmin, ymin, xmax, ymax + +def get_projection(filename): + ds = gdal.Open(filename, GA_ReadOnly) + WktString = ds.GetProjection() + srs = osr.SpatialReference() + srs.ImportFromWkt(WktString) + ds = None + return srs + +def round_extent(extent, snap, prec): + """Increases the extent until all sides lie on a coordinate + divideable by 'snap'.""" + xmin, ymin, xmax, ymax = extent + snap = float(snap) # prevent integer division issues + xmin = round(np.ceil(xmin/snap)*snap, prec) + ymin = round(np.ceil(ymin/snap)*snap, prec) + xmax = round(np.floor(xmax/snap)*snap, prec) + ymax = round(np.floor(ymax/snap)*snap, prec) + return xmin, ymin, xmax, ymax + +def DeleteShapes(shapes): + shapelist = list(shapes) + for shape in shapelist: + if os.path.exists(shape): + Driver.DeleteDataSource(shape) + print "shapefile deleted: " + shape + +def MergeShapes(shapesin, Layer): + for SHP in shapesin: + if os.path.exists(SHP): + ATT = os.path.splitext(os.path.basename(SHP))[0] + DATA = ogr.Open(SHP) + LYR = DATA.GetLayerByName(ATT) + LYR.ResetReading() + for idx, i in enumerate(range(LYR.GetFeatureCount())): + oldfeature = LYR.GetFeature(i) + geometry = oldfeature.geometry() + feature = ogr.Feature(Layer.GetLayerDefn()) + feature.SetGeometry(geometry) + feature.SetField("ID",oldfeature.GetFieldAsString(0)) + Layer.CreateFeature(feature) + DATA.Destroy() + +def readMap(fileName, fileFormat): + """ + Read geographical file into memory + """ + # Open file for binary-reading + mapFormat = gdal.GetDriverByName(fileFormat) + mapFormat.Register() + ds = gdal.Open(fileName) + if ds is None: + print 'Could not open ' + fileName + '. Something went wrong!! 
Shutting down' + sys.exit(1) + # Retrieve geoTransform info + geotrans = ds.GetGeoTransform() + originX = geotrans[0] + originY = geotrans[3] + resX = geotrans[1] + resY = geotrans[5] + cols = ds.RasterXSize + rows = ds.RasterYSize + x = np.linspace(originX+resX/2,originX+resX/2+resX*(cols-1),cols) + y = np.linspace(originY+resY/2,originY+resY/2+resY*(rows-1),rows) + # Retrieve raster + RasterBand = ds.GetRasterBand(1) # there's only 1 band, starting from 1 + data = RasterBand.ReadAsArray(0,0,cols,rows) + FillVal = RasterBand.GetNoDataValue() + RasterBand = None + ds = None + return x, y, data, FillVal + +def Reach2Nodes(SHP,EPSG,toll,storedir): + srs = osr.SpatialReference() + if not EPSG == None: + srs.ImportFromEPSG(int(EPSG)) + SHP_ATT = os.path.splitext(os.path.basename(SHP))[0] + END_SHP = storedir + SHP_ATT + "_end.shp" + START_SHP = storedir + SHP_ATT + "_start.shp" + CONN_SHP = storedir + SHP_ATT + "_connection.shp" + + DeleteShapes([END_SHP,START_SHP,CONN_SHP]) + + fieldDef = ogr.FieldDefn("ID", ogr.OFTString) + fieldDef.SetWidth(12) + + END_out = Driver.CreateDataSource(END_SHP) + END_ATT = os.path.splitext(os.path.basename(END_SHP))[0] + if not EPSG == None: + END_LYR = END_out.CreateLayer(END_ATT, srs, geom_type=ogr.wkbPoint) + else: END_LYR = END_out.CreateLayer(END_ATT, geom_type=ogr.wkbPoint) + END_LYR.CreateField(fieldDef) + + START_out = Driver.CreateDataSource(START_SHP) + START_ATT = os.path.splitext(os.path.basename(START_SHP))[0] + if not EPSG == None: + START_LYR = START_out.CreateLayer(START_ATT, srs, geom_type=ogr.wkbPoint) + else: START_LYR = START_out.CreateLayer(START_ATT, geom_type=ogr.wkbPoint) + START_LYR.CreateField(fieldDef) + + CONN_out = Driver.CreateDataSource(CONN_SHP) + CONN_ATT = os.path.splitext(os.path.basename(CONN_SHP))[0] + if not EPSG == None: + CONN_LYR = CONN_out.CreateLayer(CONN_ATT, srs, geom_type=ogr.wkbPoint) + else: CONN_LYR = CONN_out.CreateLayer(CONN_ATT, geom_type=ogr.wkbPoint) + CONN_LYR.CreateField(fieldDef) 
+ + StartCoord = [] + EndCoord = [] + + ATT = os.path.splitext(os.path.basename(SHP))[0] + DATA = ogr.Open(SHP) + LYR = DATA.GetLayerByName(ATT) + LYR.ResetReading() + for i in range(LYR.GetFeatureCount()): + feature = LYR.GetFeature(i) + geometry = feature.geometry() + StartCoord.append([geometry.GetX(0),geometry.GetY(0)]) + points = geometry.GetPointCount() + EndCoord.append([geometry.GetX(points-1),geometry.GetY(points-1)]) + + DATA.Destroy() + + Connections = [[np.nan,np.nan],[np.nan,np.nan]] + + for i in range(len(StartCoord)): + if not True in np.all(np.isclose(StartCoord[i],EndCoord,rtol=0,atol=toll),axis=1): + #point is startpoint + point = ogr.Geometry(ogr.wkbPoint) + point.AddPoint(StartCoord[i][0],StartCoord[i][1]) + feature = ogr.Feature(START_LYR.GetLayerDefn()) + feature.SetGeometry(point) + START_LYR.CreateFeature(feature) + else: + #point is a connection + if not True in np.all(np.isclose(StartCoord[i],Connections,rtol=0,atol=toll),axis=1): + Connections.append(StartCoord[i]) + point = ogr.Geometry(ogr.wkbPoint) + point.AddPoint(StartCoord[i][0],StartCoord[i][1]) + feature = ogr.Feature(CONN_LYR.GetLayerDefn()) + feature.SetGeometry(point) + CONN_LYR.CreateFeature(feature) + if not True in np.all(np.isclose(EndCoord[i],StartCoord,rtol=0,atol=toll),axis=1): + #point is end + point = ogr.Geometry(ogr.wkbPoint) + point.AddPoint(EndCoord[i][0],EndCoord[i][1]) + feature = ogr.Feature(END_LYR.GetLayerDefn()) + feature.SetGeometry(point) + END_LYR.CreateFeature(feature) + else: + #point is a connection + if not True in np.all(np.isclose(EndCoord[i],Connections,rtol=0,atol=toll),axis=1): + Connections.append(EndCoord[i]) + point = ogr.Geometry(ogr.wkbPoint) + point.AddPoint(EndCoord[i][0],EndCoord[i][1]) + feature = ogr.Feature(CONN_LYR.GetLayerDefn()) + feature.SetGeometry(point) + CONN_LYR.CreateFeature(feature) + + END_out.Destroy() + START_out.Destroy() + CONN_out.Destroy() + return START_SHP, END_SHP, CONN_SHP + +def ReachOrder(SHP,EPSG,toll,storedir): 
+ if not EPSG == None: + srs.ImportFromEPSG(int(EPSG)) + SHP_ATT = os.path.splitext(os.path.basename(SHP))[0] + ORDER_SHP = storedir + SHP_ATT + "_order1.shp" + FileOrder = [] + FileOrder.append(ORDER_SHP) + + if os.path.exists(ORDER_SHP): + Driver.DeleteDataSource(ORDER_SHP) + + IDField = ogr.FieldDefn("ID", ogr.OFTString) + IDField.SetWidth(12) + + OrderField = ogr.FieldDefn("ORDER", ogr.OFTString) + OrderField.SetWidth(12) + + StartCoord = [] + EndCoord = [] + + ATT = os.path.splitext(os.path.basename(SHP))[0] + DATA = ogr.Open(SHP) + LYR = DATA.GetLayerByName(ATT) + LYR.ResetReading() + for i in range(LYR.GetFeatureCount()): + feature = LYR.GetFeature(i) + geometry = feature.geometry() + StartCoord.append([geometry.GetX(0),geometry.GetY(0)]) + points = geometry.GetPointCount() + EndCoord.append([geometry.GetX(points-1),geometry.GetY(points-1)]) + EndCoord_np = np.array(EndCoord) + reaches = copy.deepcopy(i)+1 + ReachIDs = range(reaches) + ReachOrders = np.array([None] * len(ReachIDs)) + + order = 1 + ordercoord = copy.deepcopy(EndCoord) + tempcoord = [] + endpoints = 0 + for i in ReachIDs: + ReachStart = StartCoord[i] + ReachEnd = EndCoord[i] + if not True in np.all(np.isclose(ReachStart,ordercoord,rtol=0,atol=toll), axis=1): + ReachOrders[i] = order + tempcoord.append(ReachEnd) + if not True in np.all(np.isclose(ReachEnd,StartCoord,rtol=0,atol=toll),axis=1): + endpoints += 1 + ordercoord = copy.deepcopy(tempcoord) + order += 1 + iterations = 0 + + while None in list(ReachOrders) and iterations <= 100: + iterations += 1 + OrderMove = False + for i in ReachIDs: + if ReachOrders[i] == None: + ReachStart = StartCoord[i] + ReachEnd = EndCoord[i] + OrderSelect = ReachOrders[np.all(np.isclose(ReachStart,EndCoord_np,rtol=0,atol=toll), axis=1)] + if not None in list(OrderSelect): + if all(x == list(OrderSelect)[0] for x in list(OrderSelect)) == True: + OrderMove = True + ReachOrders[i] = order + else: + ReachOrders[i] = int(np.max(OrderSelect)) + if OrderMove: + order 
+= 1 + + if None in list(ReachOrders): + print "Conversion of river to orders failed. Try to use a smaller tollerance" + #sys.exit(1) + + LYR.ResetReading() + + for i in range(1,max(ReachOrders)+1): + order = i + ORDER_SHP = storedir + SHP_ATT + "_order" + str(order)+".shp" + FileOrder.append(ORDER_SHP) + if os.path.exists(ORDER_SHP): + Driver.DeleteDataSource(ORDER_SHP) + ORDER_out = Driver.CreateDataSource(ORDER_SHP) + ORDER_ATT = os.path.splitext(os.path.basename(ORDER_SHP))[0] + if not EPSG == None: + ORDER_LYR = ORDER_out.CreateLayer(ORDER_ATT, srs, geom_type=ogr.wkbLineString) + else: ORDER_LYR = ORDER_out.CreateLayer(ORDER_ATT, geom_type=ogr.wkbLineString) + ORDER_LYR.CreateField(IDField) + ORDER_LYR.CreateField(OrderField) + for j in range(LYR.GetFeatureCount()): + if ReachOrders[j] == order: + orgfeature = LYR.GetFeature(j) + geometry = orgfeature.geometry() + feature = ogr.Feature(ORDER_LYR.GetLayerDefn()) + feature.SetGeometry(geometry) + feature.SetField("ID",str(j)) + feature.SetField("ORDER",str(order)) + ORDER_LYR.CreateFeature(feature) + ORDER_out.Destroy() + + DATA.Destroy() + return FileOrder + +def Burn2Tif(shapes,attribute,TIFF): + for shape in shapes: + shape_att = os.path.splitext(os.path.basename(shape))[0] + os.system("gdal_rasterize -a " + str(attribute) + " -l " + shape_att + " " + shape + " " + TIFF) + +def ReverseMap(MAP): + MAX = int(np.max(pcr.pcr2numpy(MAP,np.NAN))) + REV_MAP = pcr.ordinal(pcr.ifthen(pcr.scalar(MAP) == pcr.scalar(-9999), pcr.scalar(0))) + for i in range(MAX+1): + if i > 0: + print i + REV_MAP = pcr.cover(pcr.ifthen(pcr.ordinal(MAP) == pcr.ordinal(i), pcr.ordinal(pcr.scalar(MAX+1)-pcr.scalar(i))),REV_MAP) + REV_MAP = pcr.cover(REV_MAP, pcr.ordinal(MAP)) + return REV_MAP + +def DeleteList(itemlist): + for item in itemlist: + print 'file deleted: ' + os.path.basename(item) + os.remove(item) + +def Tiff2Point(TIFF): + DS = gdal.Open(TIFF,GA_ReadOnly) + if DS is None: + print 'Could not open ' + fn + sys.exit(1) + + cols = 
DS.RasterXSize + rows = DS.RasterYSize + geotransform = DS.GetGeoTransform() + originX = geotransform[0] + originY = geotransform[3] + pixelWidth = geotransform[1] + pixelHeight = geotransform[5] + + band = DS.GetRasterBand(1) + NoData = band.GetNoDataValue() + + ATT = os.path.splitext(os.path.basename(TIFF))[0] + SHP = ATT + ".shp" + + if os.path.exists(SHP): + Driver.DeleteDataSource(SHP) + + SHP_out = Driver.CreateDataSource(SHP) + SHP_LYR = SHP_out.CreateLayer(ATT, geom_type=ogr.wkbPoint) + + fieldDef = ogr.FieldDefn("ID", ogr.OFTInteger) + fieldDef.SetWidth(12) + SHP_LYR.CreateField(fieldDef) + + fieldDef = ogr.FieldDefn("X", ogr.OFTReal) + fieldDef.SetWidth(20) + fieldDef.SetPrecision(5) + SHP_LYR.CreateField(fieldDef) + + fieldDef = ogr.FieldDefn("Y", ogr.OFTReal) + fieldDef.SetWidth(20) + fieldDef.SetPrecision(5) + SHP_LYR.CreateField(fieldDef) + + for x in range(cols): + for y in range(rows): + value = band.ReadAsArray(x, y, 1, 1) + if not value == NoData: + xCoord = originX + (0.5 + x)*pixelWidth + yCoord = originY + (y+0.5)*pixelHeight + point = ogr.Geometry(ogr.wkbPoint) + point.AddPoint(xCoord,yCoord) + feat_out = ogr.Feature(SHP_LYR.GetLayerDefn()) + feat_out.SetGeometry(point) + feat_out.SetField("ID", str(int(value[0][0]))) + feat_out.SetField("X", xCoord) + feat_out.SetField("Y", yCoord) + SHP_LYR.CreateFeature(feat_out) + + SHP_out.Destroy() + DS = None + +def GridDef(TIFF,XML): + DS = gdal.Open(TIFF,GA_ReadOnly) + if DS is None: + print 'Could not open ' + fn + sys.exit(1) + + cols = DS.RasterXSize + rows = DS.RasterYSize + geotransform = DS.GetGeoTransform() + originX = geotransform[0] + originY = geotransform[3] + pixelWidth = geotransform[1] + pixelHeight = geotransform[5] + DS = None + Grid_xml = open(XML, 'w+') + Grid_xml.write('\n') + Grid_xml.write('\t'+str(rows)+'\n') + Grid_xml.write('\t'+str(cols)+'\n') + Grid_xml.write('\tGEODATUM\n') + Grid_xml.write('\t\n') + Grid_xml.write('\t\t'+str(originX + 0.5*pixelWidth)+'\n') + 
Grid_xml.write('\t\t'+str(originY + 0.5*pixelHeight)+'\n') + Grid_xml.write('\t\n') + Grid_xml.write('\t'+str(pixelWidth)+'\n') + Grid_xml.write('\t'+str(pixelWidth)+'\n') + Grid_xml.write('\n') + +def PCR_river2Shape(rivermap, drainmap, ordermap,lddmap,SHP_FILENAME,catchmentmap,srs=None): +# rivermap = riversid_map +# drainmap = drain_map +# ordermap = streamorder_map +# lddmap = ldd_map +# SHP_FILENAME = rivshp + counter = 0. + percentage = 0. + file_att = os.path.splitext(os.path.basename(SHP_FILENAME))[0] + x, y, riversid, FillVal = readMap(rivermap, 'PCRaster');riversid[riversid==FillVal]= -1 + x, y, strahlerorder, FillVal = readMap(ordermap, 'PCRaster');strahlerorder[strahlerorder==FillVal]= -1 + x, y, catchment, FillVal = readMap(catchmentmap, 'PCRaster');catchment[catchment==FillVal]= -1 + x, y, drain, FillVal = readMap(drainmap, 'PCRaster');drain[drain==FillVal]= np.nan + x, y, ldd, FillVal = readMap(lddmap, 'PCRaster'); + xi,yi = np.meshgrid(x,y) + + # mesh of surrounding pixels + xi_window, yi_window = np.meshgrid(range(-1,2),range(-1,2)) + # mesh of ldd grid values + ldd_values = np.array([[7, 8, 9],[4, 5, 6],[1, 2, 3]]) + [iiy,iix] = np.where(riversid>0) + riverId = riversid[iiy,iix] + maxRiverId = riverId.max() + + # Create new shapefile + ogr.UseExceptions() + ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(SHP_FILENAME) + layer_line = ds.CreateLayer(file_att, srs, ogr.wkbLineString) + + river_ID = ogr.FieldDefn() + river_ID.SetName('ORDER') + river_ID.SetType(ogr.OFTInteger) + river_ID.SetWidth(6) + layer_line.CreateField(river_ID) + + river_ID = ogr.FieldDefn() + river_ID.SetName('CATCHMENT') + river_ID.SetType(ogr.OFTInteger) + river_ID.SetWidth(6) + layer_line.CreateField(river_ID) + + # Create a new line geometry per river segment + for id in np.arange(1,maxRiverId + 1): + #for id in range(25,26): + #print 'Writing line element "' + str(id) + '"' + y_idx, x_idx = np.where(riversid == id) + drain_idx = drain[y_idx, x_idx] + lat_select 
= yi[y_idx, x_idx] + lon_select = xi[y_idx, x_idx] + strahlerorder_select = strahlerorder[y_idx, x_idx] + catchment_select = catchment[y_idx, x_idx] + order = drain_idx.argsort() + lat_select = lat_select[order] + lon_select = lon_select[order] + catchment_select = catchment_select[order] + strahlerorder_select = strahlerorder_select[order] + line = ogr.Geometry(type=ogr.wkbLineString) + # add points sequentially to line segment + for nr in range(0,len(lat_select)): + #line_latlon.AddPoint(np.float64(lon_select[nr]), np.float64(lat_select[nr])) + line.AddPoint(np.float64(lon_select[nr]), np.float64(lat_select[nr])) + # now find the point downstream of the last pixel from the ldd, which is connected with the downstream river + try: + xi_select = xi[y_idx[order][-1] + yi_window, x_idx[order][-1] + xi_window] + yi_select = yi[y_idx[order][-1] + yi_window, x_idx[order][-1] + xi_window] + ldd_at_pos = ldd[y_idx[order][-1], x_idx[order][-1]] + ldd_y, ldd_x = np.where(ldd_values==ldd_at_pos) + downstream_y = yi_select[ldd_y, ldd_x] + downstream_x = xi_select[ldd_y, ldd_x] + line.AddPoint(np.float64(downstream_x), np.float64(downstream_y)) + except: + continue + # most downstream point of segment is on the boundary of the map, so skip this step + #print 'River segment id: %g is on boundary of the map' % id + # Add line as a new feature to the shapefiles + feature = ogr.Feature(feature_def=layer_line.GetLayerDefn()) + feature.SetGeometryDirectly(line) + feature.SetField('ORDER', int(strahlerorder_select[0])) + feature.SetField('CATCHMENT', int(catchment_select[0])) + counter = counter + 1 + if (float(id)/float(maxRiverId))*100. > percentage: + #logger.info(' ' + str(int(percentage)) + '% completed') + percentage = percentage + 10. 
+ #print 'Writing polyline ' + str(id) + ' of ' + str(maxRiverId) + layer_line.CreateFeature(feature) + # Cleanup + feature.Destroy() + ds.Destroy() + +def snaptomap(points,mmap): + """ + Snap the points in _points_ to nearest non missing + values in _mmap_. Can be used to move gauge locations + to the nearest rivers. + + Input: + - points - map with points to move + - mmap - map with points to move to + + Return: + - map with shifted points + """ + points = pcr.cover(points,0) + # Create unique id map of mmap cells + unq = pcr.nominal(pcr.cover(pcr.uniqueid(pcr.defined(mmap)),pcr.scalar(0.0))) + # Now fill holes in mmap map with lues indicating the closes mmap cell. + dist_cellid = pcr.scalar(pcr.spreadzone(unq,0,1)) + # Get map with values at location in points with closes mmap cell + dist_cellid = pcr.ifthenelse(points > 0, dist_cellid, 0) + # Spread this out + dist_fill = pcr.spreadzone(pcr.nominal(dist_cellid),0,1) + # Find the new (moved) locations + npt = pcr.uniqueid(pcr.boolean(pcr.ifthen(dist_fill == unq, unq))) + # Now recreate the original value in the points maps + ptcover = pcr.spreadzone(pcr.cover(points,0),0,1) + # Now get the org point value in the pt map + nptorg = pcr.ifthen(npt > 0, ptcover) + + return nptorg + +def Raster2Pol(rasterin,shapeout,srs=None,ID='ID'): +# rasterin = catchments_tif +# shapeout = catchments_shp +# ID = 'ID' + sourceRaster = gdal.Open(rasterin) + band = sourceRaster.GetRasterBand(1) + NoData = band.GetNoDataValue() + file_att = os.path.splitext(os.path.basename(shapeout))[0] + driver = ogr.GetDriverByName("ESRI Shapefile") + if os.path.exists(shapeout): + driver.DeleteDataSource(shapeout) + outDatasource = driver.CreateDataSource(shapeout) + outLayer = outDatasource.CreateLayer(file_att, srs) + newField = ogr.FieldDefn(ID, ogr.OFTInteger) + outLayer.CreateField(newField) + gdal.Polygonize(band, band, outLayer, 0, [], callback=None ) + outDatasource.Destroy() + sourceRaster = None + +def 
def windowstats(rasterin, t_rows, t_columns, t_geotransform, t_srs, resultdir,
                stat=np.array([50]), transform=False, logger=logging):
    """
    Reduce a high-resolution raster onto a coarser target grid with one or
    more window statistics, writing one PCRaster map per statistic.

    :param rasterin: path of the original (high resolution) raster, e.g. a DEM
    :param t_rows: number of rows in the final maps
    :param t_columns: number of columns in the final maps
    :param t_geotransform: GDAL geotransform of the final maps
    :param t_srs: osr.SpatialReference of the final maps
    :param resultdir: output folder for the generated .map files
    :param stat: array of statistics; numeric entries are percentiles
        (0 -> 'min' map, 100 -> 'max' map); 'frac' and 'sum' are special cases
    :param transform: osr.CoordinateTransformation mapping target coordinates
        to source coordinates, or False when both rasters share one srs
    :param logger: logger used for progress reporting
    :return: list of short names (one per statistic) of the written maps
    """
    if isinstance(stat, str):
        stat = np.array([stat])
    ds_in = gdal.Open(rasterin, GA_ReadOnly)
    in_trans = ds_in.GetGeoTransform()
    cellsize_in = in_trans[1]
    xorg_in = in_trans[0]
    yorg_in = in_trans[3]
    # properties of the output raster
    cellsize_out = t_geotransform[1]
    xorg = t_geotransform[0]
    yorg = t_geotransform[3]
    origin = (xorg_in, yorg_in)
    band_in = ds_in.GetRasterBand(1)
    nodata = band_in.GetNoDataValue()
    array_out = np.ones((t_rows, t_columns, len(stat)), dtype=np.float) * nodata
    blocks = t_rows * t_columns
    counter = 0
    percentage = 0.
    for row in range(t_rows):
        # was a bare py2 print; use the supplied logger like the rest of the loop
        logger.debug('doing row ' + str(row + 1) + '/' + str(t_rows))
        for col in range(t_columns):
            counter = counter + 1
            if (float(counter) / float(blocks)) * 100. > percentage:
                logger.info(' ' + str(int(percentage)) + '% completed')
                percentage = percentage + 10.
            # window boundaries of this target cell in target coordinates
            xl = xorg + (col * cellsize_out)
            xr = xorg + ((col + 1) * cellsize_out)
            yt = yorg - (row * cellsize_out)
            yb = yorg - ((row + 1) * cellsize_out)
            if not transform:
                coordtl = (xl, yt)
                coordbr = (xr, yb)
            else:
                coordtl = transform.TransformPoint(xl, yt)
                coordbr = transform.TransformPoint(xr, yb)
            xOffset = int((coordtl[0] - origin[0]) / cellsize_in)
            yOffset = int((coordtl[1] - origin[1]) / -cellsize_in)
            xBlock = int((coordbr[0] - coordtl[0]) / cellsize_in + 1)
            yBlock = int((coordtl[1] - coordbr[1]) / cellsize_in + 1)
            data_block = band_in.ReadAsArray(xOffset, yOffset, xBlock, yBlock).astype(float)
            # flag missing values so they become nodata again at the end
            data_block[data_block == float(nodata)] = np.nan
            for idx, perc in enumerate(stat):
                if perc == 'frac':
                    # NOTE(review): mixes a cell length with a summed count;
                    # verify against the 'fact' variant in wtools_lib.windowstats
                    array_out[row, col, idx] = np.max(
                        [(np.sum(data_block) * cellsize_in) / cellsize_out, 1])
                elif perc == 'sum':
                    array_out[row, col, idx] = np.sum(data_block)
                else:
                    array_out[row, col, idx] = np.percentile(data_block, int(perc))
    array_out[np.isnan(array_out)] = nodata
    names = []
    # write one PCRaster map per requested statistic
    for idx, perc in enumerate(stat):
        if perc == 100:
            name = 'max'
            name_map = os.path.join(resultdir, 'wflow_dem{:s}.map'.format(name))
        elif perc == 0:
            name = 'min'
            name_map = os.path.join(resultdir, 'wflow_dem{:s}.map'.format(name))
        elif perc == 'frac':
            name = 'frac'
            name_map = os.path.join(resultdir, 'wflow_riverlength_fact.map')
        elif perc == 'sum':
            name = 'sum'
            name_map = os.path.join(resultdir, 'windowsum.map')
        else:
            # 'name' was never assigned in this branch, crashing with the
            # default stat=[50] (UnboundLocalError on names.append below)
            name = '{:02d}'.format(int(perc))
            logger.info('computing window {:d} percentile'.format(int(perc)))
            name_map = os.path.join(resultdir, 'wflow_dem{:02d}.map'.format(int(perc)))
        names.append(name)
        # build in memory, then convert to PCRaster on disk
        ds_out = gdal.GetDriverByName('MEM').Create('', t_columns, t_rows, 1, GDT_Float32)
        ds_out.SetGeoTransform(t_geotransform)
        ds_out.SetProjection(t_srs.ExportToWkt())
        band_out = ds_out.GetRasterBand(1)
        band_out.WriteArray(array_out[:, :, idx], 0, 0)
        band_out.SetNoDataValue(nodata)
        histogram = band_out.GetDefaultHistogram()
        band_out.SetDefaultHistogram(histogram[0], histogram[1], histogram[3])
        gdal.GetDriverByName('PCRaster').CreateCopy(name_map, ds_out, 0)
        ds_out = None
    ds_in = None
    return names


def CreateTif(TIF, rows, columns, geotransform, srs, fill=-9999):
    """Create a single-band Float32 GeoTIFF filled with ``fill``."""
    ds = gdal.GetDriverByName('GTiff').Create(TIF, columns, rows, 1, GDT_Float32)
    ds.SetGeoTransform(geotransform)
    ds.SetProjection(srs.ExportToWkt())
    band = ds.GetRasterBand(1)
    band.WriteArray(np.ones((rows, columns), dtype=np.float) * fill, 0, 0)
    # was hard-coded -9999: keep the nodata flag consistent with the fill value
    band.SetNoDataValue(fill)
    ds = None


def GetRasterTranform(rasterin, srsout):
    """
    Return (transform, EPSG) for a raster, relative to the target srs.

    :param rasterin: path of the raster to inspect
    :param srsout: target osr.SpatialReference
    :return: (osr.CoordinateTransformation, 'EPSG:<code>') when the raster
        carries a projection, otherwise (False, False)
    """
    ds = gdal.Open(rasterin, GA_ReadOnly)
    transform = False
    EPSG = False
    spatialref = ds.GetProjection()
    # GetProjection() returns a (possibly empty) WKT string, never None:
    # test for emptiness instead of comparing with None
    if spatialref:
        srsin = osr.SpatialReference()
        srsin.ImportFromWkt(spatialref)
        srsin.AutoIdentifyEPSG()
        EPSG = 'EPSG:' + srsin.GetAuthorityCode(None)
        transform = osr.CoordinateTransformation(srsin, srsout)
    return transform, EPSG
# -*- coding: utf-8 -*-
"""
wtools_lib: shared raster/vector helpers for the wflow wtools scripts.

Created on Tue Aug 12 16:07:41 2014

@author: tollena
"""

import ConfigParser
import copy
import logging
import logging.handlers
import os
import sys
from subprocess import call

import numpy as np
import pcraster as pcr
from osgeo import gdal, ogr, osr
from osgeo.gdalconst import *

#srs = osr.SpatialReference()
#srs.ImportFromEPSG(4326)
#Driver = ogr.GetDriverByName("ESRI Shapefile")


def setlogger(logfilename, logReference, verbose=True):
    """
    Set up logging to a rotating file and to the console; exit on failure.

    :param logfilename: path of the logfile
    :param logReference: name of the logger
    :param verbose: unused; kept for backward compatibility
    :return: (logger, file handler) tuple
    """
    try:
        logger = logging.getLogger(logReference)
        logger.setLevel(logging.DEBUG)
        ch = logging.handlers.RotatingFileHandler(
            logfilename, maxBytes=10 * 1024 * 1024, backupCount=5)
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        ch.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        ch.setFormatter(formatter)
        console.setFormatter(formatter)
        logger.addHandler(ch)
        logger.addHandler(console)
        logger.debug("File logging to " + logfilename)
        return logger, ch
    except IOError:
        # py2 print statement replaced by the py2/py3-compatible function form
        print("ERROR: Failed to initialize logger with logfile: " + logfilename)
        sys.exit(1)


def closeLogger(logger, ch):
    """Detach and close the file handler created by setlogger."""
    logger.removeHandler(ch)
    ch.flush()
    ch.close()
    return logger, ch


def close_with_error(logger, ch, msg):
    """Log ``msg`` as an error, shut down logging and exit the process."""
    logger.error(msg)
    logger, ch = closeLogger(logger, ch)
    del logger, ch
    sys.exit(1)


def OpenConf(fn):
    """Open an .ini file case-sensitively; exit when it does not exist."""
    config = ConfigParser.SafeConfigParser()
    config.optionxform = str
    if os.path.exists(fn):
        config.read(fn)
    else:
        print("Cannot open config file: " + fn)
        sys.exit(1)
    return config


def configget(config, section, var, default, datatype='str'):
    """
    Get a value from a config file, falling back to (and storing) a default.

    If the key is missing or unparsable the default is returned and also
    written into the in-memory config (without overwriting existing values).

    :param config: python ConfigParser object
    :param section: section in the file
    :param var: variable (key) to get
    :param default: default value
    :param datatype: 'str' (default), 'boolean', 'int' or 'float'
    :return: value from the config file, or the default
    """
    try:
        if datatype == 'int':
            ret = config.getint(section, var)
        elif datatype == 'float':
            ret = config.getfloat(section, var)
        elif datatype == 'boolean':
            ret = config.getboolean(section, var)
        else:
            ret = config.get(section, var)
    except (ConfigParser.Error, ValueError):
        # was a bare except; only missing keys/sections and bad values are
        # expected here.  The old Def/default bookkeeping was dead code.
        ret = default
        configset(config, section, var, str(default), overwrite=False)
    return ret


def configset(config, section, var, value, overwrite=False):
    """
    Set a key in the in-memory config object.

    Does NOT overwrite existing values unless ``overwrite`` is True.

    :param config: python ConfigParser object
    :param section: section in the file (created when missing)
    :param var: variable (key) to set
    :param value: the value to set
    :param overwrite: replace an existing value when True (default False)
    """
    if not config.has_section(section):
        config.add_section(section)
        config.set(section, var, value)
    elif not config.has_option(section, var) or overwrite:
        config.set(section, var, value)


def get_geotransform(filename):
    """Return the GDAL geotransform tuple of a raster dataset."""
    ds = gdal.Open(filename, GA_ReadOnly)
    gt = ds.GetGeoTransform()
    ds = None
    return gt
resolution', '0', 'top left y', '0', 'n-s pixel resolution (negative value)' + nx, ny = ds.RasterXSize, ds.RasterYSize + xmin = np.float64(gt[0]) + ymin = np.float64(gt[3]) +np.float64(ny) * np.float64(gt[5]) + xmax = np.float64(gt[0]) + np.float64(nx) * np.float64(gt[1]) + ymax = np.float64(gt[3]) + ds = None + return xmin, ymin, xmax, ymax + +def get_projection(filename): + ds = gdal.Open(filename, GA_ReadOnly) + WktString = ds.GetProjection() + srs = osr.SpatialReference() + srs.ImportFromWkt(WktString) + ds = None + return srs + +def round_extent(extent, snap, prec): + """Increases the extent until all sides lie on a coordinate + divideable by 'snap'.""" + xmin, ymin, xmax, ymax = extent + snap = float(snap) # prevent integer division issues + xmin = round(np.ceil(xmin/snap)*snap, prec) + ymin = round(np.ceil(ymin/snap)*snap, prec) + xmax = round(np.floor(xmax/snap)*snap, prec) + ymax = round(np.floor(ymax/snap)*snap, prec) + return xmin, ymin, xmax, ymax + +def DeleteShapes(shapes): + shapelist = list(shapes) + for shape in shapelist: + if os.path.exists(shape): + Driver.DeleteDataSource(shape) + print "shapefile deleted: " + shape + +def MergeShapes(shapesin, Layer): + for SHP in shapesin: + if os.path.exists(SHP): + ATT = os.path.splitext(os.path.basename(SHP))[0] + DATA = ogr.Open(SHP) + LYR = DATA.GetLayerByName(ATT) + LYR.ResetReading() + for idx, i in enumerate(range(LYR.GetFeatureCount())): + oldfeature = LYR.GetFeature(i) + geometry = oldfeature.geometry() + feature = ogr.Feature(Layer.GetLayerDefn()) + feature.SetGeometry(geometry) + feature.SetField("ID",oldfeature.GetFieldAsString(0)) + Layer.CreateFeature(feature) + DATA.Destroy() + +def readMap(fileName, fileFormat): + """ + Read geographical file into memory + """ + # Open file for binary-reading + mapFormat = gdal.GetDriverByName(fileFormat) + mapFormat.Register() + ds = gdal.Open(fileName) + if ds is None: + print 'Could not open ' + fileName + '. Something went wrong!! 
def readMap(fileName, fileFormat):
    """
    Read a geographical raster into memory.

    :param fileName: path of the raster
    :param fileFormat: GDAL driver name, e.g. 'PCRaster' or 'GTiff'
    :return: (x, y, data, FillVal) — x/y hold the cell-centre coordinates
    """
    mapFormat = gdal.GetDriverByName(fileFormat)
    mapFormat.Register()
    ds = gdal.Open(fileName)
    if ds is None:
        print('Could not open ' + fileName + '. Something went wrong!! Shutting down')
        sys.exit(1)
    geotrans = ds.GetGeoTransform()
    originX = geotrans[0]
    originY = geotrans[3]
    resX = geotrans[1]
    resY = geotrans[5]
    cols = ds.RasterXSize
    rows = ds.RasterYSize
    # axes hold the cell-centre coordinates
    x = np.linspace(originX + resX / 2, originX + resX / 2 + resX * (cols - 1), cols)
    y = np.linspace(originY + resY / 2, originY + resY / 2 + resY * (rows - 1), rows)
    RasterBand = ds.GetRasterBand(1)  # single band; GDAL band index starts at 1
    data = RasterBand.ReadAsArray(0, 0, cols, rows)
    FillVal = RasterBand.GetNoDataValue()
    RasterBand = None
    ds = None
    return x, y, data, FillVal


def Reach2Nodes(SHP, EPSG, toll, storedir):
    """
    Split a reach (polyline) shapefile into start, end and connection points.

    A start point is a reach start that touches no reach end; an end point is
    a reach end that touches no reach start; every other touching point is a
    connection (junction).  Coordinates are matched within tolerance ``toll``.

    :param SHP: input polyline shapefile
    :param EPSG: EPSG code for the outputs, or None to omit the srs
    :param toll: absolute coordinate tolerance for matching
    :param storedir: directory for the three output shapefiles
    :return: (start, end, connection) shapefile paths
    """
    # the module-level Driver/srs globals are commented out (NameError
    # before): create both locally
    driver = ogr.GetDriverByName("ESRI Shapefile")
    srs = None
    if EPSG is not None:
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(int(EPSG))

    SHP_ATT = os.path.splitext(os.path.basename(SHP))[0]
    END_SHP = storedir + SHP_ATT + "_end.shp"
    START_SHP = storedir + SHP_ATT + "_start.shp"
    CONN_SHP = storedir + SHP_ATT + "_connection.shp"

    fieldDef = ogr.FieldDefn("ID", ogr.OFTString)
    fieldDef.SetWidth(12)

    # (re)create the three point layers
    sources = []
    layers = []
    for path in (START_SHP, END_SHP, CONN_SHP):
        if os.path.exists(path):
            driver.DeleteDataSource(path)
        out = driver.CreateDataSource(path)
        att = os.path.splitext(os.path.basename(path))[0]
        if srs is not None:
            lyr = out.CreateLayer(att, srs, geom_type=ogr.wkbPoint)
        else:
            lyr = out.CreateLayer(att, geom_type=ogr.wkbPoint)
        lyr.CreateField(fieldDef)
        sources.append(out)
        layers.append(lyr)
    START_LYR, END_LYR, CONN_LYR = layers

    # collect first and last vertex of every reach
    StartCoord = []
    EndCoord = []
    DATA = ogr.Open(SHP)
    LYR = DATA.GetLayerByName(SHP_ATT)
    LYR.ResetReading()
    for i in range(LYR.GetFeatureCount()):
        geometry = LYR.GetFeature(i).geometry()
        StartCoord.append([geometry.GetX(0), geometry.GetY(0)])
        points = geometry.GetPointCount()
        EndCoord.append([geometry.GetX(points - 1), geometry.GetY(points - 1)])
    DATA.Destroy()

    def _add_point(layer, coord):
        # write one point feature to the given layer
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(coord[0], coord[1])
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetGeometry(point)
        layer.CreateFeature(feature)

    # seed with NaNs so np.isclose always has a 2-column array to compare with
    Connections = [[np.nan, np.nan], [np.nan, np.nan]]
    for i in range(len(StartCoord)):
        if True not in np.all(np.isclose(StartCoord[i], EndCoord, rtol=0, atol=toll), axis=1):
            # no upstream reach ends here: a start point
            _add_point(START_LYR, StartCoord[i])
        elif True not in np.all(np.isclose(StartCoord[i], Connections, rtol=0, atol=toll), axis=1):
            # junction between reaches, written only once
            Connections.append(StartCoord[i])
            _add_point(CONN_LYR, StartCoord[i])
        if True not in np.all(np.isclose(EndCoord[i], StartCoord, rtol=0, atol=toll), axis=1):
            # no downstream reach starts here: an end point
            _add_point(END_LYR, EndCoord[i])
        elif True not in np.all(np.isclose(EndCoord[i], Connections, rtol=0, atol=toll), axis=1):
            Connections.append(EndCoord[i])
            _add_point(CONN_LYR, EndCoord[i])

    for out in sources:
        out.Destroy()
    return START_SHP, END_SHP, CONN_SHP
def ReachOrder(SHP, EPSG, toll, storedir):
    """
    Assign a stream order to every reach and write one shapefile per order.

    Order 1 reaches start at points where no other reach ends; subsequent
    orders are propagated downstream by matching reach starts to reach ends
    within tolerance ``toll``.

    :param SHP: input polyline shapefile
    :param EPSG: EPSG code for the outputs, or None to omit the srs
    :param toll: absolute coordinate tolerance for matching
    :param storedir: directory for the per-order output shapefiles
    :return: list of the written shapefile paths
    """
    # the module-level Driver/srs globals are commented out (NameError
    # before): create both locally
    driver = ogr.GetDriverByName("ESRI Shapefile")
    srs = None
    if EPSG is not None:
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(int(EPSG))
    SHP_ATT = os.path.splitext(os.path.basename(SHP))[0]
    ORDER_SHP = storedir + SHP_ATT + "_order1.shp"
    FileOrder = [ORDER_SHP]
    if os.path.exists(ORDER_SHP):
        driver.DeleteDataSource(ORDER_SHP)

    IDField = ogr.FieldDefn("ID", ogr.OFTString)
    IDField.SetWidth(12)
    OrderField = ogr.FieldDefn("ORDER", ogr.OFTString)
    OrderField.SetWidth(12)

    # collect first and last vertex of every reach
    StartCoord = []
    EndCoord = []
    DATA = ogr.Open(SHP)
    LYR = DATA.GetLayerByName(SHP_ATT)
    LYR.ResetReading()
    for i in range(LYR.GetFeatureCount()):
        geometry = LYR.GetFeature(i).geometry()
        StartCoord.append([geometry.GetX(0), geometry.GetY(0)])
        points = geometry.GetPointCount()
        EndCoord.append([geometry.GetX(points - 1), geometry.GetY(points - 1)])
    EndCoord_np = np.array(EndCoord)
    reaches = i + 1
    ReachIDs = range(reaches)
    ReachOrders = np.array([None] * len(ReachIDs))

    # first pass: reaches whose start touches no reach end are order 1
    order = 1
    ordercoord = copy.deepcopy(EndCoord)
    tempcoord = []
    endpoints = 0
    for i in ReachIDs:
        ReachStart = StartCoord[i]
        ReachEnd = EndCoord[i]
        if True not in np.all(np.isclose(ReachStart, ordercoord, rtol=0, atol=toll), axis=1):
            ReachOrders[i] = order
            tempcoord.append(ReachEnd)
        if True not in np.all(np.isclose(ReachEnd, StartCoord, rtol=0, atol=toll), axis=1):
            endpoints += 1
    ordercoord = copy.deepcopy(tempcoord)
    order += 1

    # iterate downstream; capped at 100 passes to guarantee termination
    iterations = 0
    while None in list(ReachOrders) and iterations <= 100:
        iterations += 1
        OrderMove = False
        for i in ReachIDs:
            if ReachOrders[i] is None:
                ReachStart = StartCoord[i]
                OrderSelect = ReachOrders[
                    np.all(np.isclose(ReachStart, EndCoord_np, rtol=0, atol=toll), axis=1)]
                if None not in list(OrderSelect):
                    if all(x == list(OrderSelect)[0] for x in list(OrderSelect)):
                        # all upstream reaches share one order: bump the order
                        OrderMove = True
                        ReachOrders[i] = order
                    else:
                        ReachOrders[i] = int(np.max(OrderSelect))
        if OrderMove:
            order += 1

    if None in list(ReachOrders):
        print("Conversion of river to orders failed. Try to use a smaller tollerance")
        #sys.exit(1)

    # write one shapefile per order
    LYR.ResetReading()
    for order in range(1, max(ReachOrders) + 1):
        ORDER_SHP = storedir + SHP_ATT + "_order" + str(order) + ".shp"
        FileOrder.append(ORDER_SHP)
        if os.path.exists(ORDER_SHP):
            driver.DeleteDataSource(ORDER_SHP)
        ORDER_out = driver.CreateDataSource(ORDER_SHP)
        ORDER_ATT = os.path.splitext(os.path.basename(ORDER_SHP))[0]
        if srs is not None:
            ORDER_LYR = ORDER_out.CreateLayer(ORDER_ATT, srs, geom_type=ogr.wkbLineString)
        else:
            ORDER_LYR = ORDER_out.CreateLayer(ORDER_ATT, geom_type=ogr.wkbLineString)
        ORDER_LYR.CreateField(IDField)
        ORDER_LYR.CreateField(OrderField)
        for j in range(LYR.GetFeatureCount()):
            if ReachOrders[j] == order:
                orgfeature = LYR.GetFeature(j)
                feature = ogr.Feature(ORDER_LYR.GetLayerDefn())
                feature.SetGeometry(orgfeature.geometry())
                feature.SetField("ID", str(j))
                feature.SetField("ORDER", str(order))
                ORDER_LYR.CreateFeature(feature)
        ORDER_out.Destroy()

    DATA.Destroy()
    return FileOrder


def Burn2Tif(shapes, attribute, TIFF):
    """Rasterize ``attribute`` of each shapefile into an existing TIFF (in place)."""
    for shape in shapes:
        shape_att = os.path.splitext(os.path.basename(shape))[0]
        # argument list instead of os.system: no shell parsing/quoting issues
        call(('gdal_rasterize', '-a', str(attribute), '-l', shape_att, shape, TIFF))


def ReverseMap(MAP):
    """Return an ordinal map with the class values of ``MAP`` reversed (1<->max)."""
    MAX = int(np.max(pcr.pcr2numpy(MAP, np.NAN)))
    REV_MAP = pcr.ordinal(pcr.ifthen(pcr.scalar(MAP) == pcr.scalar(-9999), pcr.scalar(0)))
    for i in range(1, MAX + 1):
        print(i)
        REV_MAP = pcr.cover(
            pcr.ifthen(pcr.ordinal(MAP) == pcr.ordinal(i),
                       pcr.ordinal(pcr.scalar(MAX + 1) - pcr.scalar(i))),
            REV_MAP)
    REV_MAP = pcr.cover(REV_MAP, pcr.ordinal(MAP))
    return REV_MAP


def DeleteList(itemlist, logger=logging):
    """Remove every file in ``itemlist``, logging each successful deletion."""
    for item in itemlist:
        os.remove(item)
        # log after the removal so the log reflects what actually happened
        logger.info('file deleted: {:s}'.format(item))
def Tiff2Point(TIFF):
    """
    Convert all non-nodata cells of a GeoTIFF into a point shapefile.

    The shapefile (named after the raster's base name, in the current working
    directory) gets an integer ID field with the cell value plus X/Y fields
    with the cell-centre coordinates.
    """
    DS = gdal.Open(TIFF, GA_ReadOnly)
    if DS is None:
        # was 'fn' (an undefined name -> NameError); report the real argument
        print('Could not open ' + TIFF)
        sys.exit(1)

    cols = DS.RasterXSize
    rows = DS.RasterYSize
    geotransform = DS.GetGeoTransform()
    originX = geotransform[0]
    originY = geotransform[3]
    pixelWidth = geotransform[1]
    pixelHeight = geotransform[5]

    band = DS.GetRasterBand(1)
    NoData = band.GetNoDataValue()

    ATT = os.path.splitext(os.path.basename(TIFF))[0]
    SHP = ATT + ".shp"

    # the module-level Driver is commented out (NameError before)
    driver = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(SHP):
        driver.DeleteDataSource(SHP)
    SHP_out = driver.CreateDataSource(SHP)
    SHP_LYR = SHP_out.CreateLayer(ATT, geom_type=ogr.wkbPoint)

    fieldDef = ogr.FieldDefn("ID", ogr.OFTInteger)
    fieldDef.SetWidth(12)
    SHP_LYR.CreateField(fieldDef)
    for fname in ("X", "Y"):
        fieldDef = ogr.FieldDefn(fname, ogr.OFTReal)
        fieldDef.SetWidth(20)
        fieldDef.SetPrecision(5)
        SHP_LYR.CreateField(fieldDef)

    # read the band once instead of one 1x1 ReadAsArray call per pixel
    data = band.ReadAsArray(0, 0, cols, rows)
    for x in range(cols):
        for y in range(rows):
            value = data[y, x]
            if not value == NoData:
                xCoord = originX + (0.5 + x) * pixelWidth
                yCoord = originY + (y + 0.5) * pixelHeight
                point = ogr.Geometry(ogr.wkbPoint)
                point.AddPoint(xCoord, yCoord)
                feat_out = ogr.Feature(SHP_LYR.GetLayerDefn())
                feat_out.SetGeometry(point)
                feat_out.SetField("ID", str(int(value)))
                feat_out.SetField("X", xCoord)
                feat_out.SetField("Y", yCoord)
                SHP_LYR.CreateFeature(feat_out)

    SHP_out.Destroy()
    DS = None


def GridDef(TIFF, XML):
    """
    Write a grid definition derived from ``TIFF`` to the ``XML`` file.

    NOTE(review): the markup tags appear to have been stripped from these
    write calls (only values and whitespace remain) — restore the element
    tags from the original template; the written values are kept exactly as
    found here.
    """
    DS = gdal.Open(TIFF, GA_ReadOnly)
    if DS is None:
        # was 'fn' (an undefined name -> NameError); report the real argument
        print('Could not open ' + TIFF)
        sys.exit(1)

    cols = DS.RasterXSize
    rows = DS.RasterYSize
    geotransform = DS.GetGeoTransform()
    originX = geotransform[0]
    originY = geotransform[3]
    pixelWidth = geotransform[1]
    pixelHeight = geotransform[5]
    DS = None
    # the file handle was never closed before (resource leak)
    Grid_xml = open(XML, 'w+')
    try:
        Grid_xml.write('\n')
        Grid_xml.write('\t' + str(rows) + '\n')
        Grid_xml.write('\t' + str(cols) + '\n')
        Grid_xml.write('\tGEODATUM\n')
        Grid_xml.write('\t\n')
        Grid_xml.write('\t\t' + str(originX + 0.5 * pixelWidth) + '\n')
        Grid_xml.write('\t\t' + str(originY + 0.5 * pixelHeight) + '\n')
        Grid_xml.write('\t\n')
        Grid_xml.write('\t' + str(pixelWidth) + '\n')
        Grid_xml.write('\t' + str(pixelWidth) + '\n')
        Grid_xml.write('\n')
    finally:
        Grid_xml.close()
def PCR_river2Shape(rivermap, drainmap, ordermap, lddmap, SHP_FILENAME,
                    catchmentmap, srs=None, logger=logging):
    """
    Convert a PCRaster river-id map into a polyline shapefile.

    Each river segment id becomes one line feature whose vertices are the
    segment's cells sorted along ``drainmap``, extended by one point into the
    downstream segment via the local drain direction; the Strahler ORDER and
    CATCHMENT id of the segment are written as attributes.

    :param rivermap: PCRaster map with river segment ids
    :param drainmap: map used to sort the cells of a segment downstream
    :param ordermap: Strahler stream order map
    :param lddmap: local drain direction map
    :param SHP_FILENAME: output shapefile path
    :param catchmentmap: catchment id map
    :param srs: optional osr.SpatialReference for the output layer
    :param logger: progress logger (was referenced without being defined,
        crashing with NameError; new keyword keeps the signature compatible)
    """
    counter = 0.
    percentage = 0.
    file_att = os.path.splitext(os.path.basename(SHP_FILENAME))[0]
    x, y, riversid, FillVal = readMap(rivermap, 'PCRaster')
    riversid[riversid == FillVal] = -1
    x, y, strahlerorder, FillVal = readMap(ordermap, 'PCRaster')
    strahlerorder[strahlerorder == FillVal] = -1
    x, y, catchment, FillVal = readMap(catchmentmap, 'PCRaster')
    catchment[catchment == FillVal] = -1
    x, y, drain, FillVal = readMap(drainmap, 'PCRaster')
    drain[drain == FillVal] = np.nan
    x, y, ldd, FillVal = readMap(lddmap, 'PCRaster')
    xi, yi = np.meshgrid(x, y)

    # window of surrounding pixels and the ldd codes pointing to them
    xi_window, yi_window = np.meshgrid(range(-1, 2), range(-1, 2))
    ldd_values = np.array([[7, 8, 9], [4, 5, 6], [1, 2, 3]])
    iiy, iix = np.where(riversid > 0)
    maxRiverId = riversid[iiy, iix].max()

    # create the output shapefile with ORDER and CATCHMENT attributes
    ogr.UseExceptions()
    ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(SHP_FILENAME)
    layer_line = ds.CreateLayer(file_att, srs, ogr.wkbLineString)
    for field_name in ('ORDER', 'CATCHMENT'):
        field = ogr.FieldDefn()
        field.SetName(field_name)
        field.SetType(ogr.OFTInteger)
        field.SetWidth(6)
        layer_line.CreateField(field)

    # one line geometry per river segment id
    for id in np.arange(1, maxRiverId + 1):
        y_idx, x_idx = np.where(riversid == id)
        order = drain[y_idx, x_idx].argsort()
        lat_select = yi[y_idx, x_idx][order]
        lon_select = xi[y_idx, x_idx][order]
        catchment_select = catchment[y_idx, x_idx][order]
        strahlerorder_select = strahlerorder[y_idx, x_idx][order]
        line = ogr.Geometry(type=ogr.wkbLineString)
        for nr in range(0, len(lat_select)):
            line.AddPoint(np.float64(lon_select[nr]), np.float64(lat_select[nr]))
        # connect to the downstream segment through the ldd of the last cell
        try:
            xi_select = xi[y_idx[order][-1] + yi_window, x_idx[order][-1] + xi_window]
            yi_select = yi[y_idx[order][-1] + yi_window, x_idx[order][-1] + xi_window]
            ldd_at_pos = ldd[y_idx[order][-1], x_idx[order][-1]]
            ldd_y, ldd_x = np.where(ldd_values == ldd_at_pos)
            line.AddPoint(np.float64(xi_select[ldd_y, ldd_x]),
                          np.float64(yi_select[ldd_y, ldd_x]))
        except IndexError:
            # last cell sits on the map boundary: no downstream point exists.
            # The old bare `except: continue` silently dropped the whole
            # segment, contrary to its own comment; only skip the extra point.
            pass
        feature = ogr.Feature(feature_def=layer_line.GetLayerDefn())
        feature.SetGeometryDirectly(line)
        feature.SetField('ORDER', int(strahlerorder_select[0]))
        feature.SetField('CATCHMENT', int(catchment_select[0]))
        counter = counter + 1
        if (float(id) / float(maxRiverId)) * 100. > percentage:
            logger.info(' ' + str(int(percentage)) + '% completed')
            percentage = percentage + 10.
        layer_line.CreateFeature(feature)
        # destroy each feature (was only applied to the last one)
        feature.Destroy()
    ds.Destroy()


def snaptomap(points, mmap):
    """
    Snap the points in ``points`` to the nearest non-missing values in
    ``mmap``.  Can be used to move gauge locations to the nearest rivers.

    :param points: map with points to move
    :param mmap: map with points to move to
    :return: map with the shifted points
    """
    points = pcr.cover(points, 0)
    # unique id per mmap cell
    unq = pcr.nominal(pcr.cover(pcr.uniqueid(pcr.defined(mmap)), pcr.scalar(0.0)))
    # fill holes with the id of the closest mmap cell
    dist_cellid = pcr.scalar(pcr.spreadzone(unq, 0, 1))
    # keep only the ids at the point locations and spread them out
    dist_cellid = pcr.ifthenelse(points > 0, dist_cellid, 0)
    dist_fill = pcr.spreadzone(pcr.nominal(dist_cellid), 0, 1)
    # new (moved) locations
    npt = pcr.uniqueid(pcr.boolean(pcr.ifthen(dist_fill == unq, unq)))
    # recover the original point values at the new locations
    ptcover = pcr.spreadzone(pcr.cover(points, 0), 0, 1)
    nptorg = pcr.ifthen(npt > 0, ptcover)
    return nptorg


def Raster2Pol(rasterin, shapeout, srs=None, ID='ID'):
    """
    Polygonize a raster band into a shapefile, using the band itself as mask.

    :param rasterin: input raster path
    :param shapeout: output shapefile path (replaced when it exists)
    :param srs: optional osr.SpatialReference for the output layer
    :param ID: name of the integer attribute receiving the raster value
    """
    sourceRaster = gdal.Open(rasterin)
    band = sourceRaster.GetRasterBand(1)
    file_att = os.path.splitext(os.path.basename(shapeout))[0]
    driver = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(shapeout):
        driver.DeleteDataSource(shapeout)
    outDatasource = driver.CreateDataSource(shapeout)
    outLayer = outDatasource.CreateLayer(file_att, srs)
    outLayer.CreateField(ogr.FieldDefn(ID, ogr.OFTInteger))
    # second `band` argument doubles as the nodata mask
    gdal.Polygonize(band, band, outLayer, 0, [], callback=None)
    outDatasource.Destroy()
    sourceRaster = None
def windowstats(rasterin, t_rows, t_columns, t_geotransform, t_srs, rasterout,
                stat=50, transform=False, logger=logging):
    """
    Resample ``rasterin`` onto a coarser target grid with one window
    statistic and write the result as a PCRaster map to ``rasterout``.

    :param rasterin: path of the high-resolution input raster (e.g. a DEM)
    :param t_rows: number of rows of the target grid
    :param t_columns: number of columns of the target grid
    :param t_geotransform: GDAL geotransform of the target grid
    :param t_srs: osr.SpatialReference of the target grid
    :param rasterout: output PCRaster file path
    :param stat: percentile (number), or 'fact' (cell-length multiplier for
        river lengths) or 'sum'
    :param transform: osr.CoordinateTransformation target->source, or False
        when both rasters share one srs
    :param logger: progress logger
    """
    if isinstance(stat, str):
        stat = np.array([stat])
    ds_in = gdal.Open(rasterin, GA_ReadOnly)
    in_trans = ds_in.GetGeoTransform()
    cellsize_in_x = in_trans[1]
    cellsize_in_y = in_trans[5]
    xorg_in = in_trans[0]
    yorg_in = in_trans[3]
    # properties of the output raster
    cellsize_out_x = t_geotransform[1]
    cellsize_out_y = t_geotransform[5]
    xorg = t_geotransform[0]
    yorg = t_geotransform[3]
    band_in = ds_in.GetRasterBand(1)
    nodata = band_in.GetNoDataValue()
    array_out = np.ones((t_rows, t_columns), dtype=np.float) * nodata
    blocks = t_rows * t_columns
    counter = 0
    percentage = 0.
    for row in range(t_rows):
        for col in range(t_columns):
            counter = counter + 1
            if (float(counter) / float(blocks)) * 100. > percentage:
                logger.debug(' ' + str(int(percentage)) + '% completed')
                percentage = percentage + 10.
            # window bounds of this target cell (cellsize_out_y is negative;
            # the old `yorg - (row * -cellsize_out_y)` is the same value)
            xl = xorg + (col * cellsize_out_x)
            xr = xorg + ((col + 1) * cellsize_out_x)
            yt = yorg + (row * cellsize_out_y)
            yb = yorg + ((row + 1) * cellsize_out_y)
            if not transform:
                coordtl = (xl, yt)
                coordbr = (xr, yb)
            else:
                coordtl = transform.TransformPoint(xl, yt)
                coordbr = transform.TransformPoint(xr, yb)
            xOffset = int((coordtl[0] - xorg_in) / cellsize_in_x)
            yOffset = int((coordtl[1] - yorg_in) / cellsize_in_y)
            xBlock = int((coordbr[0] - coordtl[0]) / cellsize_in_x + 1)
            yBlock = int((coordtl[1] - coordbr[1]) / -cellsize_in_y + 1)
            data_block = band_in.ReadAsArray(xOffset, yOffset, xBlock, yBlock)
            # mask missing values so they do not contaminate the statistics
            data_block = np.ma.masked_equal(data_block, nodata)
            if stat == 'fact':
                # cell-length multiplier used to estimate river lengths
                array_out[row, col] = np.maximum(
                    np.sum(data_block) * np.float(cellsize_in_x) / cellsize_out_x, 1)
            elif stat == 'sum':
                array_out[row, col] = np.sum(data_block)
            else:
                # np.percentile ignores the mask of a masked array, so reduce
                # to the valid (unmasked) values first
                array_out[row, col] = np.percentile(data_block.compressed(), int(stat))
    # replace NaNs by the fill value before writing
    array_out[np.isnan(array_out)] = nodata
    ds_out = gdal.GetDriverByName('MEM').Create('', t_columns, t_rows, 1, GDT_Float32)
    ds_out.SetGeoTransform(t_geotransform)
    ds_out.SetProjection(t_srs.ExportToWkt())
    band_out = ds_out.GetRasterBand(1)
    band_out.WriteArray(array_out[:, :], 0, 0)
    band_out.SetNoDataValue(nodata)
    histogram = band_out.GetDefaultHistogram()
    band_out.SetDefaultHistogram(histogram[0], histogram[1], histogram[3])
    ds_in = None
    gdal.GetDriverByName('PCRaster').CreateCopy(rasterout, ds_out, 0)
    ds_out = None


def CreateTif(TIF, rows, columns, geotransform, srs, fill=-9999):
    """Create a single-band Float32 GeoTIFF filled with ``fill`` (also nodata)."""
    ds = gdal.GetDriverByName('GTiff').Create(TIF, columns, rows, 1, GDT_Float32)
    ds.SetGeoTransform(geotransform)
    ds.SetProjection(srs.ExportToWkt())
    band = ds.GetRasterBand(1)
    band.WriteArray(np.ones((rows, columns), dtype=np.float) * fill, 0, 0)
    band.SetNoDataValue(fill)
    ds = None


def GetRasterTranform(rasterin, srsout):
    """
    Return (transform, EPSG) for a raster, relative to the target srs.

    :param rasterin: path of the raster to inspect
    :param srsout: target osr.SpatialReference
    :return: (osr.CoordinateTransformation, 'EPSG:<code>') when the raster
        carries a projection, otherwise (False, False)
    """
    ds = gdal.Open(rasterin, GA_ReadOnly)
    transform = False
    EPSG = False
    spatialref = ds.GetProjection()
    # GetProjection() returns a (possibly empty) WKT string, never None:
    # test for emptiness instead of comparing with None
    if spatialref:
        srsin = osr.SpatialReference()
        srsin.ImportFromWkt(spatialref)
        srsin.AutoIdentifyEPSG()
        EPSG = 'EPSG:' + srsin.GetAuthorityCode(None)
        transform = osr.CoordinateTransformation(srsin, srsout)
    return transform, EPSG