diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4a3b4ddf232066609eb8edabd990df177a0466ee..07bb8aa7adcfc8d07067871adf66f34a17524050 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,5 +1,5 @@ # The Docker image that will be used to build your app -image: python:3.11 +image: python:3.11-slim # Functions that should be executed before the build script is run before_script: @@ -18,7 +18,7 @@ pages: when: always script: - cd doc - - make html + - sphinx-build source build/html - cd .. - mkdir public - cp -r doc/build/html/* public diff --git a/doc/source/documentation.rst b/doc/source/documentation.rst index ec1ecfee41907a73395a8722de44fd425451c92b..0b342ecf19bc79fda1c78d894305f854d1ed2cc0 100644 --- a/doc/source/documentation.rst +++ b/doc/source/documentation.rst @@ -283,7 +283,7 @@ This should now be done when code is changed using a '*pipeline*'. .. sourcecode:: yaml # The Docker image that will be used to build your app - image: python:3.11 + image: python:3.11-slim # Functions that should be executed before the build script is run before_script: @@ -300,7 +300,7 @@ This should now be done when code is changed using a '*pipeline*'. when: always script: - cd doc - - make html + - sphinx-build source build/html - cd .. 
- mkdir public - cp -r doc/build/html/* public diff --git a/src/cso/__init__.py b/src/cso/__init__.py index 21f4e8fc59323a878f1b9ad1ee24ea7b3ee33131..bdeab0b777947b9944acc4cea5f6ff2fa8c39d9c 100644 --- a/src/cso/__init__.py +++ b/src/cso/__init__.py @@ -120,18 +120,21 @@ and are defined according to the following hierchy: # store version number: try: from importlib import metadata + __version__ = metadata.version(__package__) except: import os + if "CSO_PREFIX" in os.environ.keys(): import tomllib - with open(os.path.join(os.environ["CSO_PREFIX"],"pyproject.toml"),"rb") as f : - pp = tomllib.load( f ) + + with open(os.path.join(os.environ["CSO_PREFIX"], "pyproject.toml"), "rb") as f: + pp = tomllib.load(f) __version__ = pp["project"]["version"] else: __version__ = "x.y" - #endif -#endtry + # endif +# endtry # import entities from sub-modules: from .cso_file import * diff --git a/src/cso/cso_colocate.py b/src/cso/cso_colocate.py index 3a84e8b347e2b7ac1e02172c9f634d21eaae9568..53da05cdf71e56c7ecf4eac33f3b43a44721a869 100644 --- a/src/cso/cso_colocate.py +++ b/src/cso/cso_colocate.py @@ -13,7 +13,7 @@ # # 2025-04, Arjo Segers # Changed imports for python packaging. -# +# ######################################################################## diff --git a/src/cso/cso_dataspace.py b/src/cso/cso_dataspace.py index 7f43c9443654c4be5d3f8734351042b58397d5a0..405f017d94139638a21adae8514978f8f3b487cd 100644 --- a/src/cso/cso_dataspace.py +++ b/src/cso/cso_dataspace.py @@ -42,7 +42,7 @@ # # 2025-04, Arjo Segers # Changed imports for python packaging. 
-# +# ######################################################################## diff --git a/src/cso/cso_earthaccess.py b/src/cso/cso_earthaccess.py index 35397f053e20cf966d590610d367d614c6fd1807..0e822691bd22736918ccd7239d6a8242f777645c 100644 --- a/src/cso/cso_earthaccess.py +++ b/src/cso/cso_earthaccess.py @@ -787,7 +787,7 @@ class CSO_EarthAccess_Download_Listing(utopya.UtopyaRc): logging.info(f"{indent} scan base directory: %s ..." % bdir) # create directory if necessary: - cso_file.CheckDir( lst_file, dmode=dmode ) + cso_file.CheckDir(lst_file, dmode=dmode) # initiallize for (re)creation: listing = cso_file.CSO_Listing(indent=f"{indent} ") @@ -799,51 +799,48 @@ class CSO_EarthAccess_Download_Listing(utopya.UtopyaRc): for root, dirs, files in os.walk(bdir): # loop over files: for fname in files: - # subdir relative to listing file: subdir = os.path.relpath(root, start=bdir) # info ... - if subdir not in subdirs : + if subdir not in subdirs: # info ... logging.info(f"{indent} {subdir} ...") # store: subdirs.append(subdir) - #endif + # endif ## testing .. - #if subdir != "2022/007": + # if subdir != "2022/007": # #logging.warning(f"{indent} skip ...") # continue ##endif # data file? 
if fnmatch.fnmatch(fname, fpattern): - # expected filenames: # AERDB_L2_VIIRS_SNPP.A2022001.0342.002.2023076013614.nc parts = fname.split(".") if len(parts) == 6: - # second is year-julday, strip the "A" of acquisition: try: - t1 = datetime.datetime.strptime(parts[1][1:],"%Y%j") + t1 = datetime.datetime.strptime(parts[1][1:], "%Y%j") except: logging.error(f"could not extract date from '{parts[1]}'") raise Exception - #endtry + # endtry # end time: t2 = t1 + datetime.timedelta(1) - else : + else: logging.error(f"unsupported filename: {fname}") raise Exception # endif # open for extra info: - sfile = cso_file.CSO_File( os.path.join(root,fname) ) + sfile = cso_file.CSO_File(os.path.join(root, fname)) # extract attributes: - orbit = sfile.GetAttr( "OrbitNumber" ) + orbit = sfile.GetAttr("OrbitNumber") # done: sfile.Close() @@ -854,22 +851,24 @@ class CSO_EarthAccess_Download_Listing(utopya.UtopyaRc): data["orbit"] = orbit # update record: - listing.UpdateRecord( os.path.join(subdir,fname), data, indent=f"{indent} ") + listing.UpdateRecord( + os.path.join(subdir, fname), data, indent=f"{indent} " + ) # endfor # filename match # endfor # filenames ## testing ... - #if len(listing) > 10 : + # if len(listing) > 10 : # break # endfor # walk over subdirs/files # adhoc .. - listing.df = listing.df.astype( { "orbit" : int } ) + listing.df = listing.df.astype({"orbit": int}) # sort on filename: - listing.Sort( by="orbit" ) + listing.Sort(by="orbit") # save: listing.Save(lst_file, dmode=dmode, indent=f"{indent} ") diff --git a/src/cso/cso_file.py b/src/cso/cso_file.py index eb296075f76e82122350e845ca180f78e09e7cdb..786c125221ea56a93710dab281a16d75e7f53b54 100644 --- a/src/cso/cso_file.py +++ b/src/cso/cso_file.py @@ -202,7 +202,7 @@ def Pack_DataArray(da, dtype="i2"): # only floats ... if da.dtype in [numpy.float32, numpy.float64]: # any values defined? 
- if numpy.any( ~ numpy.isnan(da.values) ) : + if numpy.any(~numpy.isnan(da.values)): # value range, ignore nan's: vmin = numpy.nanmin(da.values) vmax = numpy.nanmax(da.values) @@ -210,7 +210,7 @@ def Pack_DataArray(da, dtype="i2"): # dummy range: vmin = 0.0 vmax = 0.0 - #endif + # endif # target data type could be integer or float: if dtype.startswith("i"): # use absolute minimum to represent nans: @@ -290,15 +290,15 @@ class CSO_File(object): # open file: try: - # The VIIRS files have time stored in "seconds", + # The VIIRS files have time stored in "seconds", # with an attribute "long_name" to define the start time; # ensure that this is decoded into timedelta's without any warnings: - self.ds = xarray.open_dataset(self.filename, decode_timedelta=True ) + self.ds = xarray.open_dataset(self.filename, decode_timedelta=True) except: logging.error(f"could not open (corrupted?) file: {self.filename}") raise - #endtry - + # endtry + else: # dummy: self.filename = "None" @@ -636,7 +636,9 @@ class CSO_File(object): for vname in self.ds.keys(): # only numerical values: dtype = self.ds[vname].dtype - if numpy.issubdtype(dtype,numpy.integer) or numpy.issubdtype(dtype,numpy.floating): + if numpy.issubdtype(dtype, numpy.integer) or numpy.issubdtype( + dtype, numpy.floating + ): # enable zlib compression and set level: self.ds[vname].encoding["zlib"] = True self.ds[vname].encoding["complevel"] = complevel @@ -1313,7 +1315,6 @@ class CSO_Listing(object): # directory name: if self.filename is not None: - # check .. if not os.path.isfile(filename): logging.error("listing file not found: %s" % filename) @@ -1323,7 +1324,7 @@ class CSO_Listing(object): # base directory: self.dirname = os.path.dirname(self.filename) # could be empty .. 
- if len(self.dirname) == 0 : + if len(self.dirname) == 0: self.dirname = os.curdir # endif @@ -1342,7 +1343,6 @@ class CSO_Listing(object): self.df["end_time"] = pandas.to_datetime(self.df["end_time"]) else: - # not defined yet, assume current location: self.dirname = os.curdir @@ -1564,7 +1564,9 @@ class CSO_Listing(object): # * - def Select(self, tr=None, method="overlap", expr=None, blacklist=[], verbose=True, indent="", **kwargs): + def Select( + self, tr=None, method="overlap", expr=None, blacklist=[], verbose=True, indent="", **kwargs + ): """ Return :py:class:`CSO_Listing` object with selection of records. @@ -1674,12 +1676,12 @@ class CSO_Listing(object): if len(selected) > 0: # leave: break - #endif + # endif # endfor # selection criteria # show selection? - if verbose : + if verbose: # info ... logging.info(f"{indent}available records(s):") # loop: @@ -1689,7 +1691,7 @@ class CSO_Listing(object): line = line + " [" + filestatus[fname] + "]" logging.info(f"{indent} {line}") # endfor - #endif # verbose + # endif # verbose # no match? if len(selected) == 0: @@ -1763,7 +1765,7 @@ class CSO_Listing(object): # sort index or values: if by is None: self.df.sort_index(inplace=True) - else : + else: self.df.sort_values(by, inplace=True) # endif diff --git a/src/cso/cso_plot.py b/src/cso/cso_plot.py index c105abd7a392dcd28b6ac2c8caedf7af7bb6c421..3478e23694515235dde014a1a8667461f9597307 100644 --- a/src/cso/cso_plot.py +++ b/src/cso/cso_plot.py @@ -229,11 +229,11 @@ class Figure(object): # current domain: west, east, south, north = domain # extract values or step size: - meris,paras = bkwargs["raster"] + meris, paras = bkwargs["raster"] # draw meridians? if meris is not None: # step size instead of values? - if not hasattr(meris,"__len__"): + if not hasattr(meris, "__len__"): # value is step size: dmeri = meris # offset: @@ -247,7 +247,7 @@ class Figure(object): # draw parallels? if paras is not None: # step size instead of values? 
- if not hasattr(paras,"__len__"): + if not hasattr(paras, "__len__"): # extract step size: dpara = paras # offset: @@ -270,11 +270,11 @@ class Figure(object): # domain defined? if domain is not None: # map projection? - #~ the extent does not always work correct ..? - #if hasattr(self.ax,"set_extent") : + # ~ the extent does not always work correct ..? + # if hasattr(self.ax,"set_extent") : # self.ax.set_extent(domain) ##endif - #~ just use ax limits ... + # ~ just use ax limits ... self.ax.set_xlim(domain[0:2]) self.ax.set_ylim(domain[2:4]) # endif @@ -529,7 +529,7 @@ class ColorbarFigure(Figure): self.cmap__color_bad = self.cmap.get_bad() # reset to transparant self.cmap.set_bad(alpha=0) - else : + else: # not defined: self.cmap__color_bad = None # endif @@ -661,18 +661,28 @@ class ColorbarFigure(Figure): # purple-white-brown elif colors == "pwb": colors = ["purple", "blue", "cyan", "white", "yellow", "red", "brown"] - # + # # black-white-magenta: - elif colors == "kwm" : - colors = ["black","blue","cyan","lightgreen","white","yellow","orange","red","magenta"] + elif colors == "kwm": + colors = [ + "black", + "blue", + "cyan", + "lightgreen", + "white", + "yellow", + "orange", + "red", + "magenta", + ] # # blue-green-white-yellow-red - elif colors == "bgwyr" : - colors = ["blue","cyan","lightgreen","white","yellow","orange","red"] + elif colors == "bgwyr": + colors = ["blue", "cyan", "lightgreen", "white", "yellow", "orange", "red"] # # white "jet" - elif colors == "wjet" : - colors = ["white","cyan","lightgreen","yellow","orange","red","brown"] + elif colors == "wjet": + colors = ["white", "cyan", "lightgreen", "yellow", "orange", "red", "brown"] # endif # endif @@ -755,19 +765,20 @@ class ColorbarFigure(Figure): # get predefined colormap: if colors.startswith("cmcrameri.cm"): import cmcrameri - self.cmap = getattr(cmcrameri.cm,colors.lstrip("cmcrameri.cm.")) - else : + + self.cmap = getattr(cmcrameri.cm, colors.removeprefix("cmcrameri.cm.")) + else: + self.cmap = 
matplotlib.pyplot.get_cmap(colors) # endif # endif # boundaries: - if color_under is not None : + if color_under is not None: self.cmap.set_under(color_under) - if color_over is not None : + if color_over is not None: self.cmap.set_over(color_over) - if color_bad is not None : + if color_bad is not None: self.cmap.set_bad(color_bad) # enddef # Set_ColorMap @@ -812,14 +823,14 @@ class ColorbarFigure(Figure): if vbounds is None: # set value range if not explicitly done: if self.vmin is None: - if hasattr(values,'values') : + if hasattr(values, "values"): self.vmin = numpy.nanmin(values.values) - else : + else: self.vmin = numpy.nanmin(values) if self.vmax is None: - if hasattr(values,'values') : + if hasattr(values, "values"): self.vmax = numpy.nanmax(values.values) - else : + else: self.vmax = numpy.nanmax(values) # trap constant value ... @@ -1049,16 +1060,18 @@ class ColorbarFigure(Figure): # adhoc: add layer with no-data colors if self.cmap__color_bad is not None: # any nan values? - jj,ii = numpy.where( numpy.isnan(cc) ) - if len(jj) > 0 : + jj, ii = numpy.where(numpy.isnan(cc)) + if len(jj) > 0: # init as full nan field: - cc2 = numpy.full( cc.shape, numpy.nan ) + cc2 = numpy.full(cc.shape, numpy.nan) # reset: - cc2[jj,ii] = 1.0 + cc2[jj, ii] = 1.0 # add mask: - self.ax.pcolormesh(xx, yy, cc2, cmap=matplotlib.colors.ListedColormap([self.cmap__color_bad]) ) - #endif - #endif + self.ax.pcolormesh( + xx, yy, cc2, cmap=matplotlib.colors.ListedColormap([self.cmap__color_bad]) + ) + # endif + # endif # add colorbar if not done yet: self.Add_ColorBar(p) @@ -1197,7 +1210,8 @@ class ColorbarFigure(Figure): vmin=vmin, vmax=vmax, vbounds=vbounds, - marker=".", markersize=0, + marker=".", + markersize=0, linewidths=0, ) @@ -1488,24 +1502,24 @@ def GetColorMap(colors=None, color_under=None, color_over=None, color_bad=None, # *** -def MapFigure( domain=None ): - +def MapFigure(domain=None): """ Draw map. 
- + Optional arguments: - + * ``domain=[west,east,south,north]``, default ``[-180,180,-90,90]`` - + Return value: - + * ``fig`` : :py:class:`Figure` object """ - + # new: fig = Figure() # add ax: - fig.AddAxes( domain=domain, bmp=dict(countries=True) ) + fig.AddAxes(domain=domain, bmp=dict(countries=True)) + # enddef MapFigure @@ -1547,7 +1561,7 @@ def QuickPat( If ``border`` is defined, a border around the 2D field; the argument might contain a dictionary with line properties. - + Other arguments are passed to 'ColorbarFigure' : * ``figsize = (width,height)`` @@ -1563,91 +1577,91 @@ def QuickPat( Return value: * ``fig`` : Figure instance - + If an already existing Figure instance is passed as the ``fig`` argument, the pattern will be added to the plot. """ - + # modules: import numpy - + # tools: from . import cso_tools - + # extract known coordinates: - if hasattr(cc,'coords') : + if hasattr(cc, "coords"): # longitudes: - if x is None : - for key in ["lon","longitude"] : - if key in cc.coords : + if x is None: + for key in ["lon", "longitude"]: + if key in cc.coords: x = cc.coords[key].values # endif # endfor # endif # latitudes: - if y is None : - for key in ["lat","latitude"] : - if key in cc.coords : + if y is None: + for key in ["lat", "latitude"]: + if key in cc.coords: y = cc.coords[key].values # endif # endfor # endif # endif # cc.coords - + # domain not defined yet? - if domain is None : + if domain is None: # map defined? - if type(bmp) == dict : + if type(bmp) == dict: # domain defined? 
if "domain" in bmp.keys(): domain = bmp["domain"] # endif # endif # endif - + # corner points: - xx = cso_tools.GetCornerGridX( cc.shape, x=x, domain=domain ) - yy = cso_tools.GetCornerGridY( cc.shape, y=y, domain=domain ) + xx = cso_tools.GetCornerGridX(cc.shape, x=x, domain=domain) + yy = cso_tools.GetCornerGridY(cc.shape, y=y, domain=domain) # set domain if not present yet: if domain is None: domain = [xx.min(), xx.max(), yy.min(), yy.max()] - else : - if domain[0] is None : + else: + if domain[0] is None: domain[0] = xx.min() - if domain[1] is None : + if domain[1] is None: domain[1] = xx.max() - if domain[2] is None : + if domain[2] is None: domain[2] = yy.min() - if domain[3] is None : + if domain[3] is None: domain[3] = yy.max() - #endif - + # endif + # replace or add domain in bmp argument: - if type(bmp) == dict : + if type(bmp) == dict: bmp["domain"] = domain - + # symetric range? - if dmax is not None : + if dmax is not None: # range: vmin = -dmax - vmax = dmax + vmax = dmax # colors: - if 'cmap' in kwargs.keys() : - if 'colors' not in kwargs['cmap'].keys() : - kwargs['cmap']['colors'] = 'pwb' - #endif - else : - kwargs['cmap'] = { 'colors' : 'pwb' } - #endif - #endif - + if "cmap" in kwargs.keys(): + if "colors" not in kwargs["cmap"].keys(): + kwargs["cmap"]["colors"] = "pwb" + # endif + else: + kwargs["cmap"] = {"colors": "pwb"} + # endif + # endif + # value range: - if hasattr(cc,'values') : + if hasattr(cc, "values"): cmin = numpy.nanmin(cc.values) cmax = numpy.nanmax(cc.values) - else : + else: cmin = numpy.nanmin(cc) cmax = numpy.nanmax(cc) # endif @@ -1671,10 +1685,10 @@ def QuickPat( # endif # setup figure: - if fig is None : + if fig is None: fig = ColorbarFigure(cbar=cbar, bmp=bmp, domain=domain, title=title, **kwargs) - #endif - + # endif + # TOO SLOW! 
# a "pcolormesh" on geogrhapical projection does not work very well # for patches over the date line and/or "bad" values, @@ -1683,22 +1697,22 @@ def QuickPat( # add "pcolormesh": fig.PColor(xx, yy, cc, vmin=vmin, vmax=vmax, vbounds=vbounds) - + # add border around pattern? - if border is not None : + if border is not None: # colect points into line: - x = numpy.hstack((xx[0,:],xx[:,-1],numpy.flipud(xx[-1,:]),numpy.flipud(xx[:,0]))) - y = numpy.hstack((yy[0,:],yy[:,-1],numpy.flipud(yy[-1,:]),numpy.flipud(yy[:,0]))) + x = numpy.hstack((xx[0, :], xx[:, -1], numpy.flipud(xx[-1, :]), numpy.flipud(xx[:, 0]))) + y = numpy.hstack((yy[0, :], yy[:, -1], numpy.flipud(yy[-1, :]), numpy.flipud(yy[:, 0]))) # plot: if type(border) == dict: pkwargs = border else: - pkwargs = dict(color="black",linestyle="-") - #endif + pkwargs = dict(color="black", linestyle="-") + # endif # add: - fig.ax.plot( x, y, **pkwargs ) + fig.ax.plot(x, y, **pkwargs) # endif - + # ok return fig diff --git a/src/cso/cso_tools.py b/src/cso/cso_tools.py index f43599bdf011d544d2dff723370706c8718ac15c..c3c0dbbc7adbed1e918d613f2c3e218d42349821 100644 --- a/src/cso/cso_tools.py +++ b/src/cso/cso_tools.py @@ -37,7 +37,7 @@ Methods def mid2bounds(x): """ - Given input ``x`` with shape ``(nx,)``, + Given input ``x`` with shape ``(nx,)``, return array ``xb`` with shape ``(nx+1,)`` with boundary values between and outside the input locations. """ @@ -53,7 +53,7 @@ def mid2bounds(x): # edge: xb[0] = x[0] - 0.5 * (x[1] - x[0]) # mid: - xb[1:nx] = 0.5 * ( numpy.array(x[:nx-1]) + numpy.array(x[1:nx]) ) + xb[1:nx] = 0.5 * (numpy.array(x[: nx - 1]) + numpy.array(x[1:nx])) # end: xb[nx] = x[nx - 1] + 0.5 * (x[nx - 1] - x[nx - 2]) @@ -79,30 +79,30 @@ def mid2corners(xx, dim=None): ny, nx = xx.shape # extend in x-direction? 
- if (dim is None) or (dim == "x") : + if (dim is None) or (dim == "x"): # intermediate: bxx = numpy.zeros((ny, nx + 1), float) # fill: for iy in range(ny): bxx[iy, :] = mid2bounds(xx[iy, :]) # endfor - else : + else: # create copy: bxx = xx * 1.0 - #endif + # endif # extend in x-direction? - if (dim is None) or (dim == "y") : + if (dim is None) or (dim == "y"): # target: cxx = numpy.zeros((ny + 1, nx + 1), float) # fill: for ix in range(nx + 1): cxx[:, ix] = mid2bounds(bxx[:, ix]) # endfor - else : + else: # create copy: cxx = bxx * 1.0 - #endif + # endif # ok return cxx @@ -113,7 +113,7 @@ def mid2corners(xx, dim=None): # * -def GetCornerGridX( shp, x=None, domain=None, bounds=False ): +def GetCornerGridX(shp, x=None, domain=None, bounds=False): """ Return grid array of shape ``(ny+1,nx+1)`` with 'x' locations of corner points. If ``bounds=True``, the result is a ``(ny,nx,4)`` array. @@ -126,16 +126,16 @@ def GetCornerGridX( shp, x=None, domain=None, bounds=False ): Optional grid definitions: * ``x`` is either 1D or 2D and define the coordinates; supported shapes: - + * 2D corner field with shape ``(ny+1,nx+1)`` ; the result will have this shape too; * 2D mid of edges with shape ``(ny,nx+1)`` ; * 2D mid of cell with shape ``(ny,nx)`` ; * 1D corner points with shape ``(nx+1)``; * 1D mid of cell with shape ``(nx)``. - + * ``domain=(left,right,bottom,top)`` defines the boundaries of the domain; default = ``(0,nx,0,ny)``. - + If ``x`` is not defined, the ``domain`` or its default is used to define a regular grid. 
""" @@ -151,72 +151,73 @@ def GetCornerGridX( shp, x=None, domain=None, bounds=False ): else: left, right, bottom, top = domain # endif - + # form xx: - if x is None : + if x is None: # regular boundaries: xb = left + numpy.arange(nx + 1) * (right - left) / float(nx) # extend to 2D: - xx = numpy.zeros((ny+1,nx+1)) - for j in range(ny+1) : - xx[j,:] = xb - #endfor + xx = numpy.zeros((ny + 1, nx + 1)) + for j in range(ny + 1): + xx[j, :] = xb + # endfor # - elif x.shape == (ny+1,nx+1) : + elif x.shape == (ny + 1, nx + 1): # copy: xx = x # - elif x.shape == (ny,nx) : + elif x.shape == (ny, nx): # extrapolate both directions: xx = mid2corners(x) # - elif x.shape == (ny,nx+1) : + elif x.shape == (ny, nx + 1): # only in one direction: - xx = mid2corners(x,dim="y") + xx = mid2corners(x, dim="y") # - elif x.shape == (ny+1,nx) : + elif x.shape == (ny + 1, nx): # only in one direction: - xx = mid2corners(x,dim="x") + xx = mid2corners(x, dim="x") # - elif x.shape == (nx+1,): + elif x.shape == (nx + 1,): # extend to 2D: - xx = numpy.zeros((ny+1,nx+1)) - for j in range(ny+1) : - xx[j,:] = x - #endfor + xx = numpy.zeros((ny + 1, nx + 1)) + for j in range(ny + 1): + xx[j, :] = x + # endfor # elif x.shape == (nx,): # form bounds: xb = mid2bounds(x) # extend to 2D: - xx = numpy.zeros((ny+1,nx+1)) - for j in range(ny+1) : - xx[j,:] = xb - #endfor + xx = numpy.zeros((ny + 1, nx + 1)) + for j in range(ny + 1): + xx[j, :] = xb + # endfor # - else : - print( f"ERROR - unsupported x shape {x.shape} for field shape {shp}" ) + else: + print(f"ERROR - unsupported x shape {x.shape} for field shape {shp}") raise Exception - #endif - + # endif + # ok - if bounds : - xxb = numpy.zeros((ny,nx,4)) - xxb[:,:,0] = xx[0:ny ,0:nx ] - xxb[:,:,1] = xx[0:ny ,1:nx+1] - xxb[:,:,2] = xx[1:ny+1,1:nx+1] - xxb[:,:,3] = xx[1:ny+1,0:nx ] + if bounds: + xxb = numpy.zeros((ny, nx, 4)) + xxb[:, :, 0] = xx[0:ny, 0:nx] + xxb[:, :, 1] = xx[0:ny, 1 : nx + 1] + xxb[:, :, 2] = xx[1 : ny + 1, 1 : nx + 1] + xxb[:, :, 3] = 
xx[1 : ny + 1, 0:nx] return xxb - else : + else: return xx - #endif + # endif + # enddef GetCornerGridX # * -def GetCornerGridY( shp, y=None, domain=None, bounds=False ): +def GetCornerGridY(shp, y=None, domain=None, bounds=False): """ Return grid array of shape ``(ny+1,nx+1)`` with 'x' locations of corner points. If ``bounds=True``, the result is a ``(ny,nx,4)`` array. @@ -228,15 +229,15 @@ def GetCornerGridY( shp, y=None, domain=None, bounds=False ): Optional grid definitions: * ``y`` is either 1D or 2D and define the coordinates; supported shapes: - + * 2D corner field with shape ``(ny+1,nx+1)`` ; the result will have this shape too; * 2D mid of edges with shape ``(ny+1,nx)`` ; * 2D mid of cell with shape ``(ny,nx)`` ; * 1D corner points with shape ``(ny+1)``; * 1D mid of cell with shape ``(ny)``. - + * ``domain=(left,right,bottom,top)`` defines the boundaries of the domain; default = ``(0,nx,0,ny)``. - + If ``y`` is not defined, the ``domain`` or its default is used define a regular grid. 
""" @@ -252,65 +253,66 @@ def GetCornerGridY( shp, y=None, domain=None, bounds=False ): else: left, right, bottom, top = domain # endif - + # form yy: - if y is None : + if y is None: # regular boundaries: yb = bottom + numpy.arange(ny + 1) * (top - bottom) / float(ny) # extend to 2D: - yy = numpy.zeros((ny+1,nx+1)) - for i in range(nx+1) : - yy[:,i] = yb - #endfor + yy = numpy.zeros((ny + 1, nx + 1)) + for i in range(nx + 1): + yy[:, i] = yb + # endfor # - elif y.shape == (ny+1,nx+1) : + elif y.shape == (ny + 1, nx + 1): # copy: yy = y # - elif y.shape == (ny,nx) : + elif y.shape == (ny, nx): # extrapolate both directions: yy = mid2corners(y) # - elif y.shape == (ny,nx+1) : + elif y.shape == (ny, nx + 1): # only in one direction: - yy = mid2corners(y,dim="y") + yy = mid2corners(y, dim="y") # - elif y.shape == (ny+1,nx) : + elif y.shape == (ny + 1, nx): # only in one direction: - yy = mid2corners(y,dim="x") + yy = mid2corners(y, dim="x") # - elif y.shape == (ny+1,): + elif y.shape == (ny + 1,): # extend to 2D: - yy = numpy.zeros((ny+1,nx+1)) - for i in range(nx+1) : - yy[:,i] = y - #endfor + yy = numpy.zeros((ny + 1, nx + 1)) + for i in range(nx + 1): + yy[:, i] = y + # endfor # elif y.shape == (ny,): # form bounds: yb = mid2bounds(y) # extend to 2D: - yy = numpy.zeros((ny+1,nx+1)) - for i in range(nx+1) : - yy[:,i] = yb - #endfor + yy = numpy.zeros((ny + 1, nx + 1)) + for i in range(nx + 1): + yy[:, i] = yb + # endfor # - else : - print( f"ERROR - unsupported y shape {y.shape} for field shape {shp}" ) + else: + print(f"ERROR - unsupported y shape {y.shape} for field shape {shp}") raise Eyception - #endif - + # endif + # ok - if bounds : - yyb = numpy.zeros((ny,nx,4)) - yyb[:,:,0] = yy[0:ny ,0:nx ] - yyb[:,:,1] = yy[0:ny ,1:nx+1] - yyb[:,:,2] = yy[1:ny+1,1:nx+1] - yyb[:,:,3] = yy[1:ny+1,0:nx ] + if bounds: + yyb = numpy.zeros((ny, nx, 4)) + yyb[:, :, 0] = yy[0:ny, 0:nx] + yyb[:, :, 1] = yy[0:ny, 1 : nx + 1] + yyb[:, :, 2] = yy[1 : ny + 1, 1 : nx + 1] + yyb[:, :, 3] 
= yy[1 : ny + 1, 0:nx] return yyb - else : + else: return yy - #endif + # endif + # enddef GetCornerGridY diff --git a/src/cso/cso_viirs.py b/src/cso/cso_viirs.py index ef8b20bffc8e345fe5feef9da64a8fafb4dd69b0..92b57141fd22f1dfddfe708cc37560feabab4855 100644 --- a/src/cso/cso_viirs.py +++ b/src/cso/cso_viirs.py @@ -76,77 +76,77 @@ class VIIRS_File(object): Example of variables:: dimensions: - Idx_Atrack = 404 ; // along-track - Idx_Xtrack = 400 ; // cross-track - Land_Bands = 3 ; - Ocean_Bands = 7 ; - Reflectance_Bands = 8 ; + Idx_Atrack = 404 ; // along-track + Idx_Xtrack = 400 ; // cross-track + Land_Bands = 3 ; + Ocean_Bands = 7 ; + Reflectance_Bands = 8 ; variables: - float Idx_Atrack (Idx_Atrack) ; - float Idx_Xtrack (Idx_Xtrack) ; - float Latitude (Idx_Atrack, Idx_Xtrack) ; - float Longitude (Idx_Atrack, Idx_Xtrack) ; - float Cell_Average_Elevation_Land (Idx_Atrack, Idx_Xtrack) ; - float Cell_Average_Elevation_Ocean (Idx_Atrack, Idx_Xtrack) ; - - double Scan_Start_Time (Idx_Atrack, Idx_Xtrack) ; - - float Aerosol_Optical_Thickness_550_Land (Idx_Atrack, Idx_Xtrack) ; - float Aerosol_Optical_Thickness_550_Land_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; - float Aerosol_Optical_Thickness_550_STDV_Land (Idx_Atrack, Idx_Xtrack) ; - float Aerosol_Optical_Thickness_550_Expected_Uncertainty_Land (Idx_Atrack, Idx_Xtrack) ; - int Aerosol_Optical_Thickness_QA_Flag_Land (Idx_Atrack, Idx_Xtrack) ; - int Aerosol_Type_Land (Idx_Atrack, Idx_Xtrack) ; - int Algorithm_Flag_Land (Idx_Atrack, Idx_Xtrack) ; - - float Aerosol_Optical_Thickness_550_Ocean (Idx_Atrack, Idx_Xtrack) ; - float Aerosol_Optical_Thickness_550_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; - float Aerosol_Optical_Thickness_550_STDV_Ocean (Idx_Atrack, Idx_Xtrack) ; - float Aerosol_Optical_Thickness_550_Expected_Uncertainty_Ocean (Idx_Atrack, Idx_Xtrack) ; - int Aerosol_Optical_Thickness_QA_Flag_Ocean (Idx_Atrack, Idx_Xtrack) ; - int Aerosol_Type_Ocean (Idx_Atrack, Idx_Xtrack) ; - int Algorithm_Flag_Ocean 
(Idx_Atrack, Idx_Xtrack) ; - - float Aerosol_Optical_Thickness_550_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; - float Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; - int Aerosol_Type_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; - - float Angstrom_Exponent_Land (Idx_Atrack, Idx_Xtrack) ; - float Angstrom_Exponent_Land_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; - float Angstrom_Exponent_Ocean (Idx_Atrack, Idx_Xtrack) ; - float Angstrom_Exponent_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; - float Angstrom_Exponent_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; - float Angstrom_Exponent_Land_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; - - float Fine_Mode_Fraction_550_Ocean (Idx_Atrack, Idx_Xtrack) ; - float Fine_Mode_Fraction_550_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; - - float Land_Bands (Land_Bands) ; - float Spectral_Aerosol_Optical_Thickness_Land (Land_Bands , Idx_Atrack, Idx_Xtrack) ; - float Spectral_Single_Scattering_Albedo_Land (Land_Bands , Idx_Atrack, Idx_Xtrack) ; - float Spectral_Surface_Reflectance (Land_Bands , Idx_Atrack, Idx_Xtrack) ; - float Ocean_Bands (Ocean_Bands) ; - float Spectral_Aerosol_Optical_Thickness_Ocean (Ocean_Bands , Idx_Atrack, Idx_Xtrack) ; - float Reflectance_Bands (Reflectance_Bands) ; - float Spectral_TOA_Reflectance_Land (Idx_Atrack, Idx_Xtrack, Reflectance_Bands) ; - float Spectral_TOA_Reflectance_Ocean (Idx_Atrack, Idx_Xtrack, Reflectance_Bands) ; - - int Number_Of_Pixels_Used_Land (Idx_Atrack, Idx_Xtrack) ; - int Number_Of_Pixels_Used_Ocean (Idx_Atrack, Idx_Xtrack) ; - int Number_Valid_Pixels (Idx_Atrack, Idx_Xtrack) ; - - float Ocean_Sum_Squares (Idx_Atrack, Idx_Xtrack) ; - float Precipitable_Water (Idx_Atrack, Idx_Xtrack) ; - float Relative_Azimuth_Angle (Idx_Atrack, Idx_Xtrack) ; - float Scattering_Angle (Idx_Atrack, Idx_Xtrack) ; - float Solar_Zenith_Angle (Idx_Atrack, Idx_Xtrack) ; - float TOA_NDVI (Idx_Atrack, Idx_Xtrack) ; - float Total_Column_Ozone (Idx_Atrack, Idx_Xtrack) ; - float 
Unsuitable_Pixel_Fraction_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; - float Viewing_Zenith_Angle (Idx_Atrack, Idx_Xtrack) ; - float Wind_Direction (Idx_Atrack, Idx_Xtrack) ; - float Wind_Speed (Idx_Atrack, Idx_Xtrack) ; + float Idx_Atrack (Idx_Atrack) ; + float Idx_Xtrack (Idx_Xtrack) ; + float Latitude (Idx_Atrack, Idx_Xtrack) ; + float Longitude (Idx_Atrack, Idx_Xtrack) ; + float Cell_Average_Elevation_Land (Idx_Atrack, Idx_Xtrack) ; + float Cell_Average_Elevation_Ocean (Idx_Atrack, Idx_Xtrack) ; + + double Scan_Start_Time (Idx_Atrack, Idx_Xtrack) ; + + float Aerosol_Optical_Thickness_550_Land (Idx_Atrack, Idx_Xtrack) ; + float Aerosol_Optical_Thickness_550_Land_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; + float Aerosol_Optical_Thickness_550_STDV_Land (Idx_Atrack, Idx_Xtrack) ; + float Aerosol_Optical_Thickness_550_Expected_Uncertainty_Land (Idx_Atrack, Idx_Xtrack) ; + int Aerosol_Optical_Thickness_QA_Flag_Land (Idx_Atrack, Idx_Xtrack) ; + int Aerosol_Type_Land (Idx_Atrack, Idx_Xtrack) ; + int Algorithm_Flag_Land (Idx_Atrack, Idx_Xtrack) ; + + float Aerosol_Optical_Thickness_550_Ocean (Idx_Atrack, Idx_Xtrack) ; + float Aerosol_Optical_Thickness_550_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; + float Aerosol_Optical_Thickness_550_STDV_Ocean (Idx_Atrack, Idx_Xtrack) ; + float Aerosol_Optical_Thickness_550_Expected_Uncertainty_Ocean (Idx_Atrack, Idx_Xtrack) ; + int Aerosol_Optical_Thickness_QA_Flag_Ocean (Idx_Atrack, Idx_Xtrack) ; + int Aerosol_Type_Ocean (Idx_Atrack, Idx_Xtrack) ; + int Algorithm_Flag_Ocean (Idx_Atrack, Idx_Xtrack) ; + + float Aerosol_Optical_Thickness_550_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; + float Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; + int Aerosol_Type_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; + + float Angstrom_Exponent_Land (Idx_Atrack, Idx_Xtrack) ; + float Angstrom_Exponent_Land_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; + float Angstrom_Exponent_Ocean (Idx_Atrack, Idx_Xtrack) ; + float 
Angstrom_Exponent_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; + float Angstrom_Exponent_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; + float Angstrom_Exponent_Land_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; + + float Fine_Mode_Fraction_550_Ocean (Idx_Atrack, Idx_Xtrack) ; + float Fine_Mode_Fraction_550_Ocean_Best_Estimate (Idx_Atrack, Idx_Xtrack) ; + + float Land_Bands (Land_Bands) ; + float Spectral_Aerosol_Optical_Thickness_Land (Land_Bands , Idx_Atrack, Idx_Xtrack) ; + float Spectral_Single_Scattering_Albedo_Land (Land_Bands , Idx_Atrack, Idx_Xtrack) ; + float Spectral_Surface_Reflectance (Land_Bands , Idx_Atrack, Idx_Xtrack) ; + float Ocean_Bands (Ocean_Bands) ; + float Spectral_Aerosol_Optical_Thickness_Ocean (Ocean_Bands , Idx_Atrack, Idx_Xtrack) ; + float Reflectance_Bands (Reflectance_Bands) ; + float Spectral_TOA_Reflectance_Land (Idx_Atrack, Idx_Xtrack, Reflectance_Bands) ; + float Spectral_TOA_Reflectance_Ocean (Idx_Atrack, Idx_Xtrack, Reflectance_Bands) ; + + int Number_Of_Pixels_Used_Land (Idx_Atrack, Idx_Xtrack) ; + int Number_Of_Pixels_Used_Ocean (Idx_Atrack, Idx_Xtrack) ; + int Number_Valid_Pixels (Idx_Atrack, Idx_Xtrack) ; + + float Ocean_Sum_Squares (Idx_Atrack, Idx_Xtrack) ; + float Precipitable_Water (Idx_Atrack, Idx_Xtrack) ; + float Relative_Azimuth_Angle (Idx_Atrack, Idx_Xtrack) ; + float Scattering_Angle (Idx_Atrack, Idx_Xtrack) ; + float Solar_Zenith_Angle (Idx_Atrack, Idx_Xtrack) ; + float TOA_NDVI (Idx_Atrack, Idx_Xtrack) ; + float Total_Column_Ozone (Idx_Atrack, Idx_Xtrack) ; + float Unsuitable_Pixel_Fraction_Land_Ocean (Idx_Atrack, Idx_Xtrack) ; + float Viewing_Zenith_Angle (Idx_Atrack, Idx_Xtrack) ; + float Wind_Direction (Idx_Atrack, Idx_Xtrack) ; + float Wind_Speed (Idx_Atrack, Idx_Xtrack) ; Arguments: @@ -167,13 +167,15 @@ class VIIRS_File(object): self.filenames = filenames ## check ... 
- #if not os.path.isfile(filename): + # if not os.path.isfile(filename): # logging.error("file not found : %s" % filename) # raise Exception ## endif # open: - with xarray.open_mfdataset(self.filenames, concat_dim="Idx_Atrack", combine="nested") as ds: + with xarray.open_mfdataset( + self.filenames, concat_dim="Idx_Atrack", combine="nested" + ) as ds: # copy some dimensions for convenience: self.nscan = ds.sizes["Idx_Atrack"] # along track self.ngpix = ds.sizes["Idx_Xtrack"] # cross track @@ -209,7 +211,9 @@ class VIIRS_File(object): import xarray # open: - with xarray.open_mfdataset(self.filenames, concat_dim="Idx_Atrack", combine="nested") as ds: + with xarray.open_mfdataset( + self.filenames, concat_dim="Idx_Atrack", combine="nested" + ) as ds: # check .. if varname not in ds: logging.error(f"no variable '{varname}' in file: {self.filename}") @@ -240,7 +244,7 @@ class VIIRS_File(object): # same .. pass # - else : + else: logging.error('unsupported conversion "%s" requested ...' % conversion) raise Exception # endif @@ -378,7 +382,7 @@ class VIIRS_File(object): # update history: history.append(f"selected pixels with '{varname}' >= {smin}") # - # ~ minimum value, + # ~ minimum value, # check 2 variables, for example if disjunct land/ocean variables are present: elif filtertype == "min2": # which variable ? 
@@ -400,7 +404,7 @@ class VIIRS_File(object): if numpy.any(numpy.isnan(da2.data)): xdata2[numpy.where(numpy.isnan(da2.data))] = fmin - 1 # apply filter, also select on not-nan to avoid warnings from comparisons: - selected = selected & ( (xdata >= fmin) | (xdata2 >= fmin) ) + selected = selected & ((xdata >= fmin) | (xdata2 >= fmin)) # update history: history.append(f"selected pixels with '{varname}' or '{varname2}' >= {smin}") # @@ -668,7 +672,6 @@ class CSO_VIIRS_File(cso_file.CSO_File): # check target dims: # ~ single value per pixel: if vdims == ("pixel",): - # extract 1D array of pixels from 2D track: if oda.dims in [("Idx_Atrack", "Idx_Xtrack")]: # create 1D array with data from selected pixels, copy attributes: @@ -710,22 +713,22 @@ class CSO_VIIRS_File(cso_file.CSO_File): opath = rcf.get(f"{vkey}.from") # get data array: oda = sfile.GetData(opath, units=vunits) - + # reference time: if oda.attrs["long_name"] == "Scan start time (TAI93)": tref = numpy.datetime64("1993") - else : + else: logging.error(f"could not guess time units from attributes:") logging.error(oda.attrs) raise Exception - #endif - + # endif + # convert from reference and timedelta64 values: tvalues = tref + oda.values # create variable, extract selected pixels: da = xarray.DataArray( - tvalues[iis,iip], + tvalues[iis, iip], dims=vdims, coords={"pixel": pixel}, attrs=oda.attrs, @@ -733,26 +736,25 @@ class CSO_VIIRS_File(cso_file.CSO_File): # ensure corrrect output: da.attrs["standard_name"] = "time" da.attrs["long_name"] = "time" - tyear = pandas.to_datetime( tvalues[0,0] ).year + tyear = pandas.to_datetime(tvalues[0, 0]).year da.encoding["units"] = f"seconds since {tyear}-01-01 00:00:000" da.encoding["calendar"] = "standard" - # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # pixel corners: - elif special in ["longitude","latitude"]: + elif special in ["longitude", "latitude"]: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # name of original field: opath 
= rcf.get(f"{vkey}.from") # get data array: oda = sfile.GetData(opath, units=vunits) - + # apply correction of even scan lines: - values = self.AERDB_Grid_Correction( oda.data ) + values = self.AERDB_Grid_Correction(oda.data) # create variable: da = xarray.DataArray( - values[iis,iip], + values[iis, iip], dims=vdims, coords={"pixel": pixel}, attrs=oda.attrs, @@ -760,7 +762,7 @@ class CSO_VIIRS_File(cso_file.CSO_File): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # pixel corners: - elif special in ["longitude_bounds","latitude_bounds"]: + elif special in ["longitude_bounds", "latitude_bounds"]: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # name of original field: @@ -773,29 +775,35 @@ class CSO_VIIRS_File(cso_file.CSO_File): # bound arrays for longitude etc: if oda.dims in [("Idx_Atrack", "Idx_Xtrack")]: # apply correction of even scan lines: - values1 = self.AERDB_Grid_Correction( oda.data ) + values1 = self.AERDB_Grid_Correction(oda.data) # adhoc .. if "longitude" in special: # info .. 
logging.info(" wrap longitudes around the world ...") # interpolate/extrapolate to corners, return (scan,ground,4) array: - values2 = cso_tools.GetCornerGridX( values1.shape, x=values1, bounds=True ) + values2 = cso_tools.GetCornerGridX( + values1.shape, x=values1, bounds=True + ) # reset longitudes to be wrapped around the world in one contineous series: values_bounds = self.ResetLongitudeBounds(values2) - else : + else: # interpolate/extrapolate to corners, return (scan,ground,4) array: - values_bounds = cso_tools.GetCornerGridY( values1.shape, y=values1, bounds=True ) - #endif + values_bounds = cso_tools.GetCornerGridY( + values1.shape, y=values1, bounds=True + ) + # endif # create variable: da = xarray.DataArray( - values_bounds[iis,iip,:], + values_bounds[iis, iip, :], dims=vdims, coords={"pixel": pixel}, attrs=oda.attrs, ) else: - logging.error(f"could not convert from dims '{oda.dims}' to target dims '{vdims}'") + logging.error( + f"could not convert from dims '{oda.dims}' to target dims '{vdims}'" + ) raise Exception # endif else: @@ -819,7 +827,7 @@ class CSO_VIIRS_File(cso_file.CSO_File): # check shape: if oda.dims in [("Idx_Atrack", "Idx_Xtrack")]: # apply correction of even scan lines: - values1 = self.AERDB_Grid_Correction( oda.data ) + values1 = self.AERDB_Grid_Correction(oda.data) # create 2D array with data from selected scan lines: da = xarray.DataArray( values1[is0:is1, :], @@ -829,7 +837,9 @@ class CSO_VIIRS_File(cso_file.CSO_File): ) else: - logging.error(f"could not convert from dims '{oda.dims}' to target dims '{vdims}'") + logging.error( + f"could not convert from dims '{oda.dims}' to target dims '{vdims}'" + ) raise Exception # endif else: @@ -840,7 +850,7 @@ class CSO_VIIRS_File(cso_file.CSO_File): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # full track, corners: - elif special in ["track_longitude_bounds","track_latitude_bounds"]: + elif special in ["track_longitude_bounds", "track_latitude_bounds"]: # 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # name of original field: @@ -853,7 +863,7 @@ class CSO_VIIRS_File(cso_file.CSO_File): # check shape: if oda.dims in [("Idx_Atrack", "Idx_Xtrack")]: # apply correction of even scan lines: - values1 = self.AERDB_Grid_Correction( oda.data ) + values1 = self.AERDB_Grid_Correction(oda.data) # select scan lines: values2 = values1[is0:is1, :] # switch: @@ -861,13 +871,17 @@ class CSO_VIIRS_File(cso_file.CSO_File): # info .. logging.info(" wrap longitudes around the world ...") # interpolate/extrapolate to corners, return (scan,ground,4) array: - values3 = cso_tools.GetCornerGridX( values2.shape, x=values2, bounds=True ) + values3 = cso_tools.GetCornerGridX( + values2.shape, x=values2, bounds=True + ) # reset longitudes to be wrapped around the world in one contineous series: values_bounds = self.ResetLongitudeBounds(values3) - else : + else: # interpolate/extrapolate to corners, return (scan,ground,4) array: - values_bounds = cso_tools.GetCornerGridY( values2.shape, y=values2, bounds=True ) - #endif + values_bounds = cso_tools.GetCornerGridY( + values2.shape, y=values2, bounds=True + ) + # endif # create 2D array (+corners) with data from selected scan lines: da = xarray.DataArray( @@ -880,7 +894,9 @@ class CSO_VIIRS_File(cso_file.CSO_File): attrs=oda.attrs, ) else: - logging.error(f"could not convert from dims '{oda.dims}' to target dims '{vdims}'") + logging.error( + f"could not convert from dims '{oda.dims}' to target dims '{vdims}'" + ) raise Exception # endif else: @@ -895,17 +911,17 @@ class CSO_VIIRS_File(cso_file.CSO_File): # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # extract wavelengh: - key,val = special.split("=") + key, val = special.split("=") wl = int(val) - + # name(s) of original field: opaths = rcf.get(f"{vkey}.from").split() # count: nvar = len(opaths) - + # no result yet: values = None - + # loop over input fields: for ivar in range(nvar): # current: @@ -916,59 +932,65 
@@ class CSO_VIIRS_File(cso_file.CSO_File): oda = sfile.GetData(opath, units=vunits) # init result? - if values is None : + if values is None: # shape: - nw,ny,nx = oda.shape + nw, ny, nx = oda.shape # empty array, first dim is number of input fields: - values = numpy.zeros((nvar,ny,nx)) - #endif + values = numpy.zeros((nvar, ny, nx)) + # endif # assumed wavelength coordinate: cname = oda.dims[0] # get band coordinate: - if cname in ["Land_Bands","Ocean_Bands"]: + if cname in ["Land_Bands", "Ocean_Bands"]: # extract: band_da = sfile.GetData(cname) - else : - logging.error(f"unsupported wavelength dimension '{cname}' for variable '{opath}'") + else: + logging.error( + f"unsupported wavelength dimension '{cname}' for variable '{opath}'" + ) raise Exception - #endif + # endif # search index: - kk, = numpy.where( band_da.values == wl ) - if len(kk) == 0 : + (kk,) = numpy.where(band_da.values == wl) + if len(kk) == 0: logger.error(f"wavelength {wl} not found in coordinate '{cname}'") raise Exeption - #endif + # endif # extract: iwl = kk[0] # info .. 
logging.info(f"{indent} selected wavelenght record {iwl} ({wl})") - + # copy record: - values[ivar] = oda.data[iwl,:,:] + values[ivar] = oda.data[iwl, :, :] + + # endfor # input fields - #endfor # input fields - # average over the retrievals, probably a land and a ocean field: with warnings.catch_warnings(): - warnings.filterwarnings(action='ignore', message='Mean of empty slice') - pat = numpy.nanmean( values, axis=0 ) - #endwith + warnings.filterwarnings(action="ignore", message="Mean of empty slice") + pat = numpy.nanmean(values, axis=0) + # endwith # check target dims: if vdims == ("pixel",): # check shape: - if oda.dims in [("Land_Bands", "Idx_Atrack", "Idx_Xtrack"), - ("Ocean_Bands", "Idx_Atrack", "Idx_Xtrack")]: + if oda.dims in [ + ("Land_Bands", "Idx_Atrack", "Idx_Xtrack"), + ("Ocean_Bands", "Idx_Atrack", "Idx_Xtrack"), + ]: # create 1D array with selected pixels: da = xarray.DataArray( - pat[iis,iip], + pat[iis, iip], dims=vdims, coords={"pixel": pixel}, attrs=oda.attrs, ) else: - logging.error(f"could not convert from dims '{oda.dims}' to target dims '{vdims}'") + logging.error( + f"could not convert from dims '{oda.dims}' to target dims '{vdims}'" + ) raise Exception # endif else: @@ -1005,11 +1027,10 @@ class CSO_VIIRS_File(cso_file.CSO_File): # endfor # target variables # enddef AddSelection - - # * - def AERDB_Grid_Correction( self, values ): + # * + def AERDB_Grid_Correction(self, values): """ Correct locations of the odd scan lines, which seem to become dis-located towards the edge of the swath. 
@@ -1022,38 +1043,38 @@ class CSO_VIIRS_File(cso_file.CSO_File): * ``xx`` : array of shape ``(nx,ny)`` with corrected locations """ - + # modules: import numpy # shape: - ny,nx = values.shape + ny, nx = values.shape # target arrays: - xx = numpy.zeros((ny,nx)) + xx = numpy.zeros((ny, nx)) # loop over rows: for j in range(ny): # odd or even row - if j % 2 == 0 : + if j % 2 == 0: # odd row; copy: - xx[j,:] = values[j,:] - else : + xx[j, :] = values[j, :] + else: # even row; last scanline could not be averaged ... - if j < ny-1: + if j < ny - 1: # average of surrounding lines - xx[j,:] = 0.5 * values[j-1,:] + 0.5 * values[j+1,:] + xx[j, :] = 0.5 * values[j - 1, :] + 0.5 * values[j + 1, :] else: # last scan line, extrapolate: - xx[j,:] = 2 * xx[j-1,:] - xx[j-2,:] - #endif - #endif # odd or even row - #endfor + xx[j, :] = 2 * xx[j - 1, :] - xx[j - 2, :] + # endif + # endif # odd or even row + # endfor # ok return xx - #enddef AERDB_Grid_Correction + # enddef AERDB_Grid_Correction # * @@ -1161,7 +1182,7 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): The name of the output files should be specified and might include templates for the time (take from the first pixel) and the orbit number:: - + ! output filename: ! - times are taken from mid of selection, rounded to hours ! - templates: %{orbit} @@ -1171,7 +1192,7 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): ! directory creation mode: .dmode : 0o775 - + A flag is read to decide if existing output files should be renewed or kept:: .renew : True @@ -1240,16 +1261,16 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): # info ... tfmt = "%Y-%m-%d %H:%M" logging.info(f"{indent}timerange: [%s,%s]" % (t1.strftime(tfmt), t2.strftime(tfmt))) - + # listing table: listing_file = self.GetSetting("input.listing") # read: - listing = cso_file.CSO_Listing( listing_file ) + listing = cso_file.CSO_Listing(listing_file) # info ... 
logging.info(f"{indent}number of files : %i" % len(listing)) # path: - listing_dir = os.path.dirname( listing_file ) + listing_dir = os.path.dirname(listing_file) # select records with start time inside time range: xlst = listing.Select(tr=(t1, t2)) @@ -1280,15 +1301,15 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): for orbit in orbits: # info ... logging.info(f"{indent} orbit '{orbit}' ...") - + # select orbits: - olst = xlst.Select( expr=f"%{{orbit}} == '{orbit}'", verbose=False ) + olst = xlst.Select(expr=f"%{{orbit}} == '{orbit}'", verbose=False) # info .. - logging.info( f"{indent} process {len(olst)} files ..." ) + logging.info(f"{indent} process {len(olst)} files ...") # next orbit if none: - if len(olst) == 0 : + if len(olst) == 0: continue - + # list of input files: input_filenames = [] # start time: @@ -1298,27 +1319,27 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): # current: rec = olst.GetRecord(irec) # full path: - input_filename = os.path.join( listing_dir, rec["filename"] ) + input_filename = os.path.join(listing_dir, rec["filename"]) # store: - input_filenames.append( input_filename ) + input_filenames.append(input_filename) # start time: - if t0 is None : + if t0 is None: t0 = rec["start_time"] - else : - t0 = min( t0, rec["start_time"] ) - #endif - #endfor + else: + t0 = min(t0, rec["start_time"]) + # endif + # endfor # target files per orbit: - output_filename = t0.strftime( output_filename__template ) + output_filename = t0.strftime(output_filename__template) # replace templates: - output_filename = output_filename.replace("%{orbit}",str(orbit)) + output_filename = output_filename.replace("%{orbit}", str(orbit)) # info ... 
logging.info(f"{indent} output file {output_filename} ...") # output dir: - cso_file.CheckDir( output_filename, dmode=dmode ) + cso_file.CheckDir(output_filename, dmode=dmode) # split filename at extension: fname, ext = os.path.splitext(output_filename) @@ -1352,34 +1373,33 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): # set flag: create = True # - elif renew : + elif renew: # info .. logging.info(" renew current file ...") # set flag: create = True # - else : + else: # info ... logging.info(" keep existing file ...") # set flag: create = False - #endif + # endif # create? if create: - # sort inplace: input_filenames.sort() # init filenamelist for history line: - input_history = '' + input_history = "" # info ... logging.info(f"{indent} input files:") for input_filename in input_filenames: - logging.info( f"{indent} {input_filename}" ) + logging.info(f"{indent} {input_filename}") # update history description: input_history += " " + os.path.basename(input_filename) - #endfor + # endfor # info ... logging.info(f"{indent} open ...") @@ -1393,17 +1413,17 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): nselected = selected.sum() # info ... logging.info(f"{indent} selected {nselected} of {selected.size} pixels") - + # adhoc: at least 3 scan-lines needed for grid correction ... if nselected > 0: # selection indices: iis, iip = numpy.where(selected) # number of scan lines: nscan = iis.max() - iis.min() + 1 - else : + else: # no lines at all .. nscan = 0 - #endif + # endif # any ? if nselected == 0: @@ -1412,20 +1432,22 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): # write message file: with open(output_msgfile, "w") as f: f.write(f"no pixels selected in:\n") - for input_filename in input_filenames : - f.write( f" {input_filename}\n") - #endfor + for input_filename in input_filenames: + f.write(f" {input_filename}\n") + # endfor # endwith - + elif nscan < 3: # info .. 
- logging.warning(f"{indent} only {nscan} scan lines selected, need at least 3 ...") + logging.warning( + f"{indent} only {nscan} scan lines selected, need at least 3 ..." + ) # write message file: with open(output_msgfile, "w") as f: f.write(f"not enough scan lines selected in:\n") - for input_filename in input_filenames : - f.write( f" {input_filename}\n") - #endfor + for input_filename in input_filenames: + f.write(f" {input_filename}\n") + # endfor # endwith else: @@ -1434,16 +1456,18 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): # new: csf = CSO_VIIRS_File() # add: - csf.AddSelection( sfile, selected, self.rcf, self.rcbase, indent=f"{indent} ") + csf.AddSelection( + sfile, selected, self.rcf, self.rcbase, indent=f"{indent} " + ) # update history: history.append(f"added {nselected} pixels from {input_history}") ## update attributes: - #for key in ["orbit", "processing", "processor_version", "collection"]: + # for key in ["orbit", "processing", "processor_version", "collection"]: # attrs[key] = rec[key] ## endfor - #attrs["orbit_file"] = os.path.basename(input_filename) + # attrs["orbit_file"] = os.path.basename(input_filename) # write: csf.Write( @@ -1455,17 +1479,17 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): ) ## testing ... - #logging.warning( f"{indent}break after first created file ..." ) - #break + # logging.warning( f"{indent}break after first created file ..." ) + # break # endif # any selected - + ## testing ... - #logging.warning( 'break after first input file ...' ) - #break + # logging.warning( 'break after first input file ...' ) + # break # endif # renew output file - + # endfor # orbits # info ... @@ -1479,7 +1503,6 @@ class CSO_VIIRS_Convert(utopya.UtopyaRc): # endclass CSO_VIIRS_Convert - ######################################################################## ### ### end