diff --git a/examples/example-unstructured.py b/examples/example-unstructured.py new file mode 100755 index 0000000..e5aa559 --- /dev/null +++ b/examples/example-unstructured.py @@ -0,0 +1,345 @@ +#!/usr/bin/env python3 +import atlas4py as atlas +import math +import numpy as np + +atlas.initialize() # Required, will also initialize MPI + +# constants +earth_radius = 6371229. +km = 1000. + +xy = np.array([ + [180,0], + [90,0], + [-90,0], + [0,90], + [0,-90], + [0,0], + [18,0], + [36,0], + [54,0], + [72,0], + [108,0], + [126,0], + [144,0], + [162,0], + [-162,0], + [-144,0], + [-126,0], + [-108,0], + [-72,0], + [-54,0], + [-36,0], + [-18,0], + [0,18], + [0,36], + [0,54], + [0,72], + [180,72], + [180,54], + [180,36], + [180,18], + [180,-18], + [180,-36], + [180,-54], + [180,-72], + [0,-72], + [0,-54], + [0,-36], + [0,-18], + [90,18], + [90,36], + [90,54], + [90,72], + [-90,72], + [-90,54], + [-90,36], + [-90,18], + [-90,-18], + [-90,-36], + [-90,-54], + [-90,-72], + [90,-72], + [90,-54], + [90,-36], + [90,-18], + [123.974,-58.6741], + [154.087,-16.9547], + [154.212,-58.8675], + [114.377,-41.9617], + [125.567,-23.5133], + [137.627,-40.8524], + [106.162,-24.5874], + [158.508,-38.55], + [137.826,-72.8109], + [142.103,-26.799], + [138.256,-13.8871], + [168.39,-24.3266], + [168.954,-12.0094], + [117.333,-12.35], + [102.254,-11.1537], + [120.307,59.7167], + [107.196,26.0167], + [144.768,28.3721], + [150.891,60.0343], + [164.566,25.5053], + [116.851,14.0295], + [124.84,28.3978], + [157.901,42.042], + [111.41,43.1056], + [134.333,44.6677], + [103.277,11.707], + [135.358,73.2119], + [135.349,14.2311], + [153.48,13.386], + [168.071,11.5344], + [-162.99,26.3775], + [-147.519,56.1313], + [-122.579,27.4824], + [-117.909,59.2376], + [-104.052,27.3616], + [-153.107,14.9717], + [-110.833,41.7436], + [-144.847,32.8534], + [-161.546,42.1031], + [-129.866,44.5201], + [-133.883,72.4163], + [-166.729,11.8907], + [-135.755,15.2529], + [-106.063,14.4869], + [-119.452,11.7037], + 
[-146.026,-58.6741], + [-115.913,-16.9547], + [-115.788,-58.8675], + [-155.623,-41.9617], + [-144.433,-23.5133], + [-132.373,-40.8524], + [-163.838,-24.5874], + [-111.492,-38.55], + [-132.174,-72.8109], + [-127.897,-26.799], + [-131.744,-13.8871], + [-101.61,-24.3266], + [-101.046,-12.0094], + [-152.667,-12.35], + [-167.746,-11.1537], + [-14.0127,-27.2963], + [-59.193,-57.0815], + [-56.465,-19.5751], + [-27.056,-59.3077], + [-57.124,-35.9752], + [-33.4636,-28.3914], + [-74.8037,-46.8602], + [-40.089,-45.1376], + [-74.8149,-28.3136], + [-21.3072,-42.2177], + [-44.0778,-72.6353], + [-19.6969,-12.8527], + [-40.1318,-12.1601], + [-72.691,-11.4129], + [-56.0261,58.6741], + [-25.9127,16.9547], + [-25.7876,58.8675], + [-65.6229,41.9617], + [-54.4335,23.5133], + [-42.373,40.8524], + [-73.838,24.5874], + [-21.4917,38.55], + [-42.1744,72.8109], + [-37.8974,26.799], + [-41.7437,13.8871], + [-11.6095,24.3266], + [-11.0459,12.0094], + [-62.667,12.35], + [-77.7456,11.1537], + [30.3071,59.7167], + [17.1956,26.0167], + [54.7676,28.3721], + [60.8915,60.0343], + [74.5657,25.5053], + [26.8506,14.0295], + [34.8398,28.3978], + [67.9014,42.042], + [21.41,43.1056], + [44.3335,44.6677], + [13.2772,11.707], + [45.3579,73.2119], + [45.3492,14.2311], + [63.4799,13.386], + [78.0714,11.5344], + [17.01,-26.3775], + [32.4806,-56.1313], + [57.4213,-27.4824], + [62.0912,-59.2376], + [75.9483,-27.3616], + [26.893,-14.9717], + [69.1672,-41.7436], + [35.1527,-32.8534], + [18.4543,-42.1031], + [50.1344,-44.5201], + [46.1172,-72.4163], + [13.2711,-11.8907], + [44.2448,-15.2529], + [73.9368,-14.4869], + [60.5478,-11.7037] +], dtype=np.float64) + +# approximate ad hoc euclidian resolution in meters +# resolution = 2500 * km / earth_radius +# grid = atlas.UnstructuredGrid(xy) + +###  OR pass x,y individually +# grid = atlas.UnstructuredGrid(xy[:,0],xy[:,1]) + +### OR existing named grids + +# approximate euclidian resolution in meters adimensionalized to unit sphere +resolution = 100 * km / earth_radius 
+grid = atlas.Grid("H128") # HEALPix grid + +# grid = atlas.Grid("O320") # Octahedral reduced Gaussian grid +# resolution = 60 * 1000 # ad hoc + +# grid = atlas.Grid("O160") # Octahedral reduced Gaussian grid +# resolution = 120 * 1000 # ad hoc + + +### Create FunctionSpace + +functionspace = atlas.functionspace.PointCloud(grid, halo_radius=resolution*2, geometry="UnitSphere") + +### Access parallelisation information, typically nb_parts == mpi_size, part == mpi_rank +# print("size", functionspace.size) +# print("nb_parts", functionspace.nb_parts) +# print("part", functionspace.part) + +### Access fields as numpy arrays, if needed +lonlat = atlas.make_view(functionspace.lonlat) # longitude latitude in degrees +ghost = atlas.make_view(functionspace.ghost) # ghost: 1 for ghost, 0 for owned +partition = atlas.make_view(functionspace.partition) # partition owning point (0-based) +remote_index = atlas.make_view(functionspace.remote_index) # local index on partition owning point (careful, 1-based when atlas_HAVE_FORTRAN) +global_index = atlas.make_view(functionspace.global_index) # global index across partitions (always 1-based) + +# internal = [ i for i in range(functionspace.size) if ghost[i] == 0 ] +# ghosts = [ i for i in range(functionspace.size) if ghost[i] == 1 ] + +### Create field and initialize +field = functionspace.create_field(name="myfield") +view = atlas.make_view(field) +for n in range(field.size): + if not ghost[n]: + lon = lonlat[n,0] * math.pi / 180. + lat = lonlat[n,1] * math.pi / 180. + view[n] = math.cos(4.*lat) * math.sin(8.*lon) + else: + view[n] = 0 + + +### Halo exchange +field.halo_dirty = True # if set to False, following halo_exchange will have no effect. 
Note it was already True upon create_field +field.halo_exchange() # comment to see halo with value 0, but validation below will fail +field.halo_exchange() # this will be ignored because field.halo_dirty was set to False during first call + + +### Validate halo exchange +validation_passed = True +for n in range(field.size): + if ghost[n]: + lon = lonlat[n,0] * math.pi / 180. + lat = lonlat[n,1] * math.pi / 180. + if view[n] != math.cos(4.*lat) * math.sin(8.*lon): + validation_passed = False + +if functionspace.part == 0: + if validation_passed: + print("validation of halo exchange passed") + else: + print("validation of halo exchange failed") + + +### Extra: search functionality +class Search: + def __init__(self, functionspace): + self.functionspace = functionspace + self.lonlat = atlas.make_view(self.functionspace.lonlat) + self.kdtree = atlas.IndexKDTree(geometry="UnitSphere") + self.kdtree.build(lonlat) + + def nearest_indices_within_radius(self, i, radius): + # radius is in metres on Earth geometry + closest_indices = self.kdtree.closest_indices_within_radius(lon=self.lonlat[i,0], lat=self.lonlat[i,1], radius=radius) + return closest_indices[1:] # first point is "i" itself, so skip it + + +search = Search(functionspace) +radius = resolution +index = 0 +nearest = search.nearest_indices_within_radius(index,radius) +if functionspace.part == 0: + print("nearest global indices to local index",index," ( global index",global_index[index],"): ", [global_index[n] for n in nearest]) + +### Extra: scatter-plot global field, first doing a global gather on part 0 +def plot_global(field): + import matplotlib.pyplot as plt + import cartopy.crs as ccrs + + fs = field.functionspace + + ### Create global field and gather + field_global = fs.create_field_global(dtype=np.float64) + functionspace.gather(field, field_global) + + # plot on part 0 + if functionspace.part == 0: + x = np.zeros(grid.size) + y = np.zeros(grid.size) + for i,point in enumerate(grid.lonlat()): + x[i] = 
point.lon + y[i] = point.lat + z = atlas.make_view(field_global) + fig = plt.figure() + ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree()) + + cntr = ax.scatter(x, y, c=z, s=1, cmap="RdBu_r", vmin=-1, vmax=1,transform=ccrs.PlateCarree()) + + ax.coastlines (resolution='110m', color='k') + ax.set_global() + ax.set_title('global field, %d points' % (grid.size) ) + ax.set_facecolor('gray') + fig.colorbar(cntr, ax=ax) + plt.subplots_adjust(hspace=0.5) + plt.show() + +### Extra: scatter-plot field of one partition +def plot_partition(field): + import matplotlib.pyplot as plt + import cartopy.crs as ccrs + + fs = field.functionspace + lonlat = atlas.make_view(fs.lonlat) + + x = lonlat[:,0] + y = lonlat[:,1] + z = atlas.make_view(field) + + fig = plt.figure() + ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree()) + + cntr = ax.scatter(x, y, c=z, s=1, cmap="RdBu_r", vmin=-1, vmax=1,transform=ccrs.PlateCarree()) + + ax.coastlines (resolution='110m', color='k') + ax.set_global() + ax.set_title('part %d/%d , %d/%d points' % (fs.part, fs.nb_parts, field.size, grid.size) ) + ax.set_facecolor('gray') + fig.colorbar(cntr, ax=ax) + plt.subplots_adjust(hspace=0.5) + plt.show() + +# if functionspace.part == 0: # choose partition to plot here + # plot_partition(field) + +# plot_global(field) + +atlas.finalize() + diff --git a/examples/example1.py b/examples/example1.py new file mode 100755 index 0000000..7a60366 --- /dev/null +++ b/examples/example1.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +import atlas4py as atlas +import math + +atlas.initialize() # Required + +mesh = atlas.Mesh( atlas.Grid("H16") ) + +fs_nodes = atlas.functionspace.NodeColumns(mesh) + +field = fs_nodes.create_field(name="myfield") + +lonlat = atlas.make_view(fs_nodes.lonlat) + +view = atlas.make_view(field) +for n in range(field.shape[0]): + lat = lonlat[n,1] * math.pi / 180. 
+ view[n] = math.cos(4.*lat) + +gmsh = atlas.Gmsh("example1.msh", coordinates='xyz') +gmsh.write(mesh) +gmsh.write(field) + +def plot_pyvista(mesh,field,title): + """ Plot with pyvista only mpi-serial for now""" + import pyvista + ds = atlas.pyvista.dataset_create(mesh, coordinates='xyz', field=field) + pyvista.set_plot_theme('paraview') + pl = pyvista.Plotter() + pl.add_mesh(ds, show_edges=True) + pl.add_title(title) + pl.show(interactive=True) + +# plot_pyvista(mesh,field,'title') + +atlas.finalize() + diff --git a/examples/example2.py b/examples/example2.py new file mode 100755 index 0000000..0b33725 --- /dev/null +++ b/examples/example2.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 + +# Example program that initializes a spectral field with a +# spherical harmonic. Then the inverse transform is performed. +# Then the result can be visualized with Gmsh and pyvista +# gmsh example2.msh + +# ------------------------------------------------------------------------ +# +# Limitations with Trans.backend('ectrans') +# ----------------------------------------- +# 1) Only implemented for global Gaussian grids +# +# ------------------------------------------------------------------------ +# +# Limitations with Trans.backend('local') +# --------------------------------------- +# 1) Only `invtrans` is implemented, and only MPI-serial +# +# ------------------------------------------------------------------------ +# +# Known issues with Trans.backend('local'), to be fixed +# ----------------------------------------------------- +# 1) We need to initialize trans with +# trans = atlas.Trans(grid, truncation) +# instead of +# trans = atlas.Trans(fs_gp, fs_sp) +# 2) Using atlas Field API, only level=0 is supported (rank-1). 
+# 3) The Healpix grid is identified as unstructured +# because the grid wrongly gets cropped to a "west", and its +# structure gets lost +# +# ------------------------------------------------------------------------ + +import atlas4py as atlas + +# ------------------------------------------------------------------------ +# Functions +# ------------------------------------------------------------------------ +def set_spherical_harmonic(field_sp, n, m): + """ Set field_sp to a spherical harmonic """ + spectral_functionspace = atlas.functionspace.Spectral(field_sp.functionspace) + if not spectral_functionspace: + raise TypeError("'field_sp.functionspace' could not be cast to " + + "'atlas.functionspace.Spectral'. It is of type " + + "'atlas.functionspace."+field_sp.functionspace.type+"'.") + sp = atlas.make_view(field_sp) + + sp[:] = 0. + def spherical_harmonic(re,im,_n,_m): + if _n == n and _m == m: + sp[re] = 1. + sp[im] = 1. + + import numpy.random as random + random.seed(1) + rand1=random.rand(len(sp)) + random.seed(2) + rand2=random.rand(len(sp)) + def spectrum(re,im,_n,_m): + if _m==0: + sp[re] = pow(_n+1,-5/6)*(rand1[re]-0.5) + else: + sp[re] = pow(_n+1,-5/6)*(rand1[re]-0.5) + sp[im] = pow(_n+1,-5/6)*(rand2[im]-0.5) + + + #spectral_functionspace.parallel_for( spectrum ) + spectral_functionspace.parallel_for( spherical_harmonic ) + +# ------------------------------------------------------------------------ + +def plot_pyvista(mesh,field,title): + """ Plot using pyviste, only works with mpi-serial for now """ + import pyvista + ds = atlas.pyvista.dataset_create(mesh, coordinates='xyz', field=field) + pyvista.set_plot_theme('paraview') + pl = pyvista.Plotter() + pl.add_mesh(ds, show_edges=False) + pl.add_title(title) + pl.show(interactive=True) + +# ------------------------------------------------------------------------ + +# ------------------------------------------------------------------------ +# Start here +# 
------------------------------------------------------------------------ + +atlas.initialize() # Required + +init_total_wave_number=16 +init_zonal_wave_number=8 + +grid = atlas.Grid("F400") +truncation = 127 +atlas.Trans.backend("ectrans") +partitioner = atlas.Partitioner("ectrans") + # 'ectrans' partitioner needs to be used for 'ectrans' Trans backend only. + # Partitions are nearly identical to the equal_regions partitioner + +if not atlas.GaussianGrid(grid) or not atlas.Trans.has_backend("ectrans"): + # Fallback to "local" backend + atlas.Trans.backend("local") + partitioner = atlas.Partitioner("equal_regions") +atlas.Trans.backend("local") +partitioner = atlas.Partitioner("equal_regions") + +# Create function spaces +fs_sp = atlas.functionspace.Spectral(truncation) +fs_gp = atlas.functionspace.StructuredColumns(grid, partitioner, halo=1) + +# Create fields (already distributed) +field_sp = fs_sp.create_field(name="sp") +field_gp = fs_gp.create_field(name="gp") + +# Initial condition for spectral field +set_spherical_harmonic(field_sp, n=init_total_wave_number, m=init_zonal_wave_number) + +# Spectral transform +trans = atlas.Trans(grid, truncation) +trans.invtrans(field_sp, field_gp) + +# Visualisation of gridpoint field +mesh = atlas.MeshGenerator(type='structured').generate(grid) +atlas.Gmsh("example2.msh", coordinates='xyz').write(mesh).write(field_gp) + +# plot_pyvista(mesh, field_gp, title=grid.name + " - T" + str(truncation) + "; n="+str(init_total_wave_number)+" m="+str(init_zonal_wave_number)) +# --> only works with mpi-serial for now + +atlas.finalize() +# ------------------------------------------------------------------------ diff --git a/setup.py b/setup.py index eeb22a3..aec8952 100644 --- a/setup.py +++ b/setup.py @@ -11,10 +11,10 @@ VERSIONS = dict( cmake="3.14.0", - ecbuild="3.6.3", - eckit="1.17.1", - atlas="0.26.0", - pybind11="2.5.0", + ecbuild="3.8.0", + eckit="1.24.0", + atlas="0.35.1", + pybind11="2.11.1", python="3.6", ) @@ -121,7 +121,7 
@@ def build_extension(self, ext): subprocess.check_call(["cmake", "--build", "."] + build_args, cwd=self.build_temp) -PACKAGE_VERSION = "0.26.0.dev14" +PACKAGE_VERSION = "0.35.1.dev14" # Meaning of the version scheme "{major}.{minor}.{patch}.dev{dev}": # - {major}.{minor}.{patch} => version of the atlas C++ library (hardcoded in 'setup.py') # - {dev} => version of the Python bindings as the commit number in 'master' diff --git a/src/atlas4py/CMakeLists.txt b/src/atlas4py/CMakeLists.txt index 15d8134..a37fb76 100644 --- a/src/atlas4py/CMakeLists.txt +++ b/src/atlas4py/CMakeLists.txt @@ -1,20 +1,16 @@ + +if( NOT ATLAS4PY_CMAKE_MINIMUM_REQUIRED_VERSION) + set(ATLAS4PY_CMAKE_MINIMUM_REQUIRED_VERSION 3.14) +endif() + cmake_minimum_required(VERSION ${ATLAS4PY_CMAKE_MINIMUM_REQUIRED_VERSION}) project(atlas4py LANGUAGES CXX) +cmake_policy(SET CMP0074 NEW) set(CMAKE_CXX_STANDARD 17) include(FetchContent) -macro(find_ecbuild) - if(NOT ecbuild_FOUND) - FetchContent_Declare( - ecbuild - GIT_REPOSITORY https://github.com/ecmwf/ecbuild.git - GIT_TAG ${ATLAS4PY_ECBUILD_VERSION} - ) - FetchContent_MakeAvailable(ecbuild) - endif() -endmacro() function(copy_target ct_tgt) add_custom_target(copy_${ct_tgt} ALL @@ -24,29 +20,45 @@ function(copy_target ct_tgt) endfunction() find_package(atlas QUIET PATHS $ENV{ATLAS_INSTALL_DIR}) +if( atlas_FOUND ) + message( "Found atlas: ${atlas_DIR} (found version \"${atlas_VERSION}\")" ) +endif() + if(NOT atlas_FOUND) - find_package(eckit QUIET) + find_package(ecbuild) + if(NOT ecbuild_FOUND) + FetchContent_Declare( + ecbuild + GIT_REPOSITORY https://github.com/ecmwf/ecbuild.git + GIT_TAG ${ATLAS4PY_ECBUILD_VERSION} + ) + FetchContent_MakeAvailable(ecbuild) + endif() + find_package(eckit) + if( eckit_FOUND ) + message( "Found eckit: ${eckit_DIR} (found version \"${eckit_VERSION}\")" ) + endif() + if(NOT eckit_FOUND) - find_ecbuild() FetchContent_Declare( eckit GIT_REPOSITORY https://github.com/ecmwf/eckit.git GIT_TAG ${ATLAS4PY_ECKIT_VERSION} ) 
FetchContent_MakeAvailable(eckit) - set(_atlas4py_built_eckit ON) endif() - find_ecbuild() FetchContent_Declare( atlas GIT_REPOSITORY https://github.com/ecmwf/atlas.git GIT_TAG ${ATLAS4PY_ATLAS_VERSION} ) + set( ENABLE_GRIDTOOLS_STORAGE OFF CACHE BOOL "" FORCE ) FetchContent_MakeAvailable(atlas) copy_target(atlas) + copy_target(atlas_io) if(_atlas4py_built_eckit) # copy atlas' eckit dependencies @@ -62,12 +74,15 @@ if(NOT atlas_FOUND) endif() endif() -FetchContent_Declare( - pybind11 - GIT_REPOSITORY https://github.com/pybind/pybind11.git - GIT_TAG v${ATLAS4PY_PYBIND11_VERSION} -) -FetchContent_MakeAvailable(pybind11) +find_package( pybind11 ) +if( NOT pybind11_FOUND ) + FetchContent_Declare( + pybind11 + GIT_REPOSITORY https://github.com/pybind/pybind11.git + GIT_TAG v${ATLAS4PY_PYBIND11_VERSION} + ) + FetchContent_MakeAvailable(pybind11) +endif() pybind11_add_module(_atlas4py _atlas4py.cpp) -target_link_libraries(_atlas4py PUBLIC atlas eckit) +target_link_libraries(_atlas4py PUBLIC atlas) diff --git a/src/atlas4py/__init__.py b/src/atlas4py/__init__.py index ffa06e6..d36b5c1 100644 --- a/src/atlas4py/__init__.py +++ b/src/atlas4py/__init__.py @@ -6,3 +6,10 @@ from ._version import __version__ from ._atlas4py import * + +from .pyvista import * + +def make_view(field): + import numpy as np + return np.array(field, copy=False) + diff --git a/src/atlas4py/_atlas4py.cpp b/src/atlas4py/_atlas4py.cpp index 91f6f92..9dddfb7 100644 --- a/src/atlas4py/_atlas4py.cpp +++ b/src/atlas4py/_atlas4py.cpp @@ -16,9 +16,16 @@ #include "atlas/meshgenerator.h" #include "atlas/option/Options.h" #include "atlas/output/Gmsh.h" +#include "atlas/library.h" +#include "atlas/trans/Trans.h" +#include "atlas/interpolation.h" +#include "atlas/util/function/VortexRollup.h" +#include "atlas/util/function/SphericalHarmonic.h" + #include "eckit/value/Value.h" #include "eckit/config/Configuration.h" + namespace py = ::pybind11; using namespace atlas; using namespace pybind11::literals; @@ -36,6 
+43,57 @@ struct type_caster : public type_caster( value ) ) { + config.set(key,value.cast()); + } + else if ( py::isinstance( value ) ) { + config.set(key,value.cast()); + } + else if ( py::isinstance( value ) ) { + config.set(key,value.cast()); + } + else if ( py::isinstance( value ) ) { + config.set(key, value.cast()); + } + else { + throw std::out_of_range( "type of value unsupported" ); + } +} + +util::Config to_config( py::kwargs kwargs ) { + util::Config config; + for( const auto& pair : kwargs ) { + const auto key = pair.first.cast(); + const auto& value = pair.second; + config_set(config, key, value); + } + return config; +} + py::object toPyObject( eckit::Value const& v ) { if ( v.isBool() ) return py::bool_( v.as() ); @@ -94,10 +152,32 @@ array::DataType pybindToAtlas( py::dtype const& dtype ) { return { 0 }; } + } // namespace PYBIND11_MODULE( _atlas4py, m ) { - py::class_( m, "PointLonLat" ) + auto m_library = m.def_submodule( "library" ); + m_library.def("initialize", []() { atlas::initialise(PySys::instance().argc, PySys::instance().argv);}) + .def("initialise", []() { atlas::initialise(PySys::instance().argc, PySys::instance().argv);}) + .def("finalize", []() { atlas::finalize(); }) + .def("finalise", []() { atlas::finalize(); }); + m_library.attr("version") = atlas::Library::instance().version(); + + m.def("initialize", []() { atlas::initialise(PySys::instance().argc, PySys::instance().argv);}) + .def("initialise", []() { atlas::initialise(PySys::instance().argc, PySys::instance().argv);}) + .def("finalize", []() { atlas::finalize(); }) + .def("finalise", []() { atlas::finalize(); }); + + py::class_( m, "Point2" ) + .def( py::init( []( double x, double y ) { + return Point2( { x, y } ); + } ) ) + .def( "__getitem__", &Point2::operator() ) + .def( "__repr__", []( Point2 const& p ) { + return "_atlas4py.Point2(x=" + std::to_string( p.x() ) + ", y=" + std::to_string( p.y() ) + ")"; + } ); + + py::class_( m, "PointLonLat" ) .def( py::init( []( double 
lon, double lat ) { return PointLonLat( { lon, lat } ); } ), @@ -108,17 +188,26 @@ PYBIND11_MODULE( _atlas4py, m ) { return "_atlas4py.PointLonLat(lon=" + std::to_string( p.lon() ) + ", lat=" + std::to_string( p.lat() ) + ")"; } ); - py::class_( m, "PointXY" ) - .def( py::init( []( double x, double y ) { - return PointXY( { x, y } ); - } ), - "x"_a, "y"_a ) - .def_property_readonly( "x", py::overload_cast<>( &PointXY::x, py::const_ ) ) - .def_property_readonly( "y", py::overload_cast<>( &PointXY::y, py::const_ ) ) + py::class_( m, "PointXY" ) .def( "__repr__", []( PointXY const& p ) { return "_atlas4py.PointXY(x=" + std::to_string( p.x() ) + ", y=" + std::to_string( p.y() ) + ")"; } ); + py::class_( m, "Point3" ) + .def( py::init( []( double x, double y, double z ) { + return Point3(x, y, z); + } ) ) + .def( "__getitem__", &Point3::operator() ) + .def( "__repr__", []( Point3 const& p ) { + return "_atlas4py.Point3(" + std::to_string( p(0) ) + ", " + std::to_string( p(1) ) + ", " + std::to_string( p(2) ) + ")"; + } ); + + py::class_( m, "PointXYZ" ) + .def( "__repr__", []( PointXYZ const& p ) { + return "_atlas4py.PointXYZ(x=" + std::to_string( p.x() ) + ", y=" + std::to_string( p.y() ) + ", z=" + std::to_string( p.z() ) + ")"; + } ); + + py::class_( m, "Projection" ).def( "__repr__", []( Projection const& p ) { return "_atlas4py.Projection("_s + py::str( toPyObject( p.spec().get() ) ) + ")"_s; } ); @@ -132,21 +221,61 @@ PYBIND11_MODULE( _atlas4py, m ) { py::class_( m, "RectangularDomain" ) .def( py::init( []( std::tuple xInterval, std::tuple yInterval ) { auto [xFrom, xTo] = xInterval; - auto [yFrom, yTo] = xInterval; + auto [yFrom, yTo] = yInterval; return RectangularDomain( { xFrom, xTo }, { yFrom, yTo } ); } ), "x_interval"_a, "y_interval"_a ); py::class_( m, "Grid" ) .def( py::init() ) + .def( py::init( []( const std::string& name, const Domain& domain ) { return Grid(name,domain); } ) ) .def_property_readonly( "name", &Grid::name ) .def_property_readonly( 
"uid", &Grid::uid ) .def_property_readonly( "size", &Grid::size ) .def_property_readonly( "projection", &Grid::projection ) .def_property_readonly( "domain", &Grid::domain ) + .def( "lonlat", [](Grid const& self) { + return self.lonlat().begin(); + }) .def( "__repr__", []( Grid const& g ) { return "_atlas4py.Grid("_s + py::str( toPyObject( g.spec().get() ) ) + ")"_s; } ); + py::class_(m, "IteratorLonLat") + .def("__iter__", [](grid::IteratorLonLat& it) { return it; }) + .def("__next__", [](grid::IteratorLonLat& it) { + PointLonLat p; + if( !it.next(p) ) { + throw py::stop_iteration(); + } + return p;}); + + py::class_( m, "UnstructuredGrid" ) + .def( py::init( [](py::array_t _xy) { + py::buffer_info xy = _xy.request(); + auto xy_data = _xy.unchecked<2>(); + auto points = new std::vector(xy.size/2); + auto& p = *points; + for(size_t n=0; n _x, py::array_t _y) { + py::buffer_info x = _x.request(); + py::buffer_info y = _y.request(); + auto x_data = _x.unchecked<1>(); + auto y_data = _y.unchecked<1>(); + ATLAS_ASSERT(x.size == y.size); + auto points = new std::vector(x.size); + auto& p = *points; + for(size_t n=0; n( m, "Spacing" ) .def( "__len__", &grid::Spacing::size ) .def( "__getitem__", &grid::Spacing::operator[]) @@ -189,40 +318,55 @@ PYBIND11_MODULE( _atlas4py, m ) { .def_property_readonly( "regular", &StructuredGrid::regular ) .def_property_readonly( "periodic", &StructuredGrid::periodic ); + py::class_( m, "GaussianGrid" ) + .def( py::init( []( Grid const& g ) { + return GaussianGrid{ g }; + } ), + "grid"_a ) + .def_property_readonly( "valid", &GaussianGrid::valid ) + .def("__bool__", &GaussianGrid::valid); + py::class_( m, "eckit.Configuration" ); py::class_( m, "eckit.LocalConfiguration" ); // TODO This is a duplicate of metadata below (because same base class) py::class_( m, "Config" ) .def( py::init() ) + .def( py::init( []( py::kwargs kwargs) { + return to_config(kwargs); + } ) ) .def( "__setitem__", []( util::Config& config, std::string const& key, 
py::object value ) { - if ( py::isinstance( value ) ) - config.set( key, value.cast() ); - else if ( py::isinstance( value ) ) - config.set( key, value.cast() ); - else if ( py::isinstance( value ) ) - config.set( key, value.cast() ); - else if ( py::isinstance( value ) ) - config.set( key, value.cast() ); - else - throw std::out_of_range( "type of value unsupported" ); + config_set(config,key,value); } ) .def( "__getitem__", []( util::Config& config, std::string const& key ) -> py::object { if ( !config.has( key ) ) throw std::out_of_range( "key <" + key + "> could not be found" ); - - // TODO: We have to query metadata.get() even though this should - // not be done (see comment in Config::get). We cannot - // avoid this right now because otherwise we cannot query - // the type of the underlying data. return toPyObject( config.get().element( key ) ); } ) .def( "__repr__", []( util::Config const& config ) { return "_atlas4py.Config("_s + py::str( toPyObject( config.get() ) ) + ")"_s; } ); + py::class_( m, "Partitioner" ) + .def( py::init( []( py::kwargs kwargs ) { return grid::Partitioner( to_config(kwargs)); } ) ) + .def( py::init( []( util::Config const& config ) { return grid::Partitioner( config ); } ) ) + .def( py::init( []( const std::string& type ) { return grid::Partitioner(type); } ) ) + .def( py::init( [](){ return grid::Partitioner(); } ) ); + + py::class_( m, "MatchingPartitioner" ) + .def( py::init( []( Mesh const& mesh, py::kwargs kwargs ) { return grid::MatchingPartitioner(mesh, to_config(kwargs)); } ) ) + .def( py::init( []( FunctionSpace const& functionspace, py::kwargs kwargs ) { return grid::MatchingPartitioner(functionspace, to_config(kwargs)); } ) ); + + py::class_( m, "MeshGenerator" ) + .def( py::init( []( py::kwargs kwargs ) { return MeshGenerator( to_config(kwargs)); } ) ) + .def( py::init( []( util::Config const& config ) { return MeshGenerator( config ); } ) ) + .def( py::init( []( const std::string& type ) { return MeshGenerator(type); } 
) ) + .def( py::init( [](){ return MeshGenerator(); } ) ) + .def( "generate", [](MeshGenerator const& self, Grid const& grid){ return self.generate(grid); } ) + .def( "generate", [](MeshGenerator const& self, Grid const& grid, grid::Partitioner const& partitioner ){ return self.generate(grid, partitioner); } ); + py::class_( m, "StructuredMeshGenerator" ) // TODO in FunctionSpace below we expose config options, not the whole config object .def( py::init( []( util::Config const& config ) { return StructuredMeshGenerator( config ); } ) ) @@ -230,6 +374,8 @@ PYBIND11_MODULE( _atlas4py, m ) { .def( "generate", py::overload_cast( &StructuredMeshGenerator::generate, py::const_ ) ); py::class_( m, "Mesh" ) + .def( py::init( []( const Grid& grid ) { return Mesh(grid); } ) ) + .def( py::init( []( const Grid& grid, const grid::Partitioner& partitioner ) { return Mesh(grid,partitioner); } ) ) .def_property_readonly( "grid", &Mesh::grid ) .def_property_readonly( "projection", &Mesh::projection ) .def_property( "nodes", py::overload_cast<>( &Mesh::nodes, py::const_ ), py::overload_cast<>( &Mesh::nodes ) ) @@ -295,6 +441,7 @@ PYBIND11_MODULE( _atlas4py, m ) { .def_property_readonly( "cell_connectivity", py::overload_cast<>( &mesh::Nodes::cell_connectivity, py::const_ ) ) .def_property_readonly( "lonlat", py::overload_cast<>( &Mesh::Nodes::lonlat, py::const_ ) ) + .def_property_readonly( "xy", py::overload_cast<>( &Mesh::Nodes::xy, py::const_ ) ) .def("field", []( mesh::Nodes const& n, std::string const& name ) { return n.field( name ); }, "name"_a, py::return_value_policy::reference_internal ) .def( "flags", []( mesh::Nodes const& n ) { return n.flags(); }, @@ -316,55 +463,112 @@ PYBIND11_MODULE( _atlas4py, m ) { .def( "flags", []( mesh::HybridElements const& he ) { return he.flags(); }, py::return_value_policy::reference_internal ); - auto m_fs = m.def_submodule( "functionspace" ); py::class_( m_fs, "FunctionSpace" ) .def_property_readonly( "size", &FunctionSpace::size ) 
.def_property_readonly( "type", &FunctionSpace::type ) .def( "create_field", - []( FunctionSpace const& fs, std::optional const& name, std::optional levels, - std::optional variables, py::object dtype ) { - util::Config config; - if ( name ) - config = config | option::name( *name ); - // TODO what does it mean in atlas if levels is not set? - if ( levels ) - config = config | option::levels( *levels ); - if ( variables ) - config = config | option::variables( *variables ); - config = config | option::datatype( pybindToAtlas( py::dtype::from_args( dtype ) ) ); + []( FunctionSpace const& fs, std::optional dtype, py::kwargs kwargs ) { + util::Config config = to_config(kwargs); + if ( dtype ) + config.set( option::datatype( pybindToAtlas( py::dtype::from_args( *dtype ) ) )); + else + config.set( option::datatypeT() ); + return fs.createField( config ); + }, + "dtype"_a = std::nullopt) + .def( + "create_field_global", + []( FunctionSpace const& fs, std::optional dtype, py::kwargs kwargs ) { + util::Config config = to_config(kwargs); + config.set("global",true); + if ( dtype ) + config.set( option::datatype( pybindToAtlas( py::dtype::from_args( *dtype ) ) )); + else + config.set( option::datatypeT() ); return fs.createField( config ); }, - "name"_a = std::nullopt, "levels"_a = std::nullopt, "variables"_a = std::nullopt, "dtype"_a ); + "dtype"_a = std::nullopt) + .def_property_readonly("lonlat", &FunctionSpace::lonlat ) + .def_property_readonly("ghost", &FunctionSpace::ghost ) + .def_property_readonly("remote_index", &FunctionSpace::remote_index ) + .def_property_readonly("partition", &FunctionSpace::partition ) + .def_property_readonly("global_index", &FunctionSpace::global_index ) + .def_property_readonly("part", &FunctionSpace::part ) + .def_property_readonly("nb_parts", &FunctionSpace::nb_parts ) + .def("gather", [](FunctionSpace const& fs, Field const& local, Field& global) { + return fs.gather(local,global); + }) + .def("scatter", [](FunctionSpace const& fs, Field 
const& global, Field& local) { + return fs.scatter(global,local); + }); + py::class_( m_fs, "EdgeColumns" ) - .def( py::init( []( Mesh const& m, int halo ) { - return functionspace::EdgeColumns( m, util::Config()( "halo", halo ) ); + .def( py::init( []( Mesh const& m, int halo, int levels ) { + return functionspace::EdgeColumns( m, util::Config()( "halo", halo )("levels", levels) ); } ), - "mesh"_a, "halo"_a = 0 ) + "mesh"_a, "halo"_a = 0, "levels"_a = 0 ) .def_property_readonly( "nb_edges", &functionspace::EdgeColumns::nb_edges ) .def_property_readonly( "mesh", &functionspace::EdgeColumns::mesh ) .def_property_readonly( "edges", &functionspace::EdgeColumns::edges ) .def_property_readonly( "valid", &functionspace::EdgeColumns::valid ); py::class_( m_fs, "NodeColumns" ) - .def( py::init( []( Mesh const& m, int halo ) { - return functionspace::NodeColumns( m, util::Config()( "halo", halo ) ); + .def( py::init( []( FunctionSpace fs ) { return functionspace::NodeColumns{fs}; } ) ) + .def( py::init( []( Mesh const& m, int halo, int levels ) { + return functionspace::NodeColumns( m, util::Config()( "halo", halo )("levels",levels) ); } ), - "mesh"_a, "halo"_a = 0 ) + "mesh"_a, "halo"_a = 0, "levels"_a = 0 ) .def_property_readonly( "nb_nodes", &functionspace::NodeColumns::nb_nodes ) .def_property_readonly( "mesh", &functionspace::NodeColumns::mesh ) .def_property_readonly( "nodes", &functionspace::NodeColumns::nodes ) - .def_property_readonly( "valid", &functionspace::NodeColumns::valid ); + .def_property_readonly( "valid", &functionspace::NodeColumns::valid ) + .def( "__bool__", [](const functionspace::NodeColumns& self) { return self.valid(); } ); + py::class_( m_fs, "CellColumns" ) - .def( py::init( []( Mesh const& m, int halo ) { - return functionspace::CellColumns( m, util::Config()( "halo", halo ) ); + .def( py::init( []( Mesh const& m, int halo, int levels ) { + return functionspace::CellColumns( m, util::Config()( "halo", halo )("levels",levels) ); } ), - "mesh"_a, 
"halo"_a = 0 ) + "mesh"_a, "halo"_a = 0, "levels"_a = 0 ) .def_property_readonly( "nb_cells", &functionspace::CellColumns::nb_cells ) .def_property_readonly( "mesh", &functionspace::CellColumns::mesh ) .def_property_readonly( "cells", &functionspace::CellColumns::cells ) .def_property_readonly( "valid", &functionspace::CellColumns::valid ); + py::class_( m_fs, "Spectral" ) + .def( py::init( []( FunctionSpace fs ) { return functionspace::Spectral{fs}; } ) ) + .def( py::init( []( int truncation ) { return functionspace::Spectral( truncation ); } ), "truncation"_a ) + .def_property_readonly( "nb_spectral_coefficients", &functionspace::Spectral::nb_spectral_coefficients ) + .def_property_readonly( "nb_spectral_coefficients_global", &functionspace::Spectral::nb_spectral_coefficients_global ) + .def_property_readonly( "truncation", &functionspace::Spectral::truncation ) + .def_property_readonly( "valid", &functionspace::Spectral::valid ) + .def( "__bool__", [](const functionspace::Spectral& self) { return self.valid(); } ) + .def("parallel_for", []( const functionspace::Spectral& self, const py::function &f) { + self.parallel_for>(f);}); + + + py::class_( m_fs, "StructuredColumns" ) + .def( py::init( []( Grid const& g, grid::Partitioner const& p, py::kwargs kwargs ) { + return functionspace::StructuredColumns( g, p, to_config(kwargs) ); } ), + "grid"_a, "partitioner"_a ) + .def( py::init( []( Grid const& g, py::kwargs kwargs ) { return functionspace::StructuredColumns( g, to_config(kwargs) ); } ), "grid"_a ) + .def_property_readonly( "grid", &functionspace::StructuredColumns::grid ) + .def_property_readonly( "valid", &functionspace::StructuredColumns::valid ); + + py::class_( m_fs, "PointCloud" ) + .def( py::init( []( FunctionSpace fs ) { return functionspace::PointCloud{fs}; } ) ) + .def( py::init( []( Grid const& grid, py::kwargs kwargs) { + return functionspace::PointCloud( grid, to_config(kwargs) ); + } ), + "grid"_a ) + .def( py::init( []( Grid const& grid, 
atlas::grid::Partitioner const& partitioner, py::kwargs kwargs) { + return functionspace::PointCloud( grid, partitioner, to_config(kwargs) ); + } ), + "grid"_a, "partitioner"_a) + .def_property_readonly( "valid", &functionspace::PointCloud::valid ) + .def( "__bool__", [](const functionspace::PointCloud& self) { return self.valid(); } ); + + py::class_( m, "Metadata" ) .def_property_readonly( "keys", &util::Metadata::keys ) .def( "__setitem__", @@ -384,11 +588,6 @@ PYBIND11_MODULE( _atlas4py, m ) { []( util::Metadata& metadata, std::string const& key ) -> py::object { if ( !metadata.has( key ) ) throw std::out_of_range( "key <" + key + "> could not be found" ); - - // TODO: We have to query metadata.get() even though this should - // not be done (see comment in Config::get). We cannot - // avoid this right now because otherwise we cannot query - // the type of the underlying data. return toPyObject( metadata.get().element( key ) ); } ) .def( "__repr__", []( util::Metadata const& metadata ) { @@ -401,9 +600,13 @@ PYBIND11_MODULE( _atlas4py, m ) { .def_property_readonly( "shape", py::overload_cast<>( &Field::shape, py::const_ ) ) .def_property_readonly( "size", &Field::size ) .def_property_readonly( "rank", &Field::rank ) + .def_property_readonly( "levels", &Field::levels ) .def_property_readonly( "datatype", []( Field& f ) { return atlasToPybind( f.datatype() ); } ) .def_property( "metadata", py::overload_cast<>( &Field::metadata, py::const_ ), py::overload_cast<>( &Field::metadata ) ) + .def_property_readonly( "functionspace", py::overload_cast<>( &Field::functionspace, py::const_ ) ) + .def( "halo_exchange", []( Field& f) { f.haloExchange(); }) + .def_property( "halo_dirty", &Field::dirty, &Field::set_dirty, py::return_value_policy::copy) .def_buffer( []( Field& f ) { auto strides = f.strides(); std::transform( strides.begin(), strides.end(), strides.begin(), @@ -433,14 +636,135 @@ PYBIND11_MODULE( _atlas4py, m ) { py::class_( m, "Gmsh" ) .def( py::init( []( 
std::string const& path ) { return output::Gmsh{ path }; } ), "path"_a ) + .def( py::init( []( std::string const& path, eckit::Configuration const& config, py::kwargs kwargs ) { + util::Config cfg = util::Config(config); + cfg.set(to_config(kwargs)); + return output::Gmsh(path,cfg); + }), "path"_a, "config"_a ) + .def( py::init( []( std::string const& path, py::kwargs kwargs ) {return output::Gmsh{ path, to_config(kwargs) }; } ), "path"_a ) .def( "__enter__", []( output::Gmsh& gmsh ) { return gmsh; } ) .def( "__exit__", []( output::Gmsh& gmsh, py::object exc_type, py::object exc_val, py::object exc_tb ) { gmsh.reset( nullptr ); } ) .def( - "write", []( output::Gmsh& gmsh, Mesh const& mesh ) { gmsh.write( mesh ); }, "mesh"_a ) + "write", []( output::Gmsh& gmsh, Mesh const& mesh ) { gmsh.write( mesh ); return gmsh; }, "mesh"_a ) .def( - "write", []( output::Gmsh& gmsh, Field const& field ) { gmsh.write( field ); }, "field"_a ) + "write", []( output::Gmsh& gmsh, Field const& field ) { gmsh.write( field ); return gmsh;}, "field"_a ) .def( - "write", []( output::Gmsh& gmsh, Field const& field, FunctionSpace const& fs ) { gmsh.write( field, fs ); }, + "write", []( output::Gmsh& gmsh, Field const& field, FunctionSpace const& fs ) { gmsh.write( field, fs ); return gmsh; }, "field"_a, "functionspace"_a ); + + + py::class_( m, "Trans" ) + .def( py::init( [](const FunctionSpace& gp, const FunctionSpace& sp, py::kwargs kwargs){ return trans::Trans(gp,sp,to_config(kwargs)); } ), "gp"_a, "sp"_a ) + .def( py::init( [](const Grid& grid, int truncation, py::kwargs kwargs){ return trans::Trans(grid,truncation,to_config(kwargs));} ), "grid"_a, "truncation"_a ) + .def( "dirtrans", []( trans::Trans& trans, const Field& gpfield, Field& spfield) { trans.dirtrans(gpfield,spfield);} ) + .def( "invtrans", []( trans::Trans& trans, const Field& spfield, Field& gpfield) { trans.invtrans(spfield,gpfield);} ) + .def_property_readonly( "truncation", &trans::Trans::truncation ) + 
.def_property_readonly( "nb_spectral_coefficients", &trans::Trans::spectralCoefficients ) + .def_static("backend", [] ( const std::string& backend ){ trans::Trans::backend(backend); } ) + .def_static("has_backend", [] ( const std::string& backend ){ return trans::Trans::hasBackend(backend); } ); + + py::class_( m, "Interpolation" ) + .def( py::init( [](const std::string& type, const FunctionSpace& source, const FunctionSpace& target, py::kwargs kwargs){ + auto config = to_config(kwargs); + config.set("type",type); + return Interpolation(config,source,target); + } ), "type"_a, "source"_a, "target"_a ) + .def( py::init( [](const std::string& type, const Grid& source, const Grid& target, py::kwargs kwargs){ + auto config = to_config(kwargs); + config.set("type",type); + return Interpolation(config,source,target); + } ), "type"_a, "source"_a, "target"_a ) + .def( "execute", []( Interpolation const& self, const Field& source, Field& target) { return self.execute(source,target);} ) + .def_property_readonly( "source", &Interpolation::source ) + .def_property_readonly( "target", &Interpolation::target ); + + + py::class_( m, "IndexKDTreeValue" ) + .def_property_readonly( "point", [](const util::IndexKDTree::Value& v) { return v.point(); } ) + .def_property_readonly( "payload", [](const util::IndexKDTree::Value& v) { return v.payload(); } ) + .def_property_readonly( "index", [](const util::IndexKDTree::Value& v) { return v.payload(); } ) + .def_property_readonly( "distance", [](const util::IndexKDTree::Value& v) { return v.distance(); } ) + ; + + py::class_( m, "IndexKDTreeValueList" ) + .def_property_readonly( "indices", [](const util::IndexKDTree::ValueList& v) { return v.payloads(); } ) + .def( "__getitem__", [](const util::IndexKDTree::ValueList& v, size_t i) { return v[i]; } ) + .def( "size", [](const util::IndexKDTree::ValueList& v) { return v.size(); } ) + ; + + py::class_( m, "IndexKDTree" ) + .def( py::init([](){ return util::IndexKDTree(); })) + .def( 
py::init([](const std::string& geometry){ return util::IndexKDTree(util::Config("geometry",geometry)); }), "geometry"_a) + .def( "reserve", [](util::IndexKDTree& self, idx_t size) { self.reserve(size); }) + .def( "insert", [](util::IndexKDTree& self, const PointLonLat& point, util::IndexKDTree::Payload payload){ self.insert(point, payload); }) + .def( "insert", [](util::IndexKDTree& self, const PointXYZ& point, util::IndexKDTree::Payload payload){ self.insert(point, payload); }) + .def( "build", [](util::IndexKDTree& self) { self.build();} ) + .def( "build", [](util::IndexKDTree& self, py::array_t points) { + py::buffer_info info = points.request(); + auto size = info.shape[0]; + self.reserve(size); + if (info.ndim == 2 ) { + const PointLonLat* lonlat = reinterpret_cast(info.ptr); + for( size_t j=0; j(info.ptr); + for( size_t j=0; j