diff --git a/docs/user_guide/examples/tutorial_interaction.ipynb b/docs/user_guide/examples/tutorial_interaction.ipynb new file mode 100644 index 0000000000..47f10e9d54 --- /dev/null +++ b/docs/user_guide/examples/tutorial_interaction.ipynb @@ -0,0 +1,428 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# 🎓 Particle-Particle interaction\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "In this tutorial, we show an example of the 'particle-particle interaction' functionality in Parcels. Note that this functionality is still fairly rudimentary for now. Importantly, since particle-particle interaction is a many-body problem, its cost scales as $N^2$ and can thus become very slow for large ParticleSets. There is currently **no advanced optimisation** (such as using techniques from [Smoothed Particle Hydrodynamics](https://en.wikipedia.org/wiki/Smoothed-particle_hydrodynamics)) built into Parcels. " ] }, { "attachments": {}, "cell_type": "markdown", "id": "2", "metadata": {}, "source": [ "## Pulling particles\n", "\n", "Below is an example of what can be done with particle-particle interaction. We create a square grid of $N\times N$ particles, which are all subject to Brownian Motion (via the built-in `DiffusionUniformKh` Kernel). Furthermore, two of the particles also 'attract' other particles that are within the interaction distance: these attracted particles move with a constant velocity toward the attracting particles.\n" ] }, { "cell_type": "code", "execution_count": null, "id": "3", "metadata": {}, "outputs": [], "source": [ "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import xarray as xr\n", "from matplotlib.animation import FuncAnimation\n", "\n", "import parcels\n", "\n", "# for interactive display of animations\n", "plt.rcParams[\"animation.html\"] = \"jshtml\"" ] }, { "cell_type": "code", "execution_count": null, "id": "4", "metadata": {}, "outputs": [], "source": [ "def Pull(particles, fieldset):\n", " \"\"\"Kernel that \"pulls\" all particles within interaction_distance\n", " toward the attracting particles with a constant velocity\"\"\"\n", " interaction_distance = 0.5\n", " velocity = -0.04 # attraction speed; the negative sign moves particles toward the attractors\n", "\n", " # Boolean mask and coordinates of attractors\n", " attractor_mask = particles.attractor.astype(bool)\n", " lon_a = particles.lon[attractor_mask]\n", " lat_a = particles.lat[attractor_mask]\n", "\n", " # Pairwise differences and distances (n_attractors × n_particles)\n", " dx = particles.lon - lon_a[:, None]\n", " dy = particles.lat - lat_a[:, None]\n", " distances = np.sqrt(dx**2 + dy**2)\n", "\n", " # Mask dx, dy by interaction range\n", " within = distances < interaction_distance\n", " dx = np.where(within, dx, 0.0)\n", " dy = np.where(within, dy, 0.0)\n", "\n", " with np.errstate(divide=\"ignore\", invalid=\"ignore\"):\n", " inv_dist = np.where(distances > 0, 1.0 / distances, 0.0)\n", " dx_norm = dx * inv_dist\n", " dy_norm = dy * inv_dist\n", "\n", " particles.dlon += np.sum(dx_norm, axis=0) * velocity * particles.dt\n", " particles.dlat += np.sum(dy_norm, axis=0) * velocity * particles.dt" ] }, { "cell_type": "code", "execution_count": null, "id": "5", "metadata": {}, "outputs": [], "source": [ "def DiffusionFieldSet():\n", " \"\"\"Define a fieldset with only diffusion\"\"\"\n", " 
fieldset = parcels.FieldSet([])\n", + " fieldset.add_constant_field(\"Kh_zonal\", 0.0005, mesh=\"flat\")\n", + " fieldset.add_constant_field(\"Kh_meridional\", 0.0005, mesh=\"flat\")\n", + " return fieldset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "npart = 11\n", + "X, Y = np.meshgrid(np.linspace(-1, 1, npart), np.linspace(-1, 1, npart))\n", + "\n", + "# Create custom particle class with extra variable that indicates\n", + "# whether the interaction kernel should be executed on this particle.\n", + "InteractingParticle = parcels.Particle.add_variable(\n", + " parcels.Variable(\"attractor\", dtype=np.bool_, to_write=\"once\"),\n", + ")\n", + "\n", + "attractor = [\n", + " True if i in [int(npart * npart / 2 - 3), int(npart * npart / 2 + 3)] else False\n", + " for i in range(npart * npart)\n", + "]\n", + "\n", + "pset = parcels.ParticleSet(\n", + " fieldset=DiffusionFieldSet(),\n", + " pclass=InteractingParticle,\n", + " lon=X,\n", + " lat=Y,\n", + " attractor=attractor,\n", + ")\n", + "\n", + "output_file = parcels.ParticleFile(\n", + " store=\"InteractingParticles.zarr\",\n", + " outputdt=np.timedelta64(1, \"s\"),\n", + ")\n", + "\n", + "kernels = [\n", + " parcels.kernels.DiffusionUniformKh,\n", + " Pull, # Add the Pull kernel defined above\n", + "]\n", + "\n", + "pset.execute(\n", + " pyfunc=kernels,\n", + " runtime=np.timedelta64(60, \"s\"),\n", + " dt=np.timedelta64(1, \"s\"),\n", + " output_file=output_file,\n", + " verbose_progress=False,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "data_xarray = xr.open_zarr(\"InteractingParticles.zarr\")\n", + "data_attr = data_xarray.where(data_xarray[\"attractor\"].compute() == 1, drop=True)\n", + "data_other = data_xarray.where(data_xarray[\"attractor\"].compute() == 0, drop=True)\n", + "\n", + "timerange = np.arange(\n", + " np.nanmin(data_xarray[\"time\"].values),\n", + " np.nanmax(data_xarray[\"time\"].values),\n", + " np.timedelta64(1, \"s\"),\n", + ")\n", + "\n", + "fig = plt.figure(figsize=(4, 4), constrained_layout=True)\n", + "ax = fig.add_subplot()\n", + "\n", + "ax.set_ylabel(\"Meridional distance [m]\")\n", + "ax.set_xlabel(\"Zonal distance [m]\")\n", + "ax.set_xlim(-1.1, 1.1)\n", + "ax.set_ylim(-1.1, 1.1)\n", + "\n", + "time_id = np.where(data_other[\"time\"] == timerange[0])\n", + "time_id_attr = np.where(data_attr[\"time\"] == timerange[0])\n", + "\n", + "scatter = ax.scatter(\n", + " data_other[\"lon\"].values[time_id],\n", + " data_other[\"lat\"].values[time_id],\n", + " c=\"b\",\n", + " s=5,\n", + " zorder=1,\n", + ")\n", + "scatter_attr = ax.scatter(\n", + " data_attr[\"lon\"].values[time_id_attr],\n", + " data_attr[\"lat\"].values[time_id_attr],\n", + " c=\"r\",\n", + " s=40,\n", + " zorder=2,\n", + ")\n", + "\n", + "circs = []\n", + "for lon_a, lat_a in zip(\n", + " data_attr[\"lon\"].values[time_id_attr], data_attr[\"lat\"].values[time_id_attr]\n", + "):\n", + " circs.append(\n", + " ax.add_patch(\n", + " plt.Circle(\n", + " (lon_a, lat_a), 0.25, facecolor=\"None\", edgecolor=\"r\", linestyle=\"--\"\n", + " )\n", + " )\n", + " )\n", + "\n", + "t = str(timerange[0].astype(\"timedelta64[s]\"))\n", + "title = ax.set_title(\"Particles at t = \" + t + \" (Red particles are attractors)\")\n", + "\n", + "\n", + "def animate(i):\n", + " t = str(timerange[i].astype(\"timedelta64[s]\"))\n", + " title.set_text(\"Particles at t = \" + t + \"\\n (Red particles 
are attractors)\")\n", "\n", " time_id = np.where(data_other[\"time\"] == timerange[i])\n", " time_id_attr = np.where(data_attr[\"time\"] == timerange[i])\n", " scatter.set_offsets(\n", " np.c_[data_other[\"lon\"].values[time_id], data_other[\"lat\"].values[time_id]]\n", " )\n", " scatter_attr.set_offsets(\n", " np.c_[\n", " data_attr[\"lon\"].values[time_id_attr], data_attr[\"lat\"].values[time_id_attr]\n", " ]\n", " )\n", " for c, lon_a, lat_a in zip(\n", " circs,\n", " data_attr[\"lon\"].values[time_id_attr],\n", " data_attr[\"lat\"].values[time_id_attr],\n", " ):\n", " c.center = (lon_a, lat_a)\n", "\n", "\n", "# Create animation\n", "anim = FuncAnimation(fig, animate, frames=len(timerange), interval=100)\n", "plt.close(fig)\n", "anim" ] }, { "attachments": {}, "cell_type": "markdown", "id": "8", "metadata": {}, "source": [ "## Merging particles\n", "\n", "Another type of interaction is the merging of particles. The merging Kernel also comes with limitations (only mutually nearest particles can be accurately merged), so this is really just a prototype. Nevertheless, the example below shows the possibilities that merging particles can provide for more complex simulations." ] }, { "cell_type": "code", "execution_count": null, "id": "9", "metadata": {}, "outputs": [], "source": [ "def Merge(particles, fieldset):\n", " \"\"\"Kernel that \"merges\" pairs of mutually nearest particles that are within\n", " interaction_distance of each other. Particles must have a 'mass' Variable; during\n", " merging, the mass of the smaller particle is transferred to the larger particle,\n", " and the smaller particle is then deleted.\n", " \"\"\"\n", " interaction_distance = 0.05\n", "\n", " N = len(particles.lon)\n", "\n", " # calculate pairwise distances (n_particles × n_particles)\n", " dx = particles.lon[None, :] - particles.lon[:, None]\n", " dy = particles.lat[None, :] - particles.lat[:, None]\n", " distances = np.sqrt(dx**2 + dy**2)\n", "\n", " # mask distances by interaction range\n", " within = (distances > 0) & (distances < interaction_distance)\n", " dist_masked = np.where(within, distances, np.inf)\n", "\n", " # nearest neighbour for each particle (index) and corresponding distance\n", " nn = np.argmin(dist_masked, axis=1)\n", " nn_dist = dist_masked[np.arange(N), nn]\n", " has_nn = np.isfinite(nn_dist)\n", "\n", " # mutual nearest: nn[nn[i]] == i\n", " i_idx = np.arange(N)\n", " mutual = has_nn & (nn[nn] == i_idx)\n", "\n", " # keep each mutual pair (i, j) only once, ordered such that i < j\n", " pair_mask = mutual & (i_idx < nn)\n", " pair_i = i_idx[pair_mask]\n", " pair_j = nn[pair_mask]\n", "\n", " if pair_i.size == 0:\n", " return\n", "\n", " # compute which of the two (i, j) has the larger mass\n", " mass_i = particles.mass[pair_i]\n", " mass_j = particles.mass[pair_j]\n", " larger_idx = np.where(mass_j > mass_i, pair_j, pair_i)\n", " smaller_idx = np.where(mass_j > mass_i, pair_i, pair_j)\n", "\n", " # perform transfer and mark deletions\n", " # TODO note that we use temporary arrays for indexing because of KernelParticle bug (GH #2143)\n", " masses = particles.mass\n", " states = particles.state\n", "\n", " # transfer mass from smaller to larger and mark smaller for deletion\n", " masses[larger_idx] += particles.mass[smaller_idx]\n", " states[smaller_idx] = parcels.StatusCode.Delete\n", "\n", " # TODO use particle variables directly after KernelParticle 
bug (GH #2143) is fixed\n", + " particles.mass = masses\n", + " particles.state = states" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [], + "source": [ + "npart = 800\n", + "\n", + "# Create custom Particle class with extra variable mass\n", + "MergeParticle = parcels.Particle.add_variable(\n", + " parcels.Variable(\"mass\", dtype=np.float32)\n", + ")\n", + "\n", + "pset = parcels.ParticleSet(\n", + " fieldset=DiffusionFieldSet(),\n", + " pclass=MergeParticle,\n", + " lon=np.random.uniform(-1, 1, size=npart),\n", + " lat=np.random.uniform(-1, 1, size=npart),\n", + " mass=np.random.uniform(0.5, 1.5, size=npart),\n", + ")\n", + "\n", + "output_file = parcels.ParticleFile(\n", + " store=\"MergingParticles.zarr\",\n", + " outputdt=np.timedelta64(1, \"s\"),\n", + ")\n", + "\n", + "kernels = [\n", + " parcels.kernels.DiffusionUniformKh,\n", + " Merge, # Add the Merge kernel defined above\n", + "]\n", + "\n", + "pset.execute(\n", + " pyfunc=kernels,\n", + " runtime=np.timedelta64(60, \"s\"),\n", + " dt=np.timedelta64(1, \"s\"),\n", + " output_file=output_file,\n", + " verbose_progress=False,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11", + "metadata": {}, + "outputs": [], + "source": [ + "data_xarray = xr.open_zarr(\"MergingParticles.zarr\")\n", + "\n", + "timerange = np.arange(\n", + " np.nanmin(data_xarray[\"time\"].values),\n", + " np.nanmax(data_xarray[\"time\"].values),\n", + " np.timedelta64(1, \"s\"),\n", + ")\n", + "\n", + "fig = plt.figure(figsize=(4, 4), constrained_layout=True)\n", + "ax = fig.add_subplot()\n", + "\n", + "ax.set_ylabel(\"Meridional distance [m]\")\n", + "ax.set_xlabel(\"Zonal distance [m]\")\n", + "ax.set_xlim(-1.1, 1.1)\n", + "ax.set_ylim(-1.1, 1.1)\n", + "\n", + "time_id = np.where(data_xarray[\"time\"] == timerange[0])\n", + "\n", + "scatter = ax.scatter(\n", + " data_xarray[\"lon\"].values[time_id],\n", + " data_xarray[\"lat\"].values[time_id],\n", + " s=data_xarray[\"mass\"].values[time_id],\n", + " c=\"b\",\n", + " zorder=1,\n", + ")\n", + "\n", + "t = str(timerange[0].astype(\"timedelta64[s]\"))\n", + "title = ax.set_title(\"Particles at t = \" + t)\n", + "\n", + "\n", + "def animate(i):\n", + " t = str(timerange[i].astype(\"timedelta64[s]\"))\n", + " title.set_text(\"Particles at t = \" + t)\n", + "\n", + " time_id = np.where(data_xarray[\"time\"] == timerange[i])\n", + " scatter.set_offsets(\n", + " np.c_[data_xarray[\"lon\"].values[time_id], data_xarray[\"lat\"].values[time_id]]\n", + " )\n", + " scatter.set_sizes(data_xarray[\"mass\"].values[time_id])\n", + "\n", + " return (scatter,)\n", + "\n", + "\n", + "anim = FuncAnimation(fig, animate, frames=len(timerange), interval=100)\n", + "plt.close(fig)\n", + "anim" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "test-latest", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/user_guide/examples_v3/tutorial_interaction.ipynb b/docs/user_guide/examples_v3/tutorial_interaction.ipynb deleted file mode 100644 index 0fd6e89a33..0000000000 --- a/docs/user_guide/examples_v3/tutorial_interaction.ipynb +++ /dev/null @@ -1,433 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": 
"markdown", - "id": "0", - "metadata": {}, - "source": [ - "# Particle-Particle interaction\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "1", - "metadata": {}, - "source": [ - "In this notebook, we show an example of the new 'particle-particle-interaction' functionality in Parcels. Note that this functionality is still in development, and the implementation is fairly rudimentary and slow for now. Importantly:\n", - "\n", - "- Particle-particle interaction only works in Scipy mode\n", - "- The type of interactions that are supported is still limited\n", - "\n", - "Interactions are implemented through `InteractionKernels`, which are similar to normal `Kernels`. The `InteractionKernels` are applied between particles that are located closer to each other than a specified `interaction_distance`. In general, the code structure needs three adaptations to apply particle-particle interaction:\n", - "\n", - "1. The `ParticleSet` requires an `interaction_distance` argument upon creation, to define the `interaction_distance`.\n", - "2. `ParticleSet.execute()` requires the `pyfunc_inter` argument, which contains the `InteractionKernels` that will be executed, similarly to the `pyfunc` argument for normal `Kernels`.\n", - "3. `InteractionKernels` have two additional arguments compared to normal `Kernels`:\n", - "\n", - "```python\n", - "def InteractionKernel(particle, fieldset, time, neighbors, mutator)\n", - "```\n", - "\n", - "The `neighbors` argument provides a list of the particles that are within a neighborhood (i.e. closer than the `interaction_distance` argument in `ParticleSet` creation).\n", - "\n", - "The `mutator` argument is an initially empty list with all the mutations that need to be performed on particles at the end of running all `InteractionKernels` on all particles.\n", - "This `mutator` argument is required, because otherwise the order at which interactions are applied has implications for the simulation. As a consequence, the simulation will likely be dependent on the order of the particle list if no mutator list is used.\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "2", - "metadata": {}, - "source": [ - "## Pulling particles\n", - "\n", - "Below is an example of what can be done with particle-particle interaction. We create a square grid of $N\\times N$ particles, which are all subject to Brownian Motion (via the built-in `DiffusionUniformKh` Kernel). 
Furthermore, some of the particles also 'attract' other particles that are within the interaction distance: these attracted particles move with a constant velocity to the attracting particles.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3", - "metadata": {}, - "outputs": [], - "source": [ - "%matplotlib notebook\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import xarray as xr\n", - "from IPython.display import HTML\n", - "from matplotlib.animation import FuncAnimation\n", - "\n", - "import parcels" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4", - "metadata": {}, - "outputs": [], - "source": [ - "def Pull(particle, fieldset, time, neighbors, mutator):\n", - " \"\"\"InterActionKernel that \"pulls\" all neighbor particles\n", - " toward the attracting particle with a constant velocity\"\"\"\n", - " distances = []\n", - " na_neighbors = []\n", - " # only execute kernel for particles that are 'attractor'\n", - " if not particle.attractor:\n", - " return StateCode.Success\n", - " for n in neighbors:\n", - " if n.attractor:\n", - " continue\n", - " x_p = np.array([particle.depth, particle.lat, particle.lon])\n", - " x_n = np.array([n.depth, n.lat, n.lon])\n", - "\n", - " # compute distance between attracted and attracting particle\n", - " distances.append(np.linalg.norm(x_p - x_n))\n", - " na_neighbors.append(n)\n", - "\n", - " velocity = 0.04 # predefined attracting velocity\n", - " for n in na_neighbors:\n", - " assert n.dt == particle.dt\n", - " dx = np.array(\n", - " [particle.lat - n.lat, particle.lon - n.lon, particle.depth - n.depth]\n", - " )\n", - " dx_norm = np.linalg.norm(dx)\n", - "\n", - " # calculate vector of position change\n", - " distance = velocity * n.dt\n", - " d_vec = distance * dx / dx_norm\n", - "\n", - " # define mutation function for mutator\n", - " def f(n, dlat, dlon, ddepth):\n", - " n.lat_nextloop += (\n", - " dlat # note that we need to change the locations for the next loop\n", - " )\n", - " n.lon_nextloop += dlon\n", - " n.depth_nextloop += ddepth\n", - "\n", - " # add mutation to the mutator\n", - " mutator[n.id].append((f, d_vec))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5", - "metadata": {}, - "outputs": [], - "source": [ - "npart = 11\n", - "\n", - "X, Y = np.meshgrid(np.linspace(-1, 1, npart), np.linspace(-1, 1, npart))\n", - "\n", - "# Define a fieldset without flow\n", - "fieldset = parcels.FieldSet.from_data(\n", - " {\"U\": 0, \"V\": 0}, {\"lon\": 0, \"lat\": 0}, mesh=\"flat\"\n", - ")\n", - "fieldset.add_constant_field(\"Kh_zonal\", 0.0005, mesh=\"flat\")\n", - "fieldset.add_constant_field(\"Kh_meridional\", 0.0005, mesh=\"flat\")\n", - "\n", - "\n", - "# Create custom particle class with extra variable that indicates\n", - "# whether the interaction kernel should be executed on this particle.\n", - "InteractingParticle = parcels.Particle.add_variable(\n", - " \"attractor\", dtype=np.bool_, to_write=\"once\"\n", - ")\n", - "\n", - "\n", - "attractor = [\n", - " True if i in [int(npart * npart / 2 - 3), int(npart * npart / 2 + 3)] else False\n", - " for i in range(npart * npart)\n", - "]\n", - "pset = parcels.ParticleSet(\n", - " fieldset=fieldset,\n", - " pclass=InteractingParticle,\n", - " lon=X,\n", - " lat=Y,\n", - " interaction_distance=0.5, # note the interaction_distance argument here\n", - " attractor=attractor,\n", - ")\n", - "\n", - "output_file = pset.ParticleFile(name=\"InteractingParticles.zarr\", outputdt=1)\n", - "\n", - 
"pset.execute(\n", - " pyfunc=parcels.kernels.DiffusionUniformKh,\n", - " pyfunc_inter=Pull, # note the pyfunc_inter here\n", - " runtime=60,\n", - " dt=1,\n", - " output_file=output_file,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture\n", - "data_xarray = xr.open_zarr(\"InteractingParticles.zarr\")\n", - "data_attr = data_xarray.where(data_xarray[\"attractor\"].compute() == 1, drop=True)\n", - "data_other = data_xarray.where(data_xarray[\"attractor\"].compute() == 0, drop=True)\n", - "\n", - "timerange = np.arange(\n", - " np.nanmin(data_xarray[\"time\"].values),\n", - " np.nanmax(data_xarray[\"time\"].values),\n", - " np.timedelta64(1, \"s\"), # timerange in nanoseconds\n", - ")\n", - "\n", - "fig = plt.figure(figsize=(4, 4), constrained_layout=True)\n", - "ax = fig.add_subplot()\n", - "\n", - "ax.set_ylabel(\"Meridional distance [m]\")\n", - "ax.set_xlabel(\"Zonal distance [m]\")\n", - "ax.set_xlim(-1.1, 1.1)\n", - "ax.set_ylim(-1.1, 1.1)\n", - "\n", - "time_id = np.where(\n", - " data_other[\"time\"] == timerange[0]\n", - ") # Indices of the data where time = 0\n", - "time_id_attr = np.where(\n", - " data_attr[\"time\"] == timerange[0]\n", - ") # Indices of the data where time = 0\n", - "\n", - "scatter = ax.scatter(\n", - " data_other[\"lon\"].values[time_id],\n", - " data_other[\"lat\"].values[time_id],\n", - " c=\"b\",\n", - " s=5,\n", - " zorder=1,\n", - ")\n", - "scatter_attr = ax.scatter(\n", - " data_attr[\"lon\"].values[time_id_attr],\n", - " data_attr[\"lat\"].values[time_id_attr],\n", - " c=\"r\",\n", - " s=40,\n", - " zorder=2,\n", - ")\n", - "\n", - "circs = []\n", - "for lon_a, lat_a in zip(\n", - " data_attr[\"lon\"].values[time_id_attr], data_attr[\"lat\"].values[time_id_attr]\n", - "):\n", - " circs.append(\n", - " ax.add_patch(\n", - " plt.Circle(\n", - " (lon_a, lat_a), 0.25, facecolor=\"None\", edgecolor=\"r\", linestyle=\"--\"\n", - " )\n", - " )\n", - " )\n", - "\n", - "t = str(timerange[0].astype(\"timedelta64[s]\"))\n", - "title = ax.set_title(\"Particles at t = \" + t + \" (Red particles are attractors)\")\n", - "\n", - "\n", - "def animate(i):\n", - " t = str(timerange[i].astype(\"timedelta64[s]\"))\n", - " title.set_text(\"Particles at t = \" + t + \"\\n (Red particles are attractors)\")\n", - "\n", - " time_id = np.where(data_other[\"time\"] == timerange[i])\n", - " time_id_attr = np.where(data_attr[\"time\"] == timerange[i])\n", - " scatter.set_offsets(\n", - " np.c_[data_other[\"lon\"].values[time_id], data_other[\"lat\"].values[time_id]]\n", - " )\n", - " scatter_attr.set_offsets(\n", - " np.c_[\n", - " data_attr[\"lon\"].values[time_id_attr], data_attr[\"lat\"].values[time_id_attr]\n", - " ]\n", - " )\n", - " for c, lon_a, lat_a in zip(\n", - " circs,\n", - " data_attr[\"lon\"].values[time_id_attr],\n", - " data_attr[\"lat\"].values[time_id_attr],\n", - " ):\n", - " c.center = (lon_a, lat_a)\n", - " return (\n", - " scatter,\n", - " scatter_attr,\n", - " circs,\n", - " )\n", - "\n", - "\n", - "anim = FuncAnimation(fig, animate, frames=len(timerange), interval=200, blit=True)\n", - "data_xarray.close()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], - "source": [ - "HTML(anim.to_jshtml())" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "8", - "metadata": {}, - "source": [ - "## Merging particles\n", - "\n", - "Another type of interaction that is supported is the merging of 
particles. The supported merging functions also comes with limitations (only mutual-nearest particles can be accurately merged), so this is really just a prototype. Nevertheless, the example below shows the possibilities that merging of particles can provide for more complex simulations.\n", - "\n", - "In the example below, we use two build-in Kernels: `NearestNeighborWithinRange` and `MergeWithNearestNeighbor`.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9", - "metadata": {}, - "outputs": [], - "source": [ - "npart = 800\n", - "\n", - "X = np.random.uniform(-1, 1, size=npart)\n", - "Y = np.random.uniform(-1, 1, size=npart)\n", - "\n", - "# Define a fieldset without flow\n", - "fieldset = parcels.FieldSet.from_data(\n", - " {\"U\": 0, \"V\": 0}, {\"lon\": 0, \"lat\": 0}, mesh=\"flat\"\n", - ")\n", - "fieldset.add_constant_field(\"Kh_zonal\", 0.0005, mesh=\"flat\")\n", - "fieldset.add_constant_field(\"Kh_meridional\", 0.0005, mesh=\"flat\")\n", - "\n", - "\n", - "# Create custom InteractionParticle class\n", - "# with extra variables nearest_neighbor and mass\n", - "MergeParticle = parcels.InteractionParticle.add_variables(\n", - " [\n", - " parcels.Variable(\"nearest_neighbor\", dtype=np.int64, to_write=False),\n", - " parcels.Variable(\"mass\", initial=1, dtype=np.float32),\n", - " ]\n", - ")\n", - "\n", - "pset = parcels.ParticleSet(\n", - " fieldset=fieldset,\n", - " pclass=MergeParticle,\n", - " lon=X,\n", - " lat=Y,\n", - " interaction_distance=0.05, # note this argument here\n", - ")\n", - "\n", - "output_file = pset.ParticleFile(name=\"MergingParticles.zarr\", outputdt=1)\n", - "\n", - "pset.execute(\n", - " pyfunc=parcels.kernels.DiffusionUniformKh,\n", - " pyfunc_inter=pset.InteractionKernel(parcels.NearestNeighborWithinRange)\n", - " + parcels.MergeWithNearestNeighbor, # note the pyfunc_inter here\n", - " runtime=60,\n", - " dt=1,\n", - " output_file=output_file,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "10", - "metadata": {}, - "outputs": [], - "source": [ - "%%capture\n", - "data_xarray = xr.open_zarr(\"MergingParticles.zarr\")\n", - "\n", - "timerange = np.arange(\n", - " np.nanmin(data_xarray[\"time\"].values),\n", - " np.nanmax(data_xarray[\"time\"].values),\n", - " np.timedelta64(1, \"s\"),\n", - ") # timerange in nanoseconds\n", - "\n", - "fig = plt.figure(figsize=(4, 4), constrained_layout=True)\n", - "ax = fig.add_subplot()\n", - "\n", - "ax.set_ylabel(\"Meridional distance [m]\")\n", - "ax.set_xlabel(\"Zonal distance [m]\")\n", - "ax.set_xlim(-1.1, 1.1)\n", - "ax.set_ylim(-1.1, 1.1)\n", - "\n", - "time_id = np.where(\n", - " data_xarray[\"time\"] == timerange[0]\n", - ") # Indices of the data where time = 0\n", - "\n", - "scatter = ax.scatter(\n", - " data_xarray[\"lon\"].values[time_id],\n", - " data_xarray[\"lat\"].values[time_id],\n", - " s=data_xarray[\"mass\"].values[time_id],\n", - " c=\"b\",\n", - " zorder=1,\n", - ")\n", - "\n", - "t = str(timerange[0].astype(\"timedelta64[s]\"))\n", - "title = ax.set_title(\"Particles at t = \" + t)\n", - "\n", - "\n", - "def animate(i):\n", - " t = str(timerange[i].astype(\"timedelta64[s]\"))\n", - " title.set_text(\"Particles at t = \" + t)\n", - "\n", - " time_id = np.where(data_xarray[\"time\"] == timerange[i])\n", - " scatter.set_offsets(\n", - " np.c_[data_xarray[\"lon\"].values[time_id], data_xarray[\"lat\"].values[time_id]]\n", - " )\n", - " scatter.set_sizes(data_xarray[\"mass\"].values[time_id])\n", - "\n", - " return (scatter,)\n", - "\n", - "\n", 
- "anim = FuncAnimation(fig, animate, frames=len(timerange), interval=200, blit=True)\n", - "data_xarray.close()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "11", - "metadata": {}, - "outputs": [], - "source": [ - "HTML(anim.to_jshtml())" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "id": "12", - "metadata": {}, - "source": [ - "## Interacting with the FieldSet\n", - "\n", - "An important feature of Parcels is to evaluate a `Field` at the `Particle` location using the square bracket notation: `particle.Temperature = fieldset.T[time, depth, lat, lon]`. These types of particle-field interactions are recommended to be implemented in standard `Kernels`, since the `InteractionKernels` do not report the `StateCodes` that are used to flag particles that encounter an error in the particle-field interaction, e.g. `OutOfBoundsError`. Any variable that is needed in the particle-particle interaction can be stored in a `Variable` by sampling the field in a `Kernel` before executing the `InteractionKernel`.\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.6" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/user_guide/index.md b/docs/user_guide/index.md index ec443dbf22..a462894559 100644 --- a/docs/user_guide/index.md +++ b/docs/user_guide/index.md @@ -54,6 +54,7 @@ examples/tutorial_statuscodes.ipynb examples/tutorial_gsw_density.ipynb examples/tutorial_Argofloats.ipynb examples/tutorial_diffusion.ipynb +examples/tutorial_interaction.ipynb ``` ```{toctree} @@ -65,7 +66,6 @@ examples/tutorial_interpolation.ipynb ``` - diff --git a/docs/user_guide/v4-migration.md b/docs/user_guide/v4-migration.md index f355752612..a5685e58e0 100644 --- a/docs/user_guide/v4-migration.md +++ b/docs/user_guide/v4-migration.md @@ -14,6 +14,7 @@ Version 4 of Parcels is unreleased at the moment. The information in this migrat - The `particle` argument in the Kernel signature has been renamed to `particles`. - `math` functions should be replaced with array compatible equivalents (e.g., `math.sin` -> `np.sin`). Instead of `ParcelsRandom` you should use numpy's random functions. - `particle.depth` has been changed to `particles.z` to be consistent with the [CF conventions for trajectory data](https://cfconventions.org/cf-conventions/cf-conventions.html#trajectory-data), and to make Parcels also generalizable to atmospheric contexts. +- The `InteractionKernel` class has been removed. Since normal Kernels now have access to _all_ particles, particle-particle interaction can be performed within normal Kernels. ## FieldSet diff --git a/src/parcels/_core/particleset.py b/src/parcels/_core/particleset.py index fe23db61a0..29c91d4ada 100644 --- a/src/parcels/_core/particleset.py +++ b/src/parcels/_core/particleset.py @@ -53,9 +53,6 @@ class ParticleSet: Optional list of "trajectory" values (integers) for the particle IDs partition_function : Function to use for partitioning particles over processors. Default is to use kMeans - periodic_domain_zonal : - Zonal domain size, used to apply zonally periodic boundaries for particle-particle - interaction. 
If None, no zonally periodic boundaries are applied Other Variables can be initialised using further arguments (e.g. v=... for a Variable named 'v') """ @@ -74,7 +71,6 @@ def __init__( self._data = None self._repeat_starttime = None self._kernel = None - self._interaction_kernel = None self.fieldset = fieldset lon = np.empty(shape=0) if lon is None else _convert_to_flat_array(lon) @@ -228,8 +224,6 @@ def add(self, particles): for d in self._data: self._data[d] = np.concatenate((self._data[d], particles._data[d])) - # Adding particles invalidates the neighbor search structure. - self._dirty_neighbor = True return self def __iadd__(self, particles): @@ -256,44 +250,6 @@ def remove_indices(self, indices): for d in self._data: self._data[d] = np.delete(self._data[d], indices, axis=0) - def _active_particles_mask(self, time, dt): - active_indices = (time - self._data["time"]) / dt >= 0 - non_err_indices = np.isin(self._data["state"], [StatusCode.Success, StatusCode.Evaluate]) - active_indices = np.logical_and(active_indices, non_err_indices) - self._active_particle_idx = np.where(active_indices)[0] - return active_indices - - def _compute_neighbor_tree(self, time, dt): - active_mask = self._active_particles_mask(time, dt) - - self._values = np.vstack( - ( - self._data["z"], - self._data["lat"], - self._data["lon"], - ) - ) - if self._dirty_neighbor: - self._neighbor_tree.rebuild(self._values, active_mask=active_mask) - self._dirty_neighbor = False - else: - self._neighbor_tree.update_values(self._values, new_active_mask=active_mask) - - def _neighbors_by_index(self, particle_idx): - neighbor_idx, distances = self._neighbor_tree.find_neighbors_by_idx(particle_idx) - neighbor_idx = self._active_particle_idx[neighbor_idx] - mask = neighbor_idx != particle_idx - neighbor_idx = neighbor_idx[mask] - if "horiz_dist" in self._data._ptype.variables: - self._data["vert_dist"][neighbor_idx] = distances[0, mask] - self._data["horiz_dist"][neighbor_idx] = distances[1, mask] - return True # TODO fix for v4 ParticleDataIterator(self.particledata, subset=neighbor_idx) - - def _neighbors_by_coor(self, coor): - neighbor_idx = self._neighbor_tree.find_neighbors_by_coor(coor) - neighbor_ids = self._data["trajectory"][neighbor_idx] - return neighbor_ids - def populate_indices(self): """Pre-populate guesses of particle ei (element id) indices""" for i, grid in enumerate(self.fieldset.gridset): @@ -359,13 +315,6 @@ def Kernel(self, pyfunc): pyfuncs=[pyfunc], ) - def InteractionKernel(self, pyfunc_inter): - from parcels.interaction.interactionkernel import InteractionKernel - - if pyfunc_inter is None: - return None - return InteractionKernel(self.fieldset, self._ptype, pyfunc=pyfunc_inter) - def data_indices(self, variable_name, compare_values, invert=False): """Get the indices of all particles where the value of `variable_name` equals (one of) `compare_values`. 
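The v4-migration note above says that, since normal Kernels now have access to all particles, particle-particle interaction can be written directly in them; the removal of `ParticleSet.InteractionKernel` above and of the `parcels.interaction` package below are the code side of that change. As a minimal sketch of the new pattern, mirroring the vectorised `Pull` and `Merge` kernels in the new tutorial (the `Repel` kernel and its constants are illustrative, not part of Parcels):

```python
import numpy as np


def Repel(particles, fieldset):
    """Illustrative v4-style kernel: pairwise interaction written in a normal
    Kernel, since every kernel now sees the full particle arrays at once."""
    interaction_distance = 0.1  # assumed flat-mesh units, as in the tutorial
    speed = 0.02  # illustrative repulsion speed

    # Pairwise separation vectors, shape (n_particles, n_particles)
    dx = particles.lon[None, :] - particles.lon[:, None]
    dy = particles.lat[None, :] - particles.lat[:, None]
    distances = np.sqrt(dx**2 + dy**2)

    # Interact only within range, and skip self-pairs (distance 0)
    within = (distances > 0) & (distances < interaction_distance)
    inv_dist = np.where(within, 1.0 / np.where(within, distances, 1.0), 0.0)

    # Push each particle away from every in-range neighbour
    particles.dlon += np.sum(dx * inv_dist, axis=0) * speed * particles.dt
    particles.dlat += np.sum(dy * inv_dist, axis=0) * speed * particles.dt
```

Such a kernel is executed like any other (e.g. `pset.execute(pyfunc=[parcels.kernels.DiffusionUniformKh, Repel], ...)`), with no `pyfunc_inter` or `interaction_distance` arguments.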
diff --git a/src/parcels/interaction/__init__.py b/src/parcels/interaction/__init__.py deleted file mode 100644 index 71eee987aa..0000000000 --- a/src/parcels/interaction/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# from .interactionkernel import InteractionKernel diff --git a/src/parcels/interaction/interactionkernel.py b/src/parcels/interaction/interactionkernel.py deleted file mode 100644 index 039297f5d6..0000000000 --- a/src/parcels/interaction/interactionkernel.py +++ /dev/null @@ -1,216 +0,0 @@ -import inspect -import warnings -from collections import defaultdict - -import numpy as np - -from parcels._compat import MPI -from parcels.field import VectorField -from parcels.kernel import BaseKernel -from parcels.tools.statuscodes import StatusCode - -__all__ = ["InteractionKernel"] - - -class InteractionKernel(BaseKernel): - """InteractionKernel object that encapsulates auto-generated code. - - InteractionKernels do not implement ways to catch or recover from - errors caused during execution of the kernel function(s). - It is strongly recommended not to sample from fields inside an - InteractionKernel. - """ - - def __init__( - self, - fieldset, - ptype, - pyfunc=None, - funcname=None, - py_ast=None, - ): - if MPI is not None and MPI.COMM_WORLD.Get_size() > 1: - raise NotImplementedError( - "InteractionKernels are not supported in an MPI environment. Please run your simulation outside MPI." - ) - - if MPI is not None and MPI.COMM_WORLD.Get_size() > 1: - raise NotImplementedError( - "InteractionKernels are not supported in an MPI environment. Please run your simulation outside MPI." - ) - - if pyfunc is not None: - if isinstance(pyfunc, list): - funcname = "".join([func.__name__ for func in pyfunc]) - else: - funcname = pyfunc.__name__ - - super().__init__( - fieldset=fieldset, - ptype=ptype, - pyfunc=pyfunc, - funcname=funcname, - py_ast=py_ast, - ) - - if pyfunc is not None: - if isinstance(pyfunc, list): - funcname = "".join([func.__name__ for func in pyfunc]) - else: - funcname = pyfunc.__name__ - - if pyfunc is not None: - if isinstance(pyfunc, list): - self._pyfunc = pyfunc - else: - self._pyfunc = [pyfunc] - - for func in self._pyfunc: - self.check_fieldsets_in_kernels(func) - - numkernelargs = self.check_kernel_signature_on_version() - - assert numkernelargs[0] == 5 and numkernelargs.count(numkernelargs[0]) == len(numkernelargs), ( - "Interactionkernels take exactly 5 arguments: particle, fieldset, time, neighbors, mutator" - ) - - def check_fieldsets_in_kernels(self, pyfunc): - # Currently, the implemented interaction kernels do not impose - # any requirements on the fieldset - pass - - def check_kernel_signature_on_version(self): - """ - Returns numkernelargs. - Adaptation of this method in the BaseKernel that works with - lists of functions. 
- """ - numkernelargs = [] - if self._pyfunc is not None and isinstance(self._pyfunc, list): - for func in self._pyfunc: - numkernelargs.append(len(inspect.getfullargspec(func).args)) - return numkernelargs - - def merge(self, kernel, kclass): - assert self.__class__ == kernel.__class__ - funcname = self.funcname + kernel.funcname - pyfunc = self._pyfunc + kernel._pyfunc - return kclass(self._fieldset, self._ptype, pyfunc=pyfunc, funcname=funcname) - - def __add__(self, kernel): - if not isinstance(kernel, InteractionKernel): - kernel = InteractionKernel(self.fieldset, self.ptype, pyfunc=kernel) - return self.merge(kernel, InteractionKernel) - - def __radd__(self, kernel): - if not isinstance(kernel, InteractionKernel): - kernel = InteractionKernel(self.fieldset, self.ptype, pyfunc=kernel) - return kernel.merge(self, InteractionKernel) - - def execute_python(self, pset, endtime, dt): - """Performs the core update loop via Python. - - InteractionKernels do not implement ways to catch or recover from - errors caused during execution of the kernel function(s). - It is strongly recommended not to sample from fields inside an - InteractionKernel. - """ - if self.fieldset is not None: - for f in self.fieldset.fields.values(): - if isinstance(f, VectorField): - continue - f.data = np.array(f.data) - - reset_particle_idx = [] - for pyfunc in self._pyfunc: - pset._compute_neighbor_tree(endtime, dt) - active_idx = pset._active_particle_idx - - mutator = defaultdict(lambda: []) - - # Loop only over particles that are in a positive state and have started. - for particle_idx in active_idx: - p = pset[particle_idx] - # Don't use particles that are not started. - if (endtime - p.time) / dt <= -1e-7: - continue - elif (endtime - p.time) / dt < 1: - p.dt = endtime - p.time - reset_particle_idx.append(particle_idx) - - neighbors = pset._neighbors_by_index(particle_idx) - try: - res = pyfunc(p, pset.fieldset, p.time, neighbors, mutator) - except Exception as e: - res = StatusCode.Error - p.exception = e - - # InteractionKernels do not implement a way to recover - # from errors. - if res != StatusCode.Success: - warnings.warn( - "Some InteractionKernel was not completed succesfully, likely because a Particle threw an error that was not captured.", - RuntimeWarning, - stacklevel=2, - ) - - for particle_idx in active_idx: - p = pset[particle_idx] - try: - for mutator_func, args in mutator[p.trajectory]: - mutator_func(p, *args) - except KeyError: - pass - for particle_idx in reset_particle_idx: - pset[particle_idx].dt = dt - - def execute(self, pset, endtime, dt, output_file=None): - """Execute this Kernel over a ParticleSet for several timesteps. - - InteractionKernels do not implement ways to catch or recover from - errors caused during execution of the kernel function(s). - It is strongly recommended not to sample from fields inside an - InteractionKernel. - """ - pset.particledata.state[:] = StatusCode.Evaluate - - if abs(dt) < 1e-6: - warnings.warn( - "'dt' is too small, causing numerical accuracy limit problems. Please chose a higher 'dt' and rather scale the 'time' axis of the field accordingly. (related issue #762)", - RuntimeWarning, - stacklevel=2, - ) - - self.execute_python(pset, endtime, dt) - - # Remove all particles that signalled deletion - self.remove_deleted(pset) # Generalizable version! 
- - # Identify particles that threw errors - n_error = pset._num_error_particles - - while n_error > 0: - error_pset = pset._error_particles - # Check for StatusCodes - for p in error_pset: - if p.state == StatusCode.StopExecution: - return - if p.state == StatusCode.Repeat: - p.state = StatusCode.Evaluate - elif p.state == StatusCode.Delete: - pass - else: - warnings.warn( - f"Deleting particle {p.trajectory} because of non-recoverable error", - RuntimeWarning, - stacklevel=2, - ) - p.delete() - - # Remove all particles that signalled deletion - self.remove_deleted(pset) # Generalizable version! - - # Execute core loop again to continue interrupted particles - self.execute_python(pset, endtime, dt) - - n_error = pset._num_error_particles diff --git a/src/parcels/interaction/neighborsearch/__init__.py b/src/parcels/interaction/neighborsearch/__init__.py deleted file mode 100644 index 9b7175a7e4..0000000000 --- a/src/parcels/interaction/neighborsearch/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -from parcels.interaction.neighborsearch.bruteforce import ( - BruteFlatNeighborSearch, - BruteSphericalNeighborSearch, -) -from parcels.interaction.neighborsearch.hashflat import HashFlatNeighborSearch -from parcels.interaction.neighborsearch.hashspherical import ( - HashSphericalNeighborSearch, -) -from parcels.interaction.neighborsearch.kdtreeflat import ( - KDTreeFlatNeighborSearch, -) - -__all__ = [ - "BruteFlatNeighborSearch", - "BruteSphericalNeighborSearch", - "HashFlatNeighborSearch", - "HashSphericalNeighborSearch", - "KDTreeFlatNeighborSearch", -] diff --git a/src/parcels/interaction/neighborsearch/base.py b/src/parcels/interaction/neighborsearch/base.py deleted file mode 100644 index 09578aead4..0000000000 --- a/src/parcels/interaction/neighborsearch/base.py +++ /dev/null @@ -1,227 +0,0 @@ -from abc import ABC, abstractmethod - -import numpy as np - -from parcels.interaction.neighborsearch.distanceutils import spherical_distance - - -class BaseNeighborSearch(ABC): - """Base class for searching particles in the neighborhood. - - The data structure of the class (and subclasses) only contain spatial - information. Additionally its input is in array format (3, n_particles), - which makes it the most efficient with the SoA (structure of arrays) data - structure. - """ - - def __init__(self, inter_dist_vert, inter_dist_horiz, max_depth=100000, periodic_domain_zonal=None): - """Initialize neighbor search - - - Parameters - ---------- - inter_dist_vert : float - Interaction distance (vertical) in m. - inter_dist_horiz : float - interaction distance (horizontal) in m - max_depth : float, optional - Maximum depth of the particles (default is 100000m). - """ - self.inter_dist_vert = inter_dist_vert - self.inter_dist_horiz = inter_dist_horiz - self.inter_dist = np.array([inter_dist_vert, inter_dist_horiz, inter_dist_horiz]).reshape(3, 1) - self.max_depth = max_depth # Maximum depth of particles. - self._values = None # Coordinates of the particles. - - # Boolean array denoting active particles. - # These are particles 1) already started at the current time and - # 2) are set to a positive state (Success/Evaluate). - # Thus, this mask allows for particles do be deactivated without - # needing to completely rebuild the tree. - self._active_mask = None - self.periodic_domain_zonal = periodic_domain_zonal - - @abstractmethod - def find_neighbors_by_coor(self, coor): - """Get the neighbors around a certain location. - - Parameters - ---------- - coor : - Numpy array with [depth, lat, lon]. 
- - Returns - ------- - type - List of particle indices. - - """ - raise NotImplementedError - - def find_neighbors_by_idx(self, particle_idx): - """Get the neighbors around a certain particle. - - Mainly useful for Structure of Array (SoA) datastructure - - Parameters - ---------- - particle_idx : - index of the particle (SoA). - - Returns - ------- - type - List of particle indices - - """ - coor = self._values[:, particle_idx].reshape(3, 1) - return self.find_neighbors_by_coor(coor) - - def update_values(self, new_values, new_active_mask=None): - """Update the coordinates of the particles. - - This is a default implementation simply rebuilds the structure. - If the rebuilding is slow, a faster implementation can be provided. - - Parameters - ---------- - new_values : - numpy array ([depth, lat, lon], n_particles) with - new coordinates of the particles. - new_active_mask : - boolean array indicating active particles. (Default value = None) - """ - self.rebuild(new_values, new_active_mask) - - def rebuild(self, values, active_mask=-1): - """Rebuild the neighbor structure from scratch. - - Parameters - ---------- - values : - numpy array with coordinates of particles - (same as update). - active_mask : - boolean array indicating active particles. (Default value = -1) - """ - if values is not None: - self._values = values - if active_mask is None: - self._active_mask = np.arange(self._values.shape[1]) - - # If active_mask == -1, then don't update the active mask. - if not (isinstance(active_mask, int) and active_mask == -1): - self._active_mask = active_mask - self._active_idx = self.active_idx - - @property - def active_idx(self): - """Indices of the currently active mask.""" - # See __init__ comments for a more detailed explanation. - if self._active_mask is None: - return np.arange(self._values.shape[1]) - return np.where(self._active_mask)[0] - - @abstractmethod - def _distance(self, coor, subset_idx): - """Distance between a coordinate and particles - - Distance depends on the mesh (spherical/flat). - - Parameters - ---------- - coor : - Numpy array with 3D coordinates ([depth, lat, lon]). - subset_idx : - Indices of the particles to compute the distance to. - - Returns - ------- - type - horiz_dist: distance in the horizontal direction - - """ - raise NotImplementedError - - def _get_close_neighbor_dist(self, coor, subset_idx): - """Compute distances and remove non-neighbors. - - Parameters - ---------- - coor : - Numpy array with 3D coordinates ([depth, lat, lon]). - subset_idx : - Indices of the particles to compute the distance to. - - Returns - ------- - type - neighbor_idx: Indices within the interaction distance. 
- - """ - vert_distance, horiz_distance = self._distance(coor, subset_idx) - rel_distances = np.sqrt( - (horiz_distance / self.inter_dist_horiz) ** 2 + (vert_distance / self.inter_dist_vert) ** 2 - ) - rel_neighbor_idx = np.where(rel_distances < 1)[0] - neighbor_idx = subset_idx[rel_neighbor_idx] - distances = np.vstack((vert_distance[rel_neighbor_idx], horiz_distance[rel_neighbor_idx])) - return neighbor_idx, distances - - -class BaseFlatNeighborSearch(BaseNeighborSearch): - """Base class for neighbor searches with a flat mesh.""" - - def _distance(self, coor, subset_idx): - coor = coor.reshape(3, 1) - horiz_distance = np.sqrt(np.sum((self._values[1:, subset_idx] - coor[1:]) ** 2, axis=0)) - if self.periodic_domain_zonal: - # If zonal periodic boundaries - coor[2, 0] -= self.periodic_domain_zonal - # distance through Western boundary - hd2 = np.sqrt(np.sum((self._values[1:, subset_idx] - coor[1:]) ** 2, axis=0)) - coor[2, 0] += 2 * self.periodic_domain_zonal - # distance through Eastern boundary - hd3 = np.sqrt(np.sum((self._values[1:, subset_idx] - coor[1:]) ** 2, axis=0)) - coor[2, 0] -= self.periodic_domain_zonal - else: - hd2 = np.full(len(horiz_distance), np.inf) - hd3 = np.full(len(horiz_distance), np.inf) - - horiz_distance = np.column_stack((horiz_distance, hd2, hd3)) - horiz_distance = np.min(horiz_distance, axis=1) - vert_distance = np.abs(self._values[0, subset_idx] - coor[0]) - return (vert_distance, horiz_distance) - - -class BaseSphericalNeighborSearch(BaseNeighborSearch): - """Base class for a neighbor search with a spherical mesh.""" - - def _distance(self, coor, subset_idx): - vert_distances, horiz_distances = spherical_distance( - *coor, - self._values[0, subset_idx], - self._values[1, subset_idx], - self._values[2, subset_idx], - ) - - if self.periodic_domain_zonal: - # If zonal periodic boundaries - coor[2, 0] -= self.periodic_domain_zonal - # distance through Western boundary - hd2 = spherical_distance( - *coor, self._values[0, subset_idx], self._values[1, subset_idx], self._values[2, subset_idx] - )[1] - coor[2, 0] += 2 * self.periodic_domain_zonal - # distance through Eastern boundary - hd3 = spherical_distance( - *coor, self._values[0, subset_idx], self._values[1, subset_idx], self._values[2, subset_idx] - )[1] - coor[2, 0] -= self.periodic_domain_zonal - else: - hd2 = np.full(len(horiz_distances), np.inf) - hd3 = np.full(len(horiz_distances), np.inf) - - horiz_distances = np.column_stack((horiz_distances, hd2, hd3)) - horiz_distances = np.min(horiz_distances, axis=1) - return (vert_distances, horiz_distances) diff --git a/src/parcels/interaction/neighborsearch/basehash.py b/src/parcels/interaction/neighborsearch/basehash.py deleted file mode 100644 index ef3c567aed..0000000000 --- a/src/parcels/interaction/neighborsearch/basehash.py +++ /dev/null @@ -1,200 +0,0 @@ -from abc import ABC, abstractmethod - -import numpy as np - - -class BaseHashNeighborSearch(ABC): - def find_neighbors_by_coor(self, coor): - """Get the neighbors around a certain location. - - Parameters - ---------- - coor : - Numpy array with [depth, lat, lon]. - - Returns - ------- - type - List of particle indices. - - """ - coor = coor.reshape(3, 1) - hash_id = self._values_to_hashes(coor)[0] - return self._find_neighbors(hash_id, coor) - - def find_neighbors_by_idx(self, particle_idx): - """Get the neighbors around a certain particle. - - Mainly useful for Structure of Array (SoA) datastructure - - Parameters - ---------- - particle_idx : - index of the particle (SoA). 
- - Returns - ------- - type - List of particle indices - - """ - hash_id = self._particle_hashes[particle_idx] - coor = self._values[:, particle_idx].reshape(3, 1) - return self._find_neighbors(hash_id, coor) - - @abstractmethod - def _find_neighbors(self, hash_id, coor): - raise NotImplementedError - - def consistency_check(self): - """See if all values are in their proper place. - - Only used for debugging purposes. - """ - active_idx = self.active_idx - if active_idx is None: - active_idx = np.arange(self._values.shape[1]) - - for idx in active_idx: - cur_hash = self._particle_hashes[idx] - hash_idx = self._hash_idx[idx] - assert self._hashtable[cur_hash][hash_idx] == idx - - n_idx = 0 - for idx_array in self._hashtable.values(): - for idx in idx_array: - assert idx in active_idx - n_idx += len(idx_array) - assert n_idx == len(active_idx) - cur_hashes = self._values_to_hashes(self._values[:, active_idx]) - assert np.all(cur_hashes == self._particle_hashes[active_idx]) - - def update_values(self, new_values, new_active_mask=None): - """Update the locations of (some) of the particles. - - Particles that stay in the same location are computationally cheap. - The order and number of the particles is assumed to remain the same. - - Parameters - ---------- - new_values : - new (depth, lat, lon) values for particles. - new_active_mask : - (Default value = None) - """ - if self._values is None: - self.rebuild(new_values, new_active_mask) - return - - if new_active_mask is None: - new_active_mask = np.full(new_values.shape[1], True) - - # Figure out the changes in the active mask. - deactivated_mask = np.logical_and(self._active_mask, np.logical_not(new_active_mask)) - stay_active_mask = np.logical_and(self._active_mask, new_active_mask) - activated_mask = np.logical_and(np.logical_not(self._active_mask), new_active_mask) - - stay_active_idx = np.where(stay_active_mask)[0] - - # Find the old and new hashes of particles that stayed active. - old_hashes = self._particle_hashes[stay_active_mask] - new_hashes = self._values_to_hashes(new_values[:, stay_active_mask]) - - # See which particles have crossed cell boundaries. - move_idx = stay_active_idx[np.where(old_hashes != new_hashes)[0]] - remove_idx = np.append(move_idx, np.where(deactivated_mask)[0]) - add_idx = np.append(move_idx, np.where(activated_mask)[0]) - - # Remove/add/modify particles. - self._deactivate_particles(remove_idx) - self._particle_hashes[stay_active_mask] = new_hashes - self._particle_hashes[activated_mask] = self._values_to_hashes(new_values[:, activated_mask]) - self._activate_particles(add_idx) - - # Set the state to the new values. - self._active_mask = new_active_mask - self._values = new_values - - @abstractmethod - def _values_to_hashes(self, values, active_idx=None): - """Convert (particle) coordinates to hashes. - - The hashes correspond to the cells that particles reside in. - - Parameters - ---------- - values : - 3D coordinates to be hashed. - active_idx : - Active particle indices (relative to values). (Default value = None) - - Returns - ------- - type - all_hashes: An array of length len(values) with hashes. - - """ - raise NotImplementedError - - def _deactivate_particles(self, particle_idx): - """Remove particles from the hashtable.""" - # Get the hashes of the particles to be removed. 
- remove_split = hash_split(self._particle_hashes[particle_idx]) - for cur_hash, remove_idx in remove_split.items(): - # If the number of items to removed from cur_hash is equal - # to the number of hashes stored under cur_hash, remove the entry. - if len(remove_idx) == len(self._hashtable[cur_hash]): - del self._hashtable[cur_hash] - # Else create a new array that doesn't include remove_idx. - else: - rel_remove_idx = self._hash_idx[particle_idx[remove_idx]] - self._hashtable[cur_hash] = np.delete(self._hashtable[cur_hash], rel_remove_idx) - self._hash_idx[self._hashtable[cur_hash]] = np.arange(len(self._hashtable[cur_hash])) - - def _activate_particles(self, particle_idx): - """Add particles to the hashtable""" - # See _deactivate_particles. - add_split = hash_split(self._particle_hashes[particle_idx]) - for cur_hash, add_idx in add_split.items(): - if cur_hash not in self._hashtable: - self._hashtable[cur_hash] = particle_idx[add_idx] - self._hash_idx[particle_idx[add_idx]] = np.arange(len(add_idx)) - else: - self._hash_idx[particle_idx[add_idx]] = np.arange( - len(self._hashtable[cur_hash]), len(self._hashtable[cur_hash]) + len(add_idx) - ) - self._hashtable[cur_hash] = np.append(self._hashtable[cur_hash], particle_idx[add_idx]) - - -def hash_split(hash_ids, active_idx=None): - """Create a hashtable. - - Multiple particles that are found in the same cell are put in a list - with that particular hash. - - Parameters - ---------- - hash_ids : - Hash values for the particles. - active_idx : - Subset on which to compute the hash split. (Default value = None) - - Returns - ------- - type - hash_split: Dictionary with {hash: [idx_1, idx_2, ..], ..} - - """ - if len(hash_ids) == 0: - return {} - if active_idx is not None: - sort_idx = active_idx[np.argsort(hash_ids[active_idx])] - else: - sort_idx = np.argsort(hash_ids) - - a_sorted = hash_ids[sort_idx] - unq_first = np.concatenate(([True], a_sorted[1:] != a_sorted[:-1])) - unq_items = a_sorted[unq_first] - unq_count = np.diff(np.nonzero(unq_first)[0]) - unq_idx = np.split(sort_idx, np.cumsum(unq_count)) - return dict(zip(unq_items, unq_idx, strict=True)) diff --git a/src/parcels/interaction/neighborsearch/bruteforce.py b/src/parcels/interaction/neighborsearch/bruteforce.py deleted file mode 100644 index f61df90050..0000000000 --- a/src/parcels/interaction/neighborsearch/bruteforce.py +++ /dev/null @@ -1,18 +0,0 @@ -from parcels.interaction.neighborsearch.base import ( - BaseFlatNeighborSearch, - BaseSphericalNeighborSearch, -) - - -class BruteFlatNeighborSearch(BaseFlatNeighborSearch): - """Brute force implementation to find the neighbors.""" - - def find_neighbors_by_coor(self, coor): - return self._get_close_neighbor_dist(coor, self.active_idx) - - -class BruteSphericalNeighborSearch(BaseSphericalNeighborSearch): - """Brute force implementation to find the neighbors.""" - - def find_neighbors_by_coor(self, coor): - return self._get_close_neighbor_dist(coor, self.active_idx) diff --git a/src/parcels/interaction/neighborsearch/distanceutils.py b/src/parcels/interaction/neighborsearch/distanceutils.py deleted file mode 100644 index d5922269c1..0000000000 --- a/src/parcels/interaction/neighborsearch/distanceutils.py +++ /dev/null @@ -1,25 +0,0 @@ -import numpy as np - - -def fast_distance(lat1, lon1, lat2, lon2): - """Compute the arc distance assuming the earth is a sphere. - - This is not the only possible implementation. 
It was taken from: - https://www.mkompf.com/gps/distcalc.html - """ - g = np.sin(lat1) * np.sin(lat2) + np.cos(lat1) * np.cos(lat2) * np.cos(lon1 - lon2) - return np.arccos(np.minimum(1, g)) - - -def spherical_distance(depth1_m, lat1_deg, lon1_deg, depth2_m, lat2_deg, lon2_deg): - """Compute the arc distance, uses degrees as input.""" - R_earth = 6371000 - lat1 = np.pi * lat1_deg / 180 - lon1 = np.pi * lon1_deg / 180 - lat2 = np.pi * lat2_deg / 180 - lon2 = np.pi * lon2_deg / 180 - - horiz_dist = R_earth * fast_distance(lat1, lon1, lat2, lon2) - - vert_dist = np.abs(depth1_m - depth2_m) - return (vert_dist, horiz_dist) diff --git a/src/parcels/interaction/neighborsearch/hashflat.py b/src/parcels/interaction/neighborsearch/hashflat.py deleted file mode 100644 index a83760a994..0000000000 --- a/src/parcels/interaction/neighborsearch/hashflat.py +++ /dev/null @@ -1,165 +0,0 @@ -import numpy as np - -from parcels.interaction.neighborsearch.base import BaseFlatNeighborSearch -from parcels.interaction.neighborsearch.basehash import ( - BaseHashNeighborSearch, - hash_split, -) - - -class HashFlatNeighborSearch(BaseHashNeighborSearch, BaseFlatNeighborSearch): - """Neighbor search using a hashtable (similar to octtrees).""" - - _box = None - - def _find_neighbors(self, hash_id, coor): - neighbor_blocks = hash_to_neighbors(hash_id, self._bits) - all_neighbor_points = [] - for block in neighbor_blocks: - try: - all_neighbor_points.extend(self._hashtable[block]) - except KeyError: - pass - - pot_neighbors = np.array(all_neighbor_points) - return self._get_close_neighbor_dist(coor, pot_neighbors) - - def update_values(self, new_values, new_active_mask=None): - if not self._check_box(new_values, new_active_mask): - self.rebuild(new_values, new_active_mask) - else: - super().update_values(new_values, new_active_mask=new_active_mask) - - def _check_box(self, new_values, new_active_mask): - """Check whether particles have moved out of the overall box. - - Parameters - ---------- - new_values : - New particle coordinates (depth, lat, lon) to be checked. - new_active_mask : - New active mask for the particles. - - Returns - ------- - type - True if box is still big enough, False if not. - - """ - if self._box is None: - return False - if new_active_mask is None: - active_values = new_values - else: - active_values = new_values[:, new_active_mask] - for i_dim in range(3): - if np.any(active_values[i_dim, :] - self._box[i_dim][0] < 0): - return False - if np.any(active_values[i_dim, :] - self._box[i_dim][1] > 0): - return False - return True - - def rebuild(self, values, active_mask=-1): - super().rebuild(values, active_mask) - active_values = self._values[:, self._active_mask] - - # Compute the dimensions of the box with a margin. - self._box = [] - for i_dim in range(3): - val_min = active_values[i_dim, :].min() - val_max = active_values[i_dim, :].max() - margin = (val_max - val_min) * 0.3 - self._box.append([val_min - margin, val_max + margin]) - - self._box = np.array(self._box) - - epsilon = 1e-8 - - # Compute the number of bits in each of the three dimensions - # E.g. if we have 3 bits (depth), we must have less than 2^3 cells in - # that direction. - n_bits = ((self._box[:, 1] - self._box[:, 0]) / self.inter_dist.reshape(-1) + epsilon) / np.log(2) - self._bits = np.ceil(n_bits).astype(int) - - # Compute the starting point of the cell (0, 0, 0). - self._min_box = self._box[:, 0] - self._min_box = self._min_box.reshape(-1, 1) - - # Compute the hash table. 
diff --git a/src/parcels/interaction/neighborsearch/hashflat.py b/src/parcels/interaction/neighborsearch/hashflat.py
deleted file mode 100644
index a83760a994..0000000000
--- a/src/parcels/interaction/neighborsearch/hashflat.py
+++ /dev/null
@@ -1,165 +0,0 @@
-import numpy as np
-
-from parcels.interaction.neighborsearch.base import BaseFlatNeighborSearch
-from parcels.interaction.neighborsearch.basehash import (
-    BaseHashNeighborSearch,
-    hash_split,
-)
-
-
-class HashFlatNeighborSearch(BaseHashNeighborSearch, BaseFlatNeighborSearch):
-    """Neighbor search using a hashtable (similar to octrees)."""
-
-    _box = None
-
-    def _find_neighbors(self, hash_id, coor):
-        neighbor_blocks = hash_to_neighbors(hash_id, self._bits)
-        all_neighbor_points = []
-        for block in neighbor_blocks:
-            try:
-                all_neighbor_points.extend(self._hashtable[block])
-            except KeyError:
-                pass
-
-        pot_neighbors = np.array(all_neighbor_points)
-        return self._get_close_neighbor_dist(coor, pot_neighbors)
-
-    def update_values(self, new_values, new_active_mask=None):
-        if not self._check_box(new_values, new_active_mask):
-            self.rebuild(new_values, new_active_mask)
-        else:
-            super().update_values(new_values, new_active_mask=new_active_mask)
-
-    def _check_box(self, new_values, new_active_mask):
-        """Check whether particles have moved out of the overall box.
-
-        Parameters
-        ----------
-        new_values : np.ndarray
-            New particle coordinates (depth, lat, lon) to be checked.
-        new_active_mask : np.ndarray
-            New active mask for the particles.
-
-        Returns
-        -------
-        bool
-            True if the box is still big enough, False if not.
-        """
-        if self._box is None:
-            return False
-        if new_active_mask is None:
-            active_values = new_values
-        else:
-            active_values = new_values[:, new_active_mask]
-        for i_dim in range(3):
-            if np.any(active_values[i_dim, :] - self._box[i_dim][0] < 0):
-                return False
-            if np.any(active_values[i_dim, :] - self._box[i_dim][1] > 0):
-                return False
-        return True
-
-    def rebuild(self, values, active_mask=-1):
-        super().rebuild(values, active_mask)
-        active_values = self._values[:, self._active_mask]
-
-        # Compute the dimensions of the box with a margin.
-        self._box = []
-        for i_dim in range(3):
-            val_min = active_values[i_dim, :].min()
-            val_max = active_values[i_dim, :].max()
-            margin = (val_max - val_min) * 0.3
-            self._box.append([val_min - margin, val_max + margin])
-
-        self._box = np.array(self._box)
-
-        epsilon = 1e-8
-
-        # Compute the number of bits in each of the three dimensions.
-        # E.g. if we have 3 bits (depth), we must have less than 2^3 cells in
-        # that direction.
-        n_bits = np.log((self._box[:, 1] - self._box[:, 0]) / self.inter_dist.reshape(-1) + epsilon) / np.log(2)
-        self._bits = np.ceil(n_bits).astype(int)
-
-        # Compute the starting point of the cell (0, 0, 0).
-        self._min_box = self._box[:, 0]
-        self._min_box = self._min_box.reshape(-1, 1)
-
-        # Compute the hash table.
-        particle_hashes = self._values_to_hashes(values, self.active_idx)
-        self._hashtable = hash_split(particle_hashes, active_idx=self.active_idx)
-        self._particle_hashes = particle_hashes
-
-        # Keep track of the position of a particle index within a cell.
-        self._hash_idx = np.empty_like(self._particle_hashes, dtype=int)
-        for idx_array in self._hashtable.values():
-            self._hash_idx[idx_array] = np.arange(len(idx_array))
-
-    def _values_to_hashes(self, values, active_idx=None):
-        if active_idx is None:
-            active_values = values
-        else:
-            active_values = values[:, active_idx]
-
-        # Compute the box_id/hashes.
-        box_i = ((active_values - self._min_box) / self.inter_dist).astype(int)
-        particle_hashes = np.bitwise_or(box_i[0, :], np.left_shift(box_i[1, :], self._bits[0]))
-
-        if active_idx is None:
-            return particle_hashes
-
-        # Put the hashes back into a full-length array.
-        all_hashes = np.empty(values.shape[1], dtype=int)
-        all_hashes[active_idx] = particle_hashes
-        return all_hashes
-
-
-def hash_to_neighbors(hash_id, bits):
-    """Compute neighboring cells from a hash.
-
-    Parameters
-    ----------
-    hash_id : int
-        Hash value of the current cell.
-    bits : np.ndarray
-        Number of bits per dimension; the key to compute the hashes.
-
-    Returns
-    -------
-    list
-        Hashes of the cells neighboring hash_id.
-    """
-    coor = np.zeros((len(bits),), dtype=np.int32)
-    new_coor = np.zeros((len(bits),), dtype=np.int32)
-
-    # Compute the (ix, iy, iz) coordinates of the hash.
-    tot_bits = 0
-    for dim in range(len(bits)):
-        coor[dim] = (hash_id >> tot_bits) & ((1 << bits[dim]) - 1)
-        tot_bits += bits[dim]
-
-    coor_max = np.left_shift(1, bits)
-
-    neighbors = []
-
-    # Loop over all 3^3 neighboring cells.
-    for offset in range(pow(3, len(bits))):
-        # Compute the integer coordinates of the neighboring cell.
-        divider = 1
-        for dim in range(len(bits)):
-            new_coor[dim] = coor[dim] + (1 - ((offset // divider) % 3))
-            divider *= 3
-
-        # Cell is outside the box/doesn't exist.
-        if np.any(new_coor > coor_max) or np.any(new_coor < 0):
-            continue
-
-        # Compute the hash of the neighboring cell.
-        new_hash = 0
-        tot_bits = 0
-        for dim in range(len(bits)):
-            new_hash |= new_coor[dim] << tot_bits
-            tot_bits += bits[dim]
-        neighbors.append(new_hash)
-    return neighbors
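The flat hash packs the integer cell coordinates of a particle into a single integer by bit-shifting, which is what `hash_to_neighbors` above inverts. A minimal sketch of the packing scheme, using hypothetical cell coordinates and two dimensions of 4 bits each:

```python
import numpy as np

bits = np.array([4, 4])  # at most 2**4 cells per dimension
ix, iy = 5, 9            # cell coordinates of a particle

# Pack: the low 4 bits hold ix, the next 4 bits hold iy.
h = ix | (iy << bits[0])
assert h == 5 + 9 * 16

# Unpack, mirroring the loop at the top of hash_to_neighbors.
assert (h >> 0) & ((1 << bits[0]) - 1) == ix
assert (h >> bits[0]) & ((1 << bits[1]) - 1) == iy
```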
diff --git a/src/parcels/interaction/neighborsearch/hashspherical.py b/src/parcels/interaction/neighborsearch/hashspherical.py
deleted file mode 100644
index 32fff56f19..0000000000
--- a/src/parcels/interaction/neighborsearch/hashspherical.py
+++ /dev/null
@@ -1,178 +0,0 @@
-from math import ceil
-
-import numpy as np
-
-from parcels.interaction.neighborsearch.base import BaseSphericalNeighborSearch
-from parcels.interaction.neighborsearch.basehash import (
-    BaseHashNeighborSearch,
-    hash_split,
-)
-
-
-class HashSphericalNeighborSearch(BaseHashNeighborSearch, BaseSphericalNeighborSearch):
-    """Neighbor search using a hashtable (similar to octrees).
-
-    Parameters
-    ----------
-    inter_dist_vert : float
-        Interaction distance (vertical) in m.
-    inter_dist_horiz : float
-        Interaction distance (horizontal) in m.
-    max_depth : float, optional
-        Maximum depth of the particles (default is 100000 m).
-    """
-
-    def __init__(self, inter_dist_vert, inter_dist_horiz, max_depth=100000):
-        super().__init__(inter_dist_vert, inter_dist_horiz, max_depth)
-
-        self._init_structure()
-
-    def _find_neighbors(self, hash_id, coor):
-        """Get neighbors from hash_id and location."""
-        # Get the neighboring cells.
-        neighbor_blocks = geo_hash_to_neighbors(hash_id, coor, self._bits, self.inter_arc_dist)
-        all_neighbor_points = []
-
-        # Get the particles from the neighboring cells.
-        for block in neighbor_blocks:
-            try:
-                all_neighbor_points.extend(self._hashtable[block])
-            except KeyError:
-                pass
-
-        potential_neighbors = np.array(all_neighbor_points, dtype=int)
-        return self._get_close_neighbor_dist(coor, potential_neighbors)
-
-    def _values_to_hashes(self, values, active_idx=None):
-        """Convert coordinates to cell ids.
-
-        Parameters
-        ----------
-        values : np.ndarray
-            Array of positions of particles to convert, of shape
-            ([depth, lat, lon], # of particles to convert).
-        active_idx : np.ndarray, optional
-            Indices of active particles. (Default value = None)
-
-        Returns
-        -------
-        np.ndarray
-            Array of cell ids.
-        """
-        if active_idx is None:
-            active_idx = np.arange(values.shape[1], dtype=int)
-        depth = values[0, active_idx]
-        lat = values[1, active_idx]
-        lon = values[2, active_idx]
-
-        # Southern or Northern hemisphere.
-        lat_sign = (lat > 0).astype(int)
-
-        # Find the depth and latitude parts of the cell id.
-        i_depth = np.floor(depth / self.inter_dist_vert).astype(int)
-        i_lat = np.floor(np.abs(lat) / self.inter_degree_dist).astype(int)
-
-        # Get the arc length of the smaller circle around the earth.
-        circ_small = 2 * np.pi * np.cos((i_lat + 1) * self.inter_arc_dist)
-        n_lon = np.floor(circ_small / self.inter_arc_dist).astype(int)
-        n_lon[n_lon < 1] = 1
-        d_lon = 360 / n_lon
-
-        # Get the longitude part of the cell id.
-        i_lon = np.floor(lon / d_lon).astype(int)
-
-        # Merge the 4 parts of the cell into one id.
-        point_hash = i_3d_to_hash(i_depth, i_lat, i_lon, lat_sign, self._bits)
-        point_array = np.empty(values.shape[1], dtype=int)
-        point_array[active_idx] = point_hash
-        return point_array
-
-    def rebuild(self, values, active_mask=-1):
-        """Recreate the tree with new values.
-
-        Parameters
-        ----------
-        values : np.ndarray
-            Positions of the particles.
-        active_mask : np.ndarray, optional
-            (Default value = -1)
-        """
-        super().rebuild(values, active_mask)
-        active_idx = self.active_idx
-
-        # Compute the hash values.
-        self._particle_hashes = np.empty(self._values.shape[1], dtype=int)
-        self._particle_hashes[active_idx] = self._values_to_hashes(values[:, active_idx])
-
-        # Create the hashtable.
-        self._hashtable = hash_split(self._particle_hashes, active_idx=active_idx)
-
-        # Keep track of the position of a particle index within a cell.
-        self._hash_idx = np.empty_like(self._particle_hashes, dtype=int)
-        for idx_array in self._hashtable.values():
-            self._hash_idx[idx_array] = np.arange(len(idx_array))
-
-    def _init_structure(self):
-        """Initialize the basic tree properties without building."""
-        epsilon = 1e-12
-        R_earth = 6371000
-
-        self.inter_arc_dist = self.inter_dist_horiz / R_earth
-        self.inter_degree_dist = 180 * self.inter_arc_dist / np.pi
-        n_lines_depth = int(ceil(self.max_depth / self.inter_dist_vert + epsilon))
-        n_lines_lat = int(ceil(np.pi / self.inter_arc_dist + epsilon))
-        n_lines_lon = int(ceil(2 * np.pi / self.inter_arc_dist + epsilon))
-        n_bits_lat = ceil(np.log(n_lines_lat) / np.log(2))
-        n_bits_lon = ceil(np.log(n_lines_lon) / np.log(2))
-        n_bits_depth = ceil(np.log(n_lines_depth) / np.log(2))
-        self._bits = np.array([n_bits_depth, n_bits_lat, n_bits_lon])
-
-
-def i_3d_to_hash(i_depth, i_lat, i_lon, lat_sign, bits):
-    """Convert depth, latitude and longitude ids to a hash."""
-    point_hash = lat_sign
-    point_hash = np.bitwise_or(point_hash, np.left_shift(i_depth, 1))
-    point_hash = np.bitwise_or(point_hash, np.left_shift(i_lat, 1 + bits[0]))
-    point_hash = np.bitwise_or(point_hash, np.left_shift(i_lon, 1 + bits[0] + bits[1]))
-    return point_hash
-
-
-def geo_hash_to_neighbors(hash_id, coor, bits, inter_arc_dist):
-    """Compute the hashes of all neighboring cells in a 3x3x3 neighborhood."""
-    lat_sign = hash_id & 0x1
-    i_depth = (hash_id >> 1) & ((1 << bits[0]) - 1)
-    i_lat = (hash_id >> (1 + bits[0])) & ((1 << bits[1]) - 1)
-
-    def all_neigh_depth(i_lat, i_lon, lat_sign):
-        hashes = []
-        for d_depth in [-1, 0, 1]:
-            new_depth = i_depth + d_depth
-            if new_depth < 0:
-                continue
-            hashes.append(i_3d_to_hash(new_depth, i_lat, i_lon, lat_sign, bits))
-        return hashes
-
-    neighbors = []
-    # Loop over the lower, middle and upper latitude rows.
-    for i_d_lat in [-1, 0, 1]:
-        new_lat_sign = lat_sign
-        new_i_lat = i_lat + i_d_lat
-        if new_i_lat == -1:
-            new_i_lat = 0
-            new_lat_sign = 1 - lat_sign
-
-        min_lat = new_i_lat + 1
-        circ_small = 2 * np.pi * np.cos(min_lat * inter_arc_dist)
-        n_new_lon = int(max(1, np.floor(circ_small / inter_arc_dist)))
-        d_lon = 360 / n_new_lon
-        if n_new_lon <= 3:
-            for new_i_lon in range(n_new_lon):
-                neighbors.extend(all_neigh_depth(new_i_lat, new_i_lon, new_lat_sign))
-        else:
-            start_i_lon = int(np.floor(coor[2][0] / d_lon))
-            for delta_lon in [-1, 0, 1]:
-                new_i_lon = (start_i_lon + delta_lon + n_new_lon) % n_new_lon
-                neighbors.extend(all_neigh_depth(new_i_lat, new_i_lon, new_lat_sign))
-    return neighbors
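The spherical hash gives each latitude band its own number of longitude cells, proportional to the circumference of the small circle bounding the band, so cells keep a roughly constant physical size toward the poles. A standalone sketch of that discretization, assuming a horizontal interaction distance of 0.05 rad of arc:

```python
import numpy as np

inter_arc_dist = 0.05  # horizontal interaction distance / R_earth

for i_lat in [0, 10, 20]:
    # Circumference (in radians of arc) of the circle bounding the band.
    circ_small = 2 * np.pi * np.cos((i_lat + 1) * inter_arc_dist)
    n_lon = max(1, int(np.floor(circ_small / inter_arc_dist)))
    print(f"latitude band {i_lat}: {n_lon} longitude cells of {360 / n_lon:.2f} degrees")
```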
diff --git a/src/parcels/interaction/neighborsearch/kdtreeflat.py b/src/parcels/interaction/neighborsearch/kdtreeflat.py
deleted file mode 100644
index c6730dedae..0000000000
--- a/src/parcels/interaction/neighborsearch/kdtreeflat.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import numpy as np
-from scipy.spatial import KDTree
-
-from parcels.interaction.neighborsearch.base import BaseFlatNeighborSearch
-
-
-class KDTreeFlatNeighborSearch(BaseFlatNeighborSearch):
-    def find_neighbors_by_coor(self, coor):
-        coor = coor.reshape(3, 1)
-        corrected_coor = (coor / self.inter_dist).reshape(-1)
-        rel_idx = np.array(self._kdtree.query_ball_point(corrected_coor, r=1))
-        neighbor_idx = self.active_idx[rel_idx]
-        return neighbor_idx, np.vstack(self._distance(coor, neighbor_idx))
-
-    def rebuild(self, values=None, active_mask=-1):
-        super().rebuild(values, active_mask)
-        self._corrected_values = values[:, self._active_idx] / self.inter_dist
-        self._kdtree = KDTree(self._corrected_values.T)
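The KD-tree variant above rescales every coordinate by the interaction distance when building the tree, so a fixed-radius query with r=1 returns exactly the neighbours within the interaction distance. A minimal standalone sketch of the same trick (hypothetical data, not the Parcels classes):

```python
import numpy as np
from scipy.spatial import KDTree

rng = np.random.default_rng(0)
values = rng.random((3, 100))             # (depth, lat, lon) x particles
inter_dist = np.array([[0.3], [0.3], [0.3]])

tree = KDTree((values / inter_dist).T)    # rescale, then build on row-points
query = values[:, 0] / inter_dist[:, 0]   # rescaled coordinates of particle 0
neighbors = tree.query_ball_point(query, r=1)
assert 0 in neighbors                     # a particle is its own neighbour
```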
diff --git a/src/parcels/kernels/__init__.py b/src/parcels/kernels/__init__.py
index 9b2a3fc9d9..b4635869af 100644
--- a/src/parcels/kernels/__init__.py
+++ b/src/parcels/kernels/__init__.py
@@ -13,11 +13,6 @@
     AdvectionDiffusionM1,
     DiffusionUniformKh,
 )
-from .interaction import (
-    AsymmetricAttraction,
-    MergeWithNearestNeighbor,
-    NearestNeighborWithinRange,
-)
 
 __all__ = [  # noqa: RUF022
     # advection
@@ -33,8 +28,4 @@
     "AdvectionDiffusionEM",
     "AdvectionDiffusionM1",
     "DiffusionUniformKh",
-    # interaction
-    "AsymmetricAttraction",
-    "MergeWithNearestNeighbor",
-    "NearestNeighborWithinRange",
 ]
diff --git a/src/parcels/kernels/interaction.py b/src/parcels/kernels/interaction.py
deleted file mode 100644
index 2eb919ef9d..0000000000
--- a/src/parcels/kernels/interaction.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""Collection of pre-built interaction kernels."""
-
-import numpy as np
-
-from parcels._core.statuscodes import StatusCode
-
-__all__ = ["AsymmetricAttraction", "MergeWithNearestNeighbor", "NearestNeighborWithinRange"]
-
-
-def NearestNeighborWithinRange(particle, fieldset, time, neighbors, mutator):
-    """Computes the nearest neighbor within range for each particle.
-
-    The particle has to have the nearest_neighbor property. If no particle
-    is in range, the nearest_neighbor property is set to -1.
-    """
-    min_dist = -1
-    neighbor_id = -1
-    for n in neighbors:
-        # Note that for interacting particles, n.horiz_dist and n.vert_dist
-        # are automatically set to the distance along the surface and in the
-        # z-direction, respectively.
-        dist = np.sqrt(n.horiz_dist**2 + n.vert_dist**2)
-        # Note that in case of a tie, the particle with the lowest ID
-        # wins. In certain adversarial cases, this might lead to
-        # undesirable results.
-        if dist < min_dist or min_dist < 0:
-            min_dist = dist
-            neighbor_id = n.trajectory
-
-    def f(p, neighbor):
-        p.nearest_neighbor = neighbor
-
-    mutator[particle.trajectory].append((f, [neighbor_id]))
-
-    return StatusCode.Success
-
-
-def MergeWithNearestNeighbor(particle, fieldset, time, neighbors, mutator):
-    """Perform a merge action with the nearest neighbor.
-
-    Depends on the NearestNeighborWithinRange kernel, or one that provides
-    similar functionality. The particle has to have the nearest_neighbor and
-    mass properties. Only pairs of particles that have each other as nearest
-    neighbors will be merged.
-    """
-
-    def delete_particle(p):
-        p.state = StatusCode.Delete
-
-    def merge_with_neighbor(p, nlat, nlon, ndepth, nmass):
-        p.lat_nextloop = (p.mass * p.lat + nmass * nlat) / (p.mass + nmass)
-        p.lon_nextloop = (p.mass * p.lon + nmass * nlon) / (p.mass + nmass)
-        p.depth_nextloop = (p.mass * p.depth + nmass * ndepth) / (p.mass + nmass)
-        p.mass = p.mass + nmass
-
-    for n in neighbors:
-        if n.trajectory == particle.nearest_neighbor:
-            if n.nearest_neighbor == particle.trajectory and particle.trajectory < n.trajectory:
-                # Merge particles:
-                # Delete the neighbor
-                mutator[n.trajectory].append((delete_particle, ()))
-                # Take the mass-weighted mean position and the sum of masses
-                args = np.array([n.lat, n.lon, n.depth, n.mass])
-                mutator[particle.trajectory].append((merge_with_neighbor, args))
-
-                return StatusCode.Success
-            else:
-                return StatusCode.Success
-
-    return StatusCode.Success
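These kernels never mutate particles directly: they append (function, args) pairs to a `mutator` dict keyed by trajectory id, and the queued mutations are applied only after all interaction kernels have run, so the outcome does not depend on the iteration order over particles. A minimal sketch of that deferred-update pattern (hypothetical particle class, not the Parcels API):

```python
from collections import defaultdict

class Particle:
    def __init__(self, trajectory, lat):
        self.trajectory, self.lat = trajectory, lat

particles = [Particle(0, 0.0), Particle(1, 1.0)]
mutator = defaultdict(list)

def nudge(p, dlat):
    p.lat += dlat

# "Kernel" phase: only queue the updates.
for p in particles:
    mutator[p.trajectory].append((nudge, [0.1]))

# Apply phase: run the queued mutators afterwards.
for p in particles:
    for func, args in mutator[p.trajectory]:
        func(p, *args)

assert [p.lat for p in particles] == [0.1, 1.1]
```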
-
-
-def AsymmetricAttraction(particle, fieldset, time, neighbors, mutator):
-    """Example of asymmetric attraction between particles.
-
-    Particles should have the attractor attribute. If attractor==True, the
-    particle attracts particles around it, but doesn't experience any
-    attraction itself. Particles with attractor==False are only attracted
-    to attractors. Only works properly on a flat mesh (because of the
-    vector calculations).
-    """
-    na_neighbors = []
-    if not particle.attractor:
-        return StatusCode.Success
-    for n in neighbors:
-        if n.attractor:
-            continue
-        na_neighbors.append(n)
-
-    velocity_param = 0.04
-    for n in na_neighbors:
-        assert n.dt == particle.dt
-        dx = np.array([particle.lat - n.lat, particle.lon - n.lon, particle.depth - n.depth])
-        dx_norm = np.linalg.norm(dx)
-        velocity = velocity_param / (dx_norm**2)
-
-        distance = velocity * n.dt
-        d_vec = distance * dx / dx_norm
-
-        def f(n, dlat, dlon, ddepth):
-            n.lat_nextloop += dlat
-            n.lon_nextloop += dlon
-            n.depth_nextloop += ddepth
-
-        mutator[n.trajectory].append((f, d_vec))
-
-    return StatusCode.Success
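In `AsymmetricAttraction` the pull speed falls off with the square of the separation (`velocity_param / d**2`), so each neighbour is displaced by `velocity_param * dt / d**2` along the unit vector toward the attractor. A small numeric check of that formula (a sketch with made-up values; flat mesh assumed):

```python
import numpy as np

velocity_param = 0.04
dt = 1.0
dx = np.array([0.0, 0.2, 0.0])  # attractor minus neighbour (lat, lon, depth)
dx_norm = np.linalg.norm(dx)    # separation d = 0.2

velocity = velocity_param / dx_norm**2  # 0.04 / 0.04 = 1.0
d_vec = velocity * dt * dx / dx_norm    # displacement toward the attractor

# |d_vec| = velocity_param * dt / d**2 = 1.0
assert np.allclose(d_vec, [0.0, 1.0, 0.0])
```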
diff --git a/tests-v3/test_interaction.py b/tests-v3/test_interaction.py
deleted file mode 100644
index c20fdda881..0000000000
--- a/tests-v3/test_interaction.py
+++ /dev/null
@@ -1,304 +0,0 @@
-import numpy as np
-import pytest
-
-from parcels import Field, FieldSet, ParticleSet
-from parcels.application_kernels.advection import AdvectionRK4
-from parcels.application_kernels.interaction import (
-    AsymmetricAttraction,
-    MergeWithNearestNeighbor,
-    NearestNeighborWithinRange,
-)
-from parcels.interaction.neighborsearch import (
-    BruteFlatNeighborSearch,
-    BruteSphericalNeighborSearch,
-    HashFlatNeighborSearch,
-    HashSphericalNeighborSearch,
-    KDTreeFlatNeighborSearch,
-)
-from parcels.interaction.neighborsearch.basehash import BaseHashNeighborSearch
-from parcels.particle import InteractionParticle, Variable
-from tests.common_kernels import DoNothing
-from tests.utils import create_fieldset_unit_mesh, create_flat_positions, create_spherical_positions
-
-
-def DummyMoveNeighbor(particle, fieldset, time, neighbors, mutator):
-    """A particle boosts the movement of its nearest neighbor, by adding 0.1 to its lat position."""
-    if len(neighbors) == 0:
-        return
-
-    distances = [np.sqrt(n.vert_dist**2 + n.horiz_dist**2) for n in neighbors]
-    i_min_dist = np.argmin(distances)
-
-    def f(p):
-        p.lat_nextloop += 0.1
-
-    neighbor_id = neighbors[i_min_dist].trajectory
-    mutator[neighbor_id].append((f, ()))
-
-
-@pytest.fixture
-def fieldset_unit_mesh():
-    return create_fieldset_unit_mesh(mesh="spherical")
-
-
-def test_simple_interaction_kernel(fieldset_unit_mesh):
-    lons = [0.0, 0.1, 0.25, 0.44]
-    lats = [0.0, 0.0, 0.0, 0.0]
-    # Distance in meters of R_earth * 0.2 degrees
-    interaction_distance = 6371000 * 0.2 * np.pi / 180
-    pset = ParticleSet(
-        fieldset_unit_mesh,
-        pclass=InteractionParticle,
-        lon=lons,
-        lat=lats,
-        interaction_distance=interaction_distance,
-    )
-    pset.execute(DoNothing, pyfunc_inter=DummyMoveNeighbor, endtime=2.0, dt=1.0)
-    assert np.allclose(pset.lat, [0.1, 0.2, 0.1, 0.0], rtol=1e-5)
-
-
-@pytest.mark.parametrize("mesh", ["spherical", "flat"])
-@pytest.mark.parametrize("periodic_domain_zonal", [False, True])
-def test_zonal_periodic_distance(mesh, periodic_domain_zonal):
-    fset = create_fieldset_unit_mesh(mesh=mesh)
-    interaction_distance = 0.2 if mesh == "flat" else 6371000 * 0.2 * np.pi / 180
-    lons = [0.05, 0.4, 0.95]
-    pset = ParticleSet(
-        fset,
-        pclass=InteractionParticle,
-        lon=lons,
-        lat=[0.5] * len(lons),
-        interaction_distance=interaction_distance,
-        periodic_domain_zonal=periodic_domain_zonal,
-    )
-    pset.execute(DoNothing, pyfunc_inter=DummyMoveNeighbor, endtime=2.0, dt=1.0)
-    if periodic_domain_zonal:
-        assert np.allclose([pset[0].lat, pset[2].lat], 0.6)
-        assert np.allclose(pset[1].lat, 0.5)
-    else:
-        assert np.allclose([p.lat for p in pset], 0.5)
-
-
-def test_concatenate_interaction_kernels(fieldset_unit_mesh):
-    lons = [0.0, 0.1, 0.25, 0.44]
-    lats = [0.0, 0.0, 0.0, 0.0]
-    # Distance in meters of R_earth * 0.2 degrees
-    interaction_distance = 6371000 * 0.2 * np.pi / 180
-
-    pset = ParticleSet(
-        fieldset_unit_mesh,
-        pclass=InteractionParticle,
-        lon=lons,
-        lat=lats,
-        interaction_distance=interaction_distance,
-    )
-    pset.execute(
-        DoNothing,
-        pyfunc_inter=pset.InteractionKernel(DummyMoveNeighbor) + pset.InteractionKernel(DummyMoveNeighbor),
-        endtime=2.0,
-        dt=1.0,
-    )
-    # The kernel results are only applied after all interaction kernels
-    # have been executed, so we expect the result to be double the
-    # movement from executing the kernel once.
-    assert np.allclose(pset.lat, [0.2, 0.4, 0.2, 0.0], rtol=1e-5)
-
-
-def test_concatenate_interaction_kernels_as_pyfunc(fieldset_unit_mesh):
-    lons = [0.0, 0.1, 0.25, 0.44]
-    lats = [0.0, 0.0, 0.0, 0.0]
-    # Distance in meters of R_earth * 0.2 degrees
-    interaction_distance = 6371000 * 0.2 * np.pi / 180
-
-    pset = ParticleSet(
-        fieldset_unit_mesh,
-        pclass=InteractionParticle,
-        lon=lons,
-        lat=lats,
-        interaction_distance=interaction_distance,
-    )
-    pset.execute(
-        DoNothing, pyfunc_inter=pset.InteractionKernel(DummyMoveNeighbor) + DummyMoveNeighbor, endtime=2.0, dt=1.0
-    )
-    # The kernel results are only applied after all interaction kernels
-    # have been executed, so we expect the result to be double the
-    # movement from executing the kernel once.
-    assert np.allclose(pset.lat, [0.2, 0.4, 0.2, 0.0], rtol=1e-5)
-
-
-def test_neighbor_merge(fieldset_unit_mesh):
-    lons = [0.0, 0.1, 0.25, 0.44]
-    lats = [0.0, 0.0, 0.0, 0.0]
-    # Distance in meters of R_earth * 5.5 degrees
-    interaction_distance = 6371000 * 5.5 * np.pi / 180
-    MergeParticle = InteractionParticle.add_variables(
-        [Variable("nearest_neighbor", dtype=np.int64, to_write=False), Variable("mass", initial=1, dtype=np.float32)]
-    )
-    pset = ParticleSet(
-        fieldset_unit_mesh, pclass=MergeParticle, lon=lons, lat=lats, interaction_distance=interaction_distance
-    )
-    pyfunc_inter = pset.InteractionKernel(NearestNeighborWithinRange) + MergeWithNearestNeighbor
-    pset.execute(DoNothing, pyfunc_inter=pyfunc_inter, runtime=3.0, dt=1.0)
-
-    # After two steps, the particles should be merged into a single particle.
-    assert len(pset) == 1
-
-
-def test_asymmetric_attraction(fieldset_unit_mesh):
-    lons = [0.0, 0.1, 0.2]
-    lats = [0.0, 0.0, 0.0]
-    # Distance in meters of R_earth * 5.5 degrees
-    interaction_distance = 6371000 * 5.5 * np.pi / 180
-    AttractingParticle = InteractionParticle.add_variable("attractor", dtype=np.bool_, to_write="once")
-    pset = ParticleSet(
-        fieldset_unit_mesh,
-        pclass=AttractingParticle,
-        lon=lons,
-        lat=lats,
-        interaction_distance=interaction_distance,
-        attractor=[True, False, False],
-    )
-    pyfunc_inter = pset.InteractionKernel(AsymmetricAttraction)
-    pset.execute(DoNothing, pyfunc_inter=pyfunc_inter, runtime=3.0, dt=1.0)
-
-    assert lons[1] > pset.lon[1]
-    assert lons[2] > pset.lon[2]
-    assert len(pset) == 3
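The tests above repeatedly convert an angular separation into a metric interaction distance via the arc-length formula distance = R_earth * degrees * pi / 180. A quick check of that conversion:

```python
import numpy as np

R_earth = 6371000  # m
interaction_distance = R_earth * 0.2 * np.pi / 180
# 0.2 degrees of arc is roughly 22.24 km on a spherical Earth.
assert np.isclose(interaction_distance, 22239, atol=1)
```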
-
-
-def ConstantMoveInteraction(particle, fieldset, time, neighbors, mutator):
-    def f(p):
-        p.lat_nextloop += p.dt
-
-    mutator[particle.trajectory].append((f, ()))
-
-
-@pytest.mark.parametrize("runtime, dt", [(1, 1e-2), (1, -2.123e-3), (1, -3.12452e-3)])
-def test_pseudo_interaction(runtime, dt):
-    # A uniform flow in which advected particles move at
-    # 1 m/s in the longitudinal direction.
-    xdim, ydim = (10, 20)
-    Uflow = Field(
-        "U",
-        np.ones((ydim, xdim), dtype=np.float64),
-        lon=np.linspace(0.0, 1e3, xdim, dtype=np.float64),
-        lat=np.linspace(0.0, 1e3, ydim, dtype=np.float64),
-    )
-    Vflow = Field("V", np.zeros((ydim, xdim), dtype=np.float64), grid=Uflow.grid)
-    fieldset = FieldSet(Uflow, Vflow)
-
-    # Execute the advection kernel only
-    pset = ParticleSet(fieldset, pclass=InteractionParticle, lon=[2], lat=[2])
-    pset.execute(AdvectionRK4, runtime=runtime, dt=dt)
-
-    # Execute both the advection and interaction kernels.
-    pset2 = ParticleSet(fieldset, pclass=InteractionParticle, lon=[2], lat=[2], interaction_distance=1)
-    pyfunc_inter = pset2.InteractionKernel(ConstantMoveInteraction)
-    pset2.execute(AdvectionRK4, pyfunc_inter=pyfunc_inter, runtime=runtime, dt=dt)
-
-    # Check whether they have moved as predicted.
-    assert np.all(pset.lon == pset2.lon)
-    assert np.all(pset2.lat == pset2.lon)
-    assert np.all(pset2.particledata.data["time"][0] == pset.particledata.data["time"][0])
-
-
-def compare_results_by_idx(instance, particle_idx, ref_result, active_idx=None):
-    cur_neigh, _ = instance.find_neighbors_by_idx(particle_idx)
-    assert isinstance(cur_neigh, np.ndarray)
-    assert len(cur_neigh) == len(set(cur_neigh))
-    if active_idx is None:
-        active_idx = np.arange(instance._values.shape[1])
-    if isinstance(instance, BaseHashNeighborSearch):
-        instance.consistency_check()
-    for neigh in cur_neigh:
-        assert neigh in active_idx
-    assert set(cur_neigh) <= set(active_idx)
-    neigh_by_coor, _ = instance.find_neighbors_by_coor(instance._values[:, particle_idx])
-    assert np.allclose(cur_neigh, neigh_by_coor)
-
-    assert set(ref_result) == set(cur_neigh)
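`compare_results_by_idx` validates every accelerated structure against the brute-force reference: both must return exactly the same neighbour set. The pattern in miniature (a self-contained sketch, not the Parcels classes):

```python
import numpy as np

def brute_force_neighbors(values, idx, r):
    """Reference: indices of all points within distance r of point idx."""
    d = np.linalg.norm(values - values[:, [idx]], axis=0)
    return set(np.where(d < r)[0])

rng = np.random.default_rng(1)
values = rng.random((3, 50))
ref = brute_force_neighbors(values, 0, 0.3)
# Any accelerated search structure must reproduce this set exactly;
# at minimum it contains the query point itself (distance 0).
assert 0 in ref
```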
-
-
-@pytest.mark.parametrize("test_class", [KDTreeFlatNeighborSearch, HashFlatNeighborSearch, BruteFlatNeighborSearch])
-def test_flat_neighbors(test_class):
-    np.random.seed(129873)
-    ref_class = BruteFlatNeighborSearch
-    n_particle = 1000
-    positions = np.random.rand(n_particle * 3).reshape(3, n_particle)
-    ref_instance = ref_class(inter_dist_vert=0.3, inter_dist_horiz=0.3)
-    test_instance = test_class(inter_dist_vert=0.3, inter_dist_horiz=0.3)
-    ref_instance.rebuild(positions)
-    test_instance.rebuild(positions)
-
-    for particle_idx in np.random.choice(positions.shape[1], 100, replace=False):
-        ref_result, _ = ref_instance.find_neighbors_by_idx(particle_idx)
-        compare_results_by_idx(test_instance, particle_idx, ref_result)
-
-
-@pytest.mark.parametrize("test_class", [BruteSphericalNeighborSearch, HashSphericalNeighborSearch])
-def test_spherical_neighbors(test_class):
-    np.random.seed(9837452)
-    ref_class = BruteSphericalNeighborSearch
-
-    positions = create_spherical_positions(10000, max_depth=100000)
-    ref_instance = ref_class(inter_dist_vert=100000, inter_dist_horiz=1000000)
-    test_instance = test_class(inter_dist_vert=100000, inter_dist_horiz=1000000)
-    ref_instance.rebuild(positions)
-    test_instance.rebuild(positions)
-
-    for particle_idx in np.random.choice(positions.shape[1], 100, replace=False):
-        ref_result, _ = ref_instance.find_neighbors_by_idx(particle_idx)
-        compare_results_by_idx(test_instance, particle_idx, ref_result)
-
-
-@pytest.mark.parametrize("test_class", [KDTreeFlatNeighborSearch, HashFlatNeighborSearch, BruteFlatNeighborSearch])
-def test_flat_update(test_class):
-    np.random.seed(9182741)
-    n_particle = 1000
-    n_test_particle = 10
-    n_active_mask = 10
-    ref_class = BruteFlatNeighborSearch
-    ref_instance = ref_class(inter_dist_vert=0.3, inter_dist_horiz=0.3)
-    test_instance = test_class(inter_dist_vert=0.3, inter_dist_horiz=0.3)
-
-    for _ in range(1, n_active_mask):
-        positions = create_flat_positions(n_particle) + 10 * np.random.rand()
-        active_mask = np.random.rand(n_particle) > 0.5
-        ref_instance.update_values(positions, active_mask)
-        test_instance.update_values(positions, active_mask)
-        active_idx = np.where(active_mask)[0]
-        if len(active_idx) == 0:
-            continue
-        test_particles = np.random.choice(active_idx, size=min(n_test_particle, len(active_idx)), replace=False)
-        for particle_idx in test_particles:
-            ref_result, _ = ref_instance.find_neighbors_by_idx(particle_idx)
-            compare_results_by_idx(test_instance, particle_idx, ref_result, active_idx=active_idx)
-
-
-@pytest.mark.parametrize("test_class", [BruteSphericalNeighborSearch, HashSphericalNeighborSearch])
-def test_spherical_update(test_class):
-    np.random.seed(9182741)
-    n_particle = 1000
-    n_test_particle = 10
-    n_active_mask = 10
-    ref_class = BruteSphericalNeighborSearch
-
-    ref_instance = ref_class(inter_dist_vert=100000, inter_dist_horiz=1000000)
-    test_instance = test_class(inter_dist_vert=100000, inter_dist_horiz=1000000)
-
-    for _ in range(n_active_mask):
-        positions = create_spherical_positions(n_particle)
-        active_mask = np.random.rand(n_particle) > 0.5
-        ref_instance.update_values(positions, active_mask)
-        test_instance.update_values(positions, active_mask)
-
-        active_idx = np.where(active_mask)[0]
-        if len(active_idx) == 0:
-            continue
-        test_particles = np.random.choice(active_idx, size=min(n_test_particle, len(active_idx)), replace=False)
-        for particle_idx in test_particles:
-            ref_result, _ = ref_instance.find_neighbors_by_idx(particle_idx)
-            compare_results_by_idx(test_instance, particle_idx, ref_result, active_idx=active_idx)