diff --git a/.github/ISSUE_TEMPLATE/1-bug_report.yaml b/.github/ISSUE_TEMPLATE/1-bug_report.yaml index 735bf2b612c..882a7310570 100644 --- a/.github/ISSUE_TEMPLATE/1-bug_report.yaml +++ b/.github/ISSUE_TEMPLATE/1-bug_report.yaml @@ -1,15 +1,15 @@ name: Bug report -description: Report a problem/bug to help us improve +description: Report a problem/bug to help us improve. labels: bug body: - type: markdown attributes: - value: | + value: > Thanks for taking the time to fill out this bug report! - type: textarea attributes: label: "Description of the problem" - description: | + description: > Please be as detailed as you can when describing an issue. The more information we have, the easier it will be for us to track this down. validations: @@ -17,12 +17,14 @@ body: - type: textarea attributes: label: "Minimal Complete Verifiable Example" - description: | + description: > So that we can understand and fix the issue quickly and efficiently, please provide a minimal, self-contained copy-pastable example that demonstrates the issue. + For more details, check out: - [Minimal Complete Verifiable Examples](https://stackoverflow.com/help/mcve) + - [Craft Minimal Bug Reports](https://matthewrocklin.com/minimal-bug-reports) placeholder: "PASTE CODE HERE" @@ -40,9 +42,11 @@ body: - type: textarea attributes: label: "System information" - description: | - Please paste the output of `python -c "import pygmt; pygmt.show_versions()"` - If this command is not successful, please describe your operating system, how you installed PyGMT, how you installed GMT, and paste the full error message. + description: > + Please paste the output of `python -c "import pygmt; pygmt.show_versions()"`. + + If this command is not successful, please describe your operating system, + how you installed PyGMT, how you installed GMT, and paste the full error message. 
placeholder: "PASTE THE OUTPUT HERE" render: bash validations: diff --git a/.github/ISSUE_TEMPLATE/2-feature_request.yaml b/.github/ISSUE_TEMPLATE/2-feature_request.yaml index 71ca1be6091..096ea2b283d 100644 --- a/.github/ISSUE_TEMPLATE/2-feature_request.yaml +++ b/.github/ISSUE_TEMPLATE/2-feature_request.yaml @@ -5,16 +5,17 @@ body: - type: textarea attributes: label: "Description of the desired feature" - description: | - Please be as detailed as you can in your description. - If possible, include an example of how you would like to use this feature (even better if it's a code example). + description: > + Please be as detailed as you can in your description. If possible, include + an example of how you would like to use this feature (even better if it's a code example). - type: dropdown id: help attributes: label: Are you willing to help implement and maintain this feature? - description: | - Every feature we add is code that we will have to maintain and keep updated. This takes a lot of effort. - If you are willing to be involved in the project and help maintain your feature, it will make it easier for us to accept it. + description: > + Every feature we add is code that we will have to maintain and keep updated. + This takes a lot of effort. If you are willing to be involved in the project and + help maintain your feature, it will make it easier for us to accept it. options: - "No" - "Yes" diff --git a/.github/ISSUE_TEMPLATE/3-module_request.yaml b/.github/ISSUE_TEMPLATE/3-module_request.yaml index 48ad614b226..3c6d6377c04 100644 --- a/.github/ISSUE_TEMPLATE/3-module_request.yaml +++ b/.github/ISSUE_TEMPLATE/3-module_request.yaml @@ -5,23 +5,29 @@ labels: ["feature request"] body: - type: markdown attributes: - value: | - Please replace `` in the issue title and the description with the name of the requested module and add the description of the module. 
+ value: > + Please replace `` in the issue title and the description with the + name of the requested module and add the description of the module. - type: textarea id: which-module attributes: label: Description of the desired module - description: Please be as detailed as you can in your description. If possible, include an example of how you would like to use this feature (even better if it's a code example). - placeholder: Implement [``](https://docs.generic-mapping-tools.org/latest/.html) which ``. + description: > + Please be as detailed as you can in your description. If possible, include + an example of how you would like to use this feature (even better if it's a code example). + placeholder: > + Implement [``](https://docs.generic-mapping-tools.org/latest/.html) + which ``. validations: required: true - type: dropdown id: help attributes: label: Are you willing to help implement and maintain this feature? - description: | - Every feature we add is code that we will have to maintain and keep updated. This takes a lot of effort. - If you are willing to be involved in the project and help maintain your feature, it will make it easier for us to accept it. + description: > + Every feature we add is code that we will have to maintain and keep updated. + This takes a lot of effort. If you are willing to be involved in the project and + help maintain your feature, it will make it easier for us to accept it. options: - "No" - "Yes" @@ -30,5 +36,6 @@ body: required: true - type: markdown attributes: - value: | - Progress on wrapping the module will be tracked in the [project board](https://github.com/GenericMappingTools/pygmt/projects/9). + value: > + Progress on wrapping the module will be tracked in the + [project board](https://github.com/orgs/GenericMappingTools/projects/3). 
diff --git a/.github/ISSUE_TEMPLATE/4-release_checklist.md b/.github/ISSUE_TEMPLATE/4-release_checklist.md index 59919b8fb25..87a164b15c8 100644 --- a/.github/ISSUE_TEMPLATE/4-release_checklist.md +++ b/.github/ISSUE_TEMPLATE/4-release_checklist.md @@ -1,6 +1,6 @@ --- name: PyGMT release checklist -about: Checklist for a new PyGMT release. +about: Checklist for a new PyGMT release. [For project maintainers only!] title: Release PyGMT vX.Y.Z labels: maintenance assignees: '' @@ -19,17 +19,15 @@ assignees: '' **Before release**: -- [ ] Check [SPEC 0](https://scientific-python.org/specs/spec-0000/) to see if we need to bump the minimum supported versions of GMT, Python and - core package dependencies (NumPy, pandas, Xarray) +- [ ] Check [SPEC 0](https://scientific-python.org/specs/spec-0000/) to see if we need to bump the minimum supported versions of GMT, Python and core package dependencies (NumPy, pandas, Xarray) - [ ] Review the ["PyGMT Team" page](https://www.pygmt.org/dev/team.html) +- [ ] README looks good on TestPyPI. Visit [TestPyPI](https://test.pypi.org/project/pygmt/#history), click the latest pre-release, and check the homepage. - [ ] Check to ensure that: - - [ ] Deprecations and related tests are removed for this version by running `grep --include="*.py" -r vX.Y.Z` from the base of the repository + - [ ] Deprecated workarounds/codes/tests are removed. Run `grep "# TODO" **/*.py` to find all potential TODOs. 
- [ ] All tests pass in the ["GMT Legacy Tests" workflow](https://github.com/GenericMappingTools/pygmt/actions/workflows/ci_tests_legacy.yaml) - [ ] All tests pass in the ["GMT Dev Tests" workflow](https://github.com/GenericMappingTools/pygmt/actions/workflows/ci_tests_dev.yaml) - [ ] All tests pass in the ["Doctests" workflow](https://github.com/GenericMappingTools/pygmt/actions/workflows/ci_doctests.yaml) -- [ ] Update warnings in `pygmt/_show_versions.py` as well as notes in - [Not working transparency](https://www.pygmt.org/dev/install.html#not-working-transparency) - regarding GMT-Ghostscript incompatibility +- [ ] Update warnings in `pygmt/_show_versions.py` as well as notes in [Not working transparency](https://www.pygmt.org/dev/install.html#not-working-transparency) regarding GMT-Ghostscript incompatibility - [ ] Reserve a DOI on [Zenodo](https://zenodo.org) by clicking on "New Version" - [ ] Finish up the "Changelog entry for v0.x.x" Pull Request (Use the previous changelog PR as a reference) - [ ] Run `make codespell` to check common misspellings. 
If there are any, either fix them or add them to `ignore-words-list` in `pyproject.toml` @@ -41,18 +39,16 @@ assignees: '' - [ ] Edit the draft release notes with the finalized changelog - [ ] Set the tag version and release title to vX.Y.Z - [ ] Make a release by clicking the 'Publish Release' button, this will automatically create a tag too -- [ ] Download pygmt-X.Y.Z.zip (rename to pygmt-vX.Y.Z.zip) and baseline-images.zip from - the release page, and upload the two zip files to https://zenodo.org/deposit, - ensure that they are filed under the correct reserved DOI +- [ ] Download pygmt-X.Y.Z.zip (rename to pygmt-vX.Y.Z.zip) and baseline-images.zip from the release page, and upload the two zip files to https://zenodo.org/deposit, ensure that they are filed under the correct reserved DOI **After release**: -- [ ] Update conda-forge [pygmt-feedstock](https://github.com/conda-forge/pygmt-feedstock) - [Done automatically by conda-forge's bot. Remember to pin Python and SPEC0 versions] +- [ ] Update conda-forge [pygmt-feedstock](https://github.com/conda-forge/pygmt-feedstock) [Done automatically by conda-forge's bot. Remember to pin GMT, Python and SPEC0 versions] - [ ] Bump PyGMT version on https://github.com/GenericMappingTools/try-gmt (after conda-forge update) - [ ] Announce the release on: - [ ] GMT [forum](https://forum.generic-mapping-tools.org/c/news/) (do this announcement first! 
Requires moderator status) - - [ ] [ResearchGate](https://www.researchgate.net) (after forum announcement, add new version as research item via the **code** category, be sure to include the corresponding new Zenodo DOI) + - [ ] [ResearchGate](https://www.researchgate.net) (after forum announcement; download the ZIP file of the new release from the release page and add it as research item via the **code** category, be sure to include the corresponding new Zenodo DOI) +- [ ] Update release checklist template with any additional bullet points that may have arisen during the release --- diff --git a/.github/ISSUE_TEMPLATE/5-bump_gmt_checklist.md b/.github/ISSUE_TEMPLATE/5-bump_gmt_checklist.md index 9652f2150ee..1ca5117ff5f 100644 --- a/.github/ISSUE_TEMPLATE/5-bump_gmt_checklist.md +++ b/.github/ISSUE_TEMPLATE/5-bump_gmt_checklist.md @@ -1,6 +1,6 @@ --- name: Bump GMT version checklist -about: Checklist for bumping the minimum required GMT version. +about: Checklist for bumping the minimum required GMT version. [For project maintainers only!] title: Bump to GMT X.Y.Z labels: maintenance assignees: '' diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 14b75d86f5a..0c259cf5672 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -20,7 +20,6 @@ Fixes # - [ ] Write detailed docstrings for all functions/methods. - [ ] If wrapping a new module, open a 'Wrap new GMT module' issue and submit reasonably-sized PRs. - [ ] If adding new functionality, add an example to docstrings or tutorials. -- [ ] Use underscores (not hyphens) in names of Python files and directories. 
**Slash Commands** diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index ccb22dbe5a2..f219ae76bdc 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -45,7 +45,7 @@ jobs: # Install Micromamba with conda-forge dependencies - name: Setup Micromamba - uses: mamba-org/setup-micromamba@v2.0.1 + uses: mamba-org/setup-micromamba@v2.0.4 with: environment-name: pygmt condarc: | @@ -58,14 +58,14 @@ jobs: cache-environment-key: micromamba-environment-${{ steps.date.outputs.date }} create-args: >- gmt=6.5.0 - python=3.12 + python=3.13 numpy pandas xarray netCDF4 packaging geopandas - pyarrow + pyarrow-core pytest pytest-codspeed pytest-mpl @@ -87,8 +87,8 @@ jobs: # Run the benchmark tests - name: Run benchmarks - uses: CodSpeedHQ/action@v3.1.0 + uses: CodSpeedHQ/action@v3.2.0 with: # 'bash -el -c' is needed to use the custom shell. # See https://github.com/CodSpeedHQ/action/issues/65. - run: bash -el -c "python -c \"import pygmt; pygmt.show_versions()\"; PYGMT_USE_EXTERNAL_DISPLAY=false python -m pytest -r P --pyargs pygmt --codspeed" + run: bash -el -c "python -c \"import pygmt; pygmt.show_versions()\"; PYGMT_USE_EXTERNAL_DISPLAY=false python -m pytest -r P --pyargs pygmt --codspeed --override-ini addopts='--verbose'" diff --git a/.github/workflows/cache_data.yaml b/.github/workflows/cache_data.yaml index e9b829dc4a2..46c37211424 100644 --- a/.github/workflows/cache_data.yaml +++ b/.github/workflows/cache_data.yaml @@ -43,7 +43,7 @@ jobs: # Install Micromamba with conda-forge dependencies - name: Setup Micromamba - uses: mamba-org/setup-micromamba@v2.0.1 + uses: mamba-org/setup-micromamba@v2.0.4 with: environment-name: pygmt condarc: | @@ -51,7 +51,7 @@ jobs: - conda-forge - nodefaults create-args: >- - python=3.12 + python=3.13 gmt=6.5.0 numpy pandas @@ -76,7 +76,7 @@ jobs: # Upload the downloaded files as artifacts to GitHub - name: Upload artifacts to GitHub - uses: actions/upload-artifact@v4.4.3 + uses: 
actions/upload-artifact@v4.6.0 with: name: gmt-cache include-hidden-files: true diff --git a/.github/workflows/check-links.yml b/.github/workflows/check-links.yml index 40d8c870b54..0ed425da8d6 100644 --- a/.github/workflows/check-links.yml +++ b/.github/workflows/check-links.yml @@ -35,7 +35,7 @@ jobs: - name: Link Checker id: lychee - uses: lycheeverse/lychee-action@v2.0.2 + uses: lycheeverse/lychee-action@v2.2.0 with: fail: false # Don't fail action on broken links output: /tmp/lychee-out.md diff --git a/.github/workflows/ci_docs.yml b/.github/workflows/ci_docs.yml index 4e85e10dae5..2d9ebe31907 100644 --- a/.github/workflows/ci_docs.yml +++ b/.github/workflows/ci_docs.yml @@ -55,7 +55,7 @@ jobs: # Is it a draft Pull Request (true or false)? isDraft: - ${{ github.event.pull_request.draft }} - # Only run one job (Ubuntu + Python 3.12) for draft PRs + # Only run jobs on Ubuntu for draft PRs exclude: - os: macos-latest isDraft: true @@ -80,7 +80,7 @@ jobs: # Install Micromamba with conda-forge dependencies - name: Setup Micromamba - uses: mamba-org/setup-micromamba@v2.0.1 + uses: mamba-org/setup-micromamba@v2.0.4 with: environment-name: pygmt condarc: | @@ -92,7 +92,7 @@ jobs: # environment cache is persistent for one week. 
cache-environment-key: micromamba-environment-${{ steps.date.outputs.date }} create-args: >- - python=3.12 + python=3.13 gmt=6.5.0 ghostscript=10.04.0 numpy @@ -101,21 +101,25 @@ jobs: netCDF4 packaging contextily - geopandas<1.0 + geopandas ipython - pyarrow + pyarrow-core rioxarray make pip python-build + geodatasets myst-nb panel - sphinx + sphinx>=6.2 sphinx-autodoc-typehints sphinx-copybutton sphinx-design sphinx-gallery sphinx_rtd_theme<3.0 + cairosvg + sphinxcontrib-svg2pdfconverter + tectonic # Download cached remote files (artifacts) from GitHub - name: Download remote data from GitHub @@ -134,9 +138,11 @@ jobs: python -m build --sdist python -m pip install dist/* - # Build the documentation - - name: Build the documentation - run: make -C doc clean all + - name: Build the HTML documentation + run: make -C doc clean html + + - name: Build the PDF documentation + run: make -C doc pdf - name: Checkout the gh-pages branch uses: actions/checkout@v4.2.2 @@ -162,9 +168,14 @@ jobs: # to get the right commit hash. message="Deploy $version from $(git rev-parse --short HEAD)" cd deploy - # Need to have this file so that GitHub doesn't try to run Jekyll + # Create some files in the root directory. 
+ # .nojekyll: Need to have this file so that GitHub doesn't try to run Jekyll touch .nojekyll - # Delete all the files and replace with our new set + # CNAME: Set the custom domain name + echo "www.pygmt.org" > CNAME + # index.html: Redirect to the latest version + echo '' > index.html + # Delete all the files and replace with our new set echo -e "\nRemoving old files from previous builds of ${version}:" rm -rvf ${version} echo -e "\nCopying HTML files to ${version}:" diff --git a/.github/workflows/ci_doctests.yaml b/.github/workflows/ci_doctests.yaml index 9f9f874b30b..af28e6ac710 100644 --- a/.github/workflows/ci_doctests.yaml +++ b/.github/workflows/ci_doctests.yaml @@ -42,7 +42,7 @@ jobs: # Install Micromamba with conda-forge dependencies - name: Setup Micromamba - uses: mamba-org/setup-micromamba@v2.0.1 + uses: mamba-org/setup-micromamba@v2.0.4 with: environment-name: pygmt condarc: | @@ -50,7 +50,7 @@ jobs: - conda-forge - nodefaults create-args: >- - python=3.12 + python=3.13 gmt=6.5.0 numpy pandas @@ -60,7 +60,7 @@ jobs: contextily geopandas ipython - pyarrow + pyarrow-core rioxarray make pip diff --git a/.github/workflows/ci_tests.yaml b/.github/workflows/ci_tests.yaml index 68571f14135..28d9b60e896 100644 --- a/.github/workflows/ci_tests.yaml +++ b/.github/workflows/ci_tests.yaml @@ -1,24 +1,26 @@ # Test PyGMT on Linux/macOS/Windows # -# This workflow runs regular PyGMT tests and uploads test coverage reports stored -# in `.coverage.xml` to https://app.codecov.io/gh/GenericMappingTools/pygmt -# via the [Codecov GitHub Action](https://github.com/codecov/codecov-action). -# More codecov related configurations are stored in `.github/codecov.yml`. -# If any tests fail, it also uploads the diff images as workflow artifacts. +# This workflow runs regular PyGMT tests and uploads test coverage reports stored in +# `.coverage.xml` to https://app.codecov.io/gh/GenericMappingTools/pygmt via the +# [Codecov GitHub Action](https://github.com/codecov/codecov-action). 
More codecov +# related configurations are stored in `.github/codecov.yml`. If any tests fail, it also +# uploads the diff images as workflow artifacts. # # It is run: # 1. on every commit to the main branch -# 2. on every commit to the pull request branches, unless the pull requests only -# contain non-code changes. +# 2. on every commit to the pull request branches, unless the pull requests only contain +# non-code changes. # 3. when a new release is published # # It is also scheduled to run daily on the main branch. # -# In draft pull request, only two jobs on Linux are triggered to save on -# Continuous Integration resources: +# In draft pull request, only jobs on Linux are triggered to save on Continuous +# Integration resources: # -# - Minimum supported Python, NumPy, pandas, Xarray versions following [SPEC 0](https://scientific-python.org/specs/spec-0000/) -# - Latest Python, NumPy versions + optional packages (e.g. GeoPandas) +# - Minimum supported Python + core packages (minimum supported versions) +# + optional packages (minimum supported versions if any) +# - Latest Python + core packages (latest versions) + optional packages +# - Last release before the latest Python + core packages # name: Tests @@ -54,40 +56,39 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.10', '3.12'] - os: [ubuntu-latest, macos-latest, windows-latest] + python-version: ['3.11', '3.13'] + os: [ubuntu-latest, ubuntu-24.04-arm, macos-latest, windows-latest] # Is it a draft Pull Request (true or false)? 
isDraft: - ${{ github.event.pull_request.draft }} - # Only run two jobs (Ubuntu + Python 3.10/3.12) for draft PRs + # Only run jobs on Ubuntu for draft PRs exclude: + - os: ubuntu-24.04-arm + isDraft: true - os: macos-latest isDraft: true - os: windows-latest isDraft: true - # Pair Python 3.10 with the minimum supported versions of NumPy, pandas, Xarray - # and Python 3.12 with the latest versions of NumPy, pandas, Xarray - # Only install optional packages on Python 3.12 include: - - python-version: '3.10' - numpy-version: '1.24' + # Python 3.11 + core packages (minimum supported versions) + optional packages (minimum supported versions if any) + - python-version: '3.11' + numpy-version: '1.25' pandas-version: '=2.0' xarray-version: '=2023.04' - optional-packages: '' - - python-version: '3.12' - numpy-version: '2.1' + optional-packages: ' contextily geopandas<1 ipython pyarrow-core rioxarray sphinx-gallery' + # Python 3.13 + core packages (latest versions) + optional packages + - python-version: '3.13' + numpy-version: '2.2' pandas-version: '' xarray-version: '' - optional-packages: ' contextily geopandas ipython pyarrow rioxarray sphinx-gallery' - # The job below is for testing GeoPandas v0.x on Ubuntu. - # The python-version here can't be the versions in the matrix.python-version - # defined above. Otherwise, other jobs will be overridden by this one. + optional-packages: ' contextily geopandas>=1.0 ipython pyarrow-core rioxarray sphinx-gallery' + # Python 3.12 + core packages (Linux only) - os: 'ubuntu-latest' - python-version: '3.11' # Can't be 3.10 or 3.12. 
- numpy-version: '1.24' - pandas-version: '=2.1' + python-version: '3.12' + numpy-version: '' + pandas-version: '' xarray-version: '' - optional-packages: ' geopandas<1 pyarrow' + optional-packages: '' timeout-minutes: 30 defaults: @@ -114,7 +115,7 @@ jobs: # Install Micromamba with conda-forge dependencies - name: Setup Micromamba - uses: mamba-org/setup-micromamba@v2.0.1 + uses: mamba-org/setup-micromamba@v2.0.4 with: environment-name: pygmt condarc: | @@ -134,7 +135,6 @@ jobs: xarray${{ matrix.xarray-version }} netCDF4 packaging - dvc make pip python-build @@ -143,7 +143,6 @@ jobs: pytest-doctestplus pytest-mpl pytest-rerunfailures - pytest-xdist # Download cached remote files (artifacts) from GitHub - name: Download remote data from GitHub @@ -156,9 +155,19 @@ jobs: env: GH_TOKEN: ${{ github.token }} + - name: Install uv + uses: astral-sh/setup-uv@v5.2.2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dvc + run: | + uv pip install dvc + uv pip list + # Pull baseline image data from dvc remote (DAGsHub) - name: Pull baseline image data from dvc remote - run: dvc pull --no-run-cache --verbose && ls -lhR pygmt/tests/baseline/ + run: uv run dvc pull --no-run-cache --verbose && ls -lhR pygmt/tests/baseline/ # Install the package that we want to test - name: Install the package @@ -166,11 +175,11 @@ jobs: # Run the regular tests - name: Run tests - run: make test PYTEST_EXTRA="-r P -n auto --reruns 2" + run: make test PYTEST_EXTRA="-r P --reruns 2" # Upload diff images on test failure - name: Upload diff images if any test fails - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.6.0 if: failure() with: name: artifact-${{ runner.os }}-${{ matrix.python-version }} @@ -178,10 +187,10 @@ jobs: # Upload coverage to Codecov - name: Upload coverage to Codecov - uses: codecov/codecov-action@v4.6.0 + uses: codecov/codecov-action@v5.3.1 if: success() || failure() with: use_oidc: true - file: ./coverage.xml # optional + files: 
./coverage.xml # optional env_vars: OS,PYTHON,NUMPY fail_ci_if_error: false diff --git a/.github/workflows/ci_tests_dev.yaml b/.github/workflows/ci_tests_dev.yaml index 4491a2ec903..d5f859f4c5d 100644 --- a/.github/workflows/ci_tests_dev.yaml +++ b/.github/workflows/ci_tests_dev.yaml @@ -36,7 +36,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-24.04, macos-15, windows-2022] + os: [ubuntu-24.04, ubuntu-24.04-arm, macos-15, windows-2025] gmt_git_ref: [master] timeout-minutes: 30 defaults: @@ -57,7 +57,7 @@ jobs: # Install Micromamba with conda-forge dependencies - name: Setup Micromamba - uses: mamba-org/setup-micromamba@v2.0.1 + uses: mamba-org/setup-micromamba@v2.0.4 with: environment-name: pygmt condarc: | @@ -69,7 +69,7 @@ jobs: # environment cache is persistent for one week. cache-environment-key: micromamba-environment-${{ steps.date.outputs.date }} create-args: >- - python=3.12 + python=3.13 cmake make ninja @@ -153,7 +153,7 @@ jobs: --extra-index https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ numpy pandas xarray netCDF4 packaging \ build contextily dvc geopandas ipython pyarrow rioxarray \ - pytest pytest-cov pytest-doctestplus pytest-mpl pytest-rerunfailures pytest-xdist\ + pytest pytest-cov pytest-doctestplus pytest-mpl pytest-rerunfailures \ sphinx-gallery # Show installed pkg information for postmortem diagnostic @@ -181,13 +181,13 @@ jobs: # Run the tests - name: Test with pytest - run: make test PYTEST_EXTRA="-r P -n auto --reruns 2" + run: make test PYTEST_EXTRA="-r P --reruns 2" env: GMT_LIBRARY_PATH: ${{ runner.temp }}/gmt-install-dir/lib # Upload diff images on test failure - name: Upload diff images if any test fails - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.6.0 if: ${{ failure() }} with: name: artifact-GMT-${{ matrix.gmt_git_ref }}-${{ runner.os }} diff --git a/.github/workflows/ci_tests_legacy.yaml b/.github/workflows/ci_tests_legacy.yaml index 5ac5dcedbbc..3df38326bef 100644 --- 
a/.github/workflows/ci_tests_legacy.yaml +++ b/.github/workflows/ci_tests_legacy.yaml @@ -34,7 +34,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-20.04, macos-13, windows-2019] + os: [ubuntu-22.04, ubuntu-22.04-arm, macos-13, windows-2019] gmt_version: ['6.4'] timeout-minutes: 30 defaults: @@ -51,7 +51,7 @@ jobs: # Install Micromamba with conda-forge dependencies - name: Setup Micromamba - uses: mamba-org/setup-micromamba@v2.0.1 + uses: mamba-org/setup-micromamba@v2.0.4 with: environment-name: pygmt condarc: | @@ -59,10 +59,10 @@ jobs: - conda-forge - nodefaults create-args: >- - python=3.10 + python=3.11 gmt=${{ matrix.gmt_version }} ghostscript<10 - numpy + numpy<2 pandas xarray netCDF4 @@ -70,7 +70,7 @@ jobs: contextily geopandas ipython - pyarrow + pyarrow-core rioxarray sphinx-gallery make diff --git a/.github/workflows/format-command.yml b/.github/workflows/format-command.yml index 9d43e0ae286..b45cadbd057 100644 --- a/.github/workflows/format-command.yml +++ b/.github/workflows/format-command.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: # Generate token from GenericMappingTools bot - - uses: actions/create-github-app-token@v1.11.0 + - uses: actions/create-github-app-token@v1.11.1 id: generate-token with: app-id: ${{ secrets.APP_ID }} @@ -25,9 +25,9 @@ jobs: ref: ${{ github.event.client_payload.pull_request.head.ref }} # Setup Python environment - - uses: actions/setup-python@v5.3.0 + - uses: actions/setup-python@v5.4.0 with: - python-version: '3.12' + python-version: '3.13' # Install formatting tools - name: Install formatting tools diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml index b9994d873e9..ffb54043493 100644 --- a/.github/workflows/publish-to-pypi.yml +++ b/.github/workflows/publish-to-pypi.yml @@ -35,13 +35,9 @@ on: # - main jobs: - publish-pypi: - name: Publish to PyPI + build: + name: Build distribution 📦 runs-on: ubuntu-latest - permissions: - # This permission is mandatory for 
OIDC publishing - id-token: write - if: github.repository == 'GenericMappingTools/pygmt' steps: - name: Checkout @@ -49,11 +45,12 @@ jobs: with: # fetch all history so that setuptools-scm works fetch-depth: 0 + persist-credentials: false - name: Set up Python - uses: actions/setup-python@v5.3.0 + uses: actions/setup-python@v5.4.0 with: - python-version: '3.12' + python-version: '3.13' - name: Install dependencies run: python -m pip install build @@ -74,11 +71,54 @@ jobs: echo "Generated files:" ls -lh dist/ - - name: Publish to Test PyPI - uses: pypa/gh-action-pypi-publish@v1.12.2 + - name: Store the distribution packages + uses: actions/upload-artifact@v4.6.0 + with: + name: python-package-distributions + path: dist/ + + publish-to-testpypi: + name: Publish Python 🐍 distribution 📦 to TestPyPI + if: github.repository == 'GenericMappingTools/pygmt' + needs: + - build + runs-on: ubuntu-latest + environment: + name: testpypi + url: https://test.pypi.org/project/pygmt + permissions: + id-token: write # IMPORTANT: mandatory for trusted OIDC publishing + + steps: + - name: Download all the dists + uses: actions/download-artifact@v4.1.8 + with: + name: python-package-distributions + path: dist/ + + - name: Publish distribution 📦 to TestPyPI + uses: pypa/gh-action-pypi-publish@v1.12.4 with: repository-url: https://test.pypi.org/legacy/ - - name: Publish to PyPI - if: startsWith(github.ref, 'refs/tags') - uses: pypa/gh-action-pypi-publish@v1.12.2 + publish-pypi: + name: Publish Python 🐍 distribution 📦 to PyPI + if: github.repository == 'GenericMappingTools/pygmt' && startsWith(github.ref, 'refs/tags/') + needs: + - build + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/project/pygmt/ + permissions: + id-token: write # IMPORTANT: mandatory for trusted OIDC publishing + + steps: + - name: Download all the dists + uses: actions/download-artifact@v4.1.8 + with: + name: python-package-distributions + path: dist/ + + - name: Publish distribution 📦 to 
PyPI + uses: pypa/gh-action-pypi-publish@v1.12.4 diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 118778439e1..5865c1af305 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -18,7 +18,7 @@ jobs: steps: # Drafts your next Release notes as Pull Requests are merged into "main" - - uses: release-drafter/release-drafter@v6.0.0 + - uses: release-drafter/release-drafter@v6.1.0 with: # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml config-name: release-drafter.yml diff --git a/.github/workflows/style_checks.yaml b/.github/workflows/style_checks.yaml index 4c5b4947c85..8bd11e06a50 100644 --- a/.github/workflows/style_checks.yaml +++ b/.github/workflows/style_checks.yaml @@ -28,9 +28,9 @@ jobs: # Setup Python - name: Set up Python - uses: actions/setup-python@v5.3.0 + uses: actions/setup-python@v5.4.0 with: - python-version: '3.12' + python-version: '3.13' - name: Install packages run: | @@ -52,3 +52,15 @@ jobs: rm output.txt exit $nfiles fi + + - name: Ensure hyphens are not used in names of directories and Python files + run: | + git ls-files '*.py' | grep '-' > output.txt || true + git ls-tree -rd --name-only HEAD | grep '-' >> output.txt || true + nfiles=$(wc --lines output.txt | awk '{print $1}') + if [[ $nfiles > 0 ]]; then + echo "Following directories/files use hyphens in file names:" + cat output.txt + rm output.txt + exit $nfiles + fi diff --git a/.github/workflows/type_checks.yml b/.github/workflows/type_checks.yml index 525735d72b1..571124375cc 100644 --- a/.github/workflows/type_checks.yml +++ b/.github/workflows/type_checks.yml @@ -37,9 +37,9 @@ jobs: # Setup Python - name: Set up Python - uses: actions/setup-python@v5.3.0 + uses: actions/setup-python@v5.4.0 with: - python-version: '3.12' + python-version: '3.13' - name: Install packages run: | diff --git a/CITATION.cff b/CITATION.cff index e6d79e2530d..c9cd6c58c0a 100644 --- 
a/CITATION.cff +++ b/CITATION.cff @@ -40,14 +40,14 @@ authors: family-names: Yao affiliation: Nanyang Technological University, Singapore orcid: https://orcid.org/0000-0001-7036-4238 +- given-names: Jing-Hui + family-names: Tong + affiliation: National Taiwan Normal University, Taiwan + orcid: https://orcid.org/0009-0002-7195-3071 - given-names: Yohai family-names: Magen affiliation: Tel Aviv University, Israel orcid: https://orcid.org/0000-0002-4892-4013 -- given-names: Tong - family-names: Jing-Hui - affiliation: National Taiwan Normal University, Taiwan - orcid: https://orcid.org/0009-0002-7195-3071 - given-names: Kathryn family-names: Materna affiliation: US Geological Survey, USA @@ -76,9 +76,9 @@ authors: family-names: Wessel affiliation: University of Hawaiʻi at Mānoa, USA orcid: https://orcid.org/0000-0001-5708-7336 -date-released: 2024-09-05 -doi: 10.5281/zenodo.13679420 +date-released: 2025-02-01 +doi: 10.5281/zenodo.14742338 license: BSD-3-Clause repository-code: https://github.com/GenericMappingTools/pygmt type: software -version: 0.13.0 +version: 0.14.1 diff --git a/LICENSE.txt b/LICENSE.txt index 6411f912cea..c6c569c4bc6 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright (c) 2017-2024 The PyGMT Developers +Copyright (c) 2017-2025 The PyGMT Developers All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, diff --git a/Makefile b/Makefile index 4494f2996c3..2dd8243f95d 100644 --- a/Makefile +++ b/Makefile @@ -55,7 +55,7 @@ doctest: _runtest # run tests without image comparisons # run pytest without the --mpl option to disable image comparisons # use '-o addopts' to override 'addopts' settings in pyproject.toml file -test_no_images: PYTEST_ARGS=-o addopts="--verbose --durations=0 --durations-min=0.2 --doctest-modules" +test_no_images: PYTEST_ARGS=-o addopts="--verbose --color=yes --durations=0 --durations-min=0.2 --doctest-modules" test_no_images: _runtest format: diff --git a/README.md b/README.md index 2c936726327..13f1f023e02 100644 --- a/README.md +++ b/README.md @@ -22,22 +22,22 @@ ## Why PyGMT? -A beautiful map is worth a thousand words. To truly understand how powerful PyGMT is, play with it online on -[Binder](https://github.com/GenericMappingTools/try-gmt)! For a quicker introduction, check out our -[3 minute overview](https://youtu.be/4iPnITXrxVU)! +A beautiful map is worth a thousand words. To truly understand how powerful PyGMT is, +play with it online on [Binder](https://github.com/GenericMappingTools/try-gmt)! For a +quicker introduction, check out our [3 minute overview](https://youtu.be/4iPnITXrxVU)! -Afterwards, feel free to look at our [Tutorials](https://www.pygmt.org/latest/tutorials), visit the -[Gallery](https://www.pygmt.org/latest/gallery), and check out some +Afterwards, feel free to look at our [Tutorials](https://www.pygmt.org/latest/tutorials), +visit the [Gallery](https://www.pygmt.org/latest/gallery), and check out some [external PyGMT examples](https://www.pygmt.org/latest/external_resources.html)! 
-![Quick Introduction to PyGMT YouTube Video](doc/_static/scipy2022-youtube-thumbnail.jpg) +![Quick Introduction to PyGMT YouTube Video](https://raw.githubusercontent.com/GenericMappingTools/pygmt/refs/heads/main/doc/_static/scipy2022-youtube-thumbnail.jpg) ## About -PyGMT is a library for processing geospatial and geophysical data and making publication-quality -maps and figures. It provides a Pythonic interface for the -[Generic Mapping Tools (GMT)](https://github.com/GenericMappingTools/gmt), a command-line program -widely used across the Earth, Ocean, and Planetary sciences and beyond. +PyGMT is a library for processing geospatial and geophysical data and making +publication-quality maps and figures. It provides a Pythonic interface for the +[Generic Mapping Tools (GMT)](https://github.com/GenericMappingTools/gmt), a command-line +program widely used across the Earth, Ocean, and Planetary sciences and beyond. ## Project goals @@ -45,8 +45,9 @@ widely used across the Earth, Ocean, and Planetary sciences and beyond. - Build a Pythonic API for GMT. - Interface with the GMT C API directly using ctypes (no system calls). - Support for rich display in the Jupyter notebook. -- Integration with the [scientific Python ecosystem](https://scientific-python.org/): `numpy.ndarray` or - `pandas.DataFrame` for data tables, `xarray.DataArray` for grids, and `geopandas.GeoDataFrame` for geographical data. +- Integration with the [scientific Python ecosystem](https://scientific-python.org/): + `numpy.ndarray` or `pandas.DataFrame` for data tables, `xarray.DataArray` for grids, + and `geopandas.GeoDataFrame` for geographical data. 
## Quickstart @@ -69,7 +70,8 @@ For other ways to install `pygmt`, see the [full installation instructions](http ### Getting started As a starting point, you can open a [Python interpreter](https://docs.python.org/3/tutorial/interpreter.html) -or a [Jupyter notebook](https://docs.jupyter.org/en/latest/running.html), and try the following example: +or a [Jupyter notebook](https://docs.jupyter.org/en/latest/running.html), and try the +following example: ``` python import pygmt @@ -79,18 +81,18 @@ fig.text(position="MC", text="PyGMT", font="80p,Helvetica-Bold,red@75") fig.show() ``` -You should see a global map with land and water masses colored in tan and lightblue, respectively. On top, -there should be the semi-transparent text "PyGMT". For more examples, please have a look at the -[Gallery](https://www.pygmt.org/latest/gallery/index.html) and +You should see a global map with land and water masses colored in tan and lightblue, +respectively. On top, there should be the semi-transparent text "PyGMT". For more examples, +please have a look at the [Gallery](https://www.pygmt.org/latest/gallery/index.html) and [Tutorials](https://www.pygmt.org/latest/tutorials/index.html). ## Contacting us - Most discussion happens [on GitHub](https://github.com/GenericMappingTools/pygmt). - Feel free to [open an issue](https://github.com/GenericMappingTools/pygmt/issues/new) or comment on any open - issue or pull request. -- We have a [Discourse forum](https://forum.generic-mapping-tools.org/c/questions/pygmt-q-a) where you can ask - questions and leave comments. + Feel free to [open an issue](https://github.com/GenericMappingTools/pygmt/issues/new) + or comment on any open issue or pull request. +- We have a [Discourse forum](https://forum.generic-mapping-tools.org/c/questions/pygmt-q-a) + where you can ask questions and leave comments. ## Contributing @@ -109,30 +111,33 @@ to see how you can help and give feedback. **We want your help.** No, really. 
-There may be a little voice inside your head that is telling you that you're not ready to be an open source -contributor; that your skills aren't nearly good enough to contribute. What could you possibly offer? +There may be a little voice inside your head that is telling you that you're not ready +to be an open source contributor; that your skills aren't nearly good enough to +contribute. What could you possibly offer? We assure you that the little voice in your head is wrong. -**Being a contributor doesn't just mean writing code.** Equally important contributions include: writing or -proof-reading documentation, suggesting or implementing tests, or even giving feedback about the project -(including giving feedback about the contribution process). If you're coming to the project with fresh eyes, -you might see the errors and assumptions that seasoned contributors have glossed over. If you can write any -code at all, you can contribute code to open source. We are constantly trying out new skills, making mistakes, -and learning from those mistakes. That's how we all improve and we are happy to help others learn. +**Being a contributor doesn't just mean writing code.** Equally important contributions +include: writing or proof-reading documentation, suggesting or implementing tests, or +even giving feedback about the project (including giving feedback about the contribution +process). If you're coming to the project with fresh eyes, you might see the errors and +assumptions that seasoned contributors have glossed over. If you can write any code at +all, you can contribute code to open source. We are constantly trying out new skills, +making mistakes, and learning from those mistakes. That's how we all improve and we are +happy to help others learn. *This disclaimer was adapted from the* [MetPy project](https://github.com/Unidata/MetPy). ## Citing PyGMT PyGMT is a community developed project. 
See the -[AUTHORS.md](https://github.com/GenericMappingTools/pygmt/blob/main/AUTHORS.md) file on GitHub for a list of -the people involved and a definition of the term "PyGMT Developers". Feel free to cite our work in your -research using the following BibTeX: +[AUTHORS.md](https://github.com/GenericMappingTools/pygmt/blob/main/AUTHORS.md) file +on GitHub for a list of the people involved and a definition of the term "PyGMT Developers". +Feel free to cite our work in your research using the following BibTeX: ``` @software{ - pygmt_2024_13679420, + pygmt_2025_14742338, author = {Tian, Dongdong and Uieda, Leonardo and Leong, Wei Ji and @@ -142,8 +147,8 @@ research using the following BibTeX: Jones, Max and Toney, Liam and Yao, Jiayuan and - Magen, Yohai and Tong, Jing-Hui and + Magen, Yohai and Materna, Kathryn and Belem, Andre and Newton, Tyler and @@ -152,20 +157,20 @@ research using the following BibTeX: Quinn, Jamie and Wessel, Paul}, title = {{PyGMT: A Python interface for the Generic Mapping Tools}}, - month = sep, - year = 2024, + month = feb, + year = 2025, publisher = {Zenodo}, - version = {0.13.0}, - doi = {10.5281/zenodo.13679420}, - url = {https://doi.org/10.5281/zenodo.13679420} + version = {0.14.1}, + doi = {10.5281/zenodo.14742338}, + url = {https://doi.org/10.5281/zenodo.14742338} } ``` To cite a specific version of PyGMT, go to our Zenodo page at -and use the "Export to BibTeX" function there. It is also strongly recommended to cite the -[GMT 6 paper](https://doi.org/10.1029/2019GC008515) (which PyGMT wraps around). Note that some modules -like `dimfilter`, `surface`, and `x2sys` also have their dedicated citations. Further information for -all these can be found at . +and use the "Export to BibTeX" function there. It is also strongly recommended to cite +the [GMT 6 paper](https://doi.org/10.1029/2019GC008515) (which PyGMT wraps around). Note +that some modules like `dimfilter`, `surface`, and `x2sys` also have their dedicated +citations. 
Further information for all these can be found at . ## License @@ -191,14 +196,7 @@ Other official wrappers for GMT: ## Minimum supported versions PyGMT has adopted [SPEC 0](https://scientific-python.org/specs/spec-0000/) alongside the -rest of the scientific Python ecosystem, and therefore: - -- Support for Python versions be dropped 3 years after their initial release. -- Support for core package dependencies (NumPy, pandas, Xarray) be dropped 2 years after - their initial release. - -Similarly, the PyGMT team has decided to discontinue support for GMT versions 3 years -after their initial release. - -Please see [Minimum Supported Versions](https://www.pygmt.org/dev/minversions.html) for -the minimum supported versions of GMT, Python and core package dependencies. +rest of the scientific Python ecosystem, and made a few extensions based on the needs of +the project. Please see [Minimum Supported Versions](https://www.pygmt.org/dev/minversions.html) +for the detailed policy and the minimum supported versions of GMT, Python and core +package dependencies. diff --git a/ci/requirements/docs.yml b/ci/requirements/docs.yml index 5902672fba5..5a233e36c80 100644 --- a/ci/requirements/docs.yml +++ b/ci/requirements/docs.yml @@ -4,7 +4,7 @@ channels: - nodefaults dependencies: # Required dependencies - - python=3.12 + - python=3.13 - gmt=6.5.0 - ghostscript=10.04.0 - numpy @@ -14,20 +14,24 @@ dependencies: - packaging # Optional dependencies - contextily - - geopandas<1.0 + - geopandas - ipython - - pyarrow + - pyarrow-core - rioxarray # Development dependencies (general) - make - pip - python-build # Dev dependencies (building documentation) + - geodatasets - myst-nb - panel - - sphinx + - sphinx>=6.2 - sphinx-autodoc-typehints - sphinx-copybutton - sphinx-design - sphinx-gallery - sphinx_rtd_theme<3.0 + # Dev dependencies (building PDF documentation) + # 'sphinxcontrib-svg2pdfconverter' is required since it's added to `extensions`. 
+ - sphinxcontrib-svg2pdfconverter diff --git a/doc/Makefile b/doc/Makefile index 63ecd10f5ac..ea146705463 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -1,14 +1,12 @@ # Makefile for Sphinx documentation # You can set these variables from the command line. -SPHINXOPTS = -j auto -SPHINXBUILD = sphinx-build +SPHINXOPTS ?= -j auto +SPHINXBUILD ?= sphinx-build SPHINXAUTOGEN = sphinx-autogen +SOURCEDIR = . BUILDDIR = _build -# Internal variables. -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(SPHINXOPTS) . - .PHONY: help all api html server clean help: @@ -17,6 +15,7 @@ help: @echo " api generate rst source files of API documentation" @echo " html build the HTML files from the existing rst sources" @echo " html-noplot build the HTML files without running any examples" + @echo " pdf build the PDF documentation" @echo " server make a local HTTP server for previewing the built documentation" @echo " clean clean up built and generated files" @@ -28,23 +27,31 @@ api: @echo $(SPHINXAUTOGEN) -i -t _templates -o api/generated api/*.rst -html: api +html latex: api @echo - @echo "Building HTML files." + @echo "Building "$@" files." @echo # Set PYGMT_USE_EXTERNAL_DISPLAY to "false" to disable external display - PYGMT_USE_EXTERNAL_DISPLAY="false" $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + PYGMT_USE_EXTERNAL_DISPLAY="false" $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + @echo "Build finished. The files are in $(BUILDDIR)/$@." html-noplot: api @echo @echo "Building HTML files without example plots." @echo - $(SPHINXBUILD) -D plot_gallery=0 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + $(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) -D plot_gallery=0 @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." +pdf: latex + @echo + @echo "Building PDF via Tectonic." 
+ @echo + tectonic -X compile $(BUILDDIR)/latex/pygmt.tex + @echo + @echo "PDF build finished. The PDF file is in $(BUILDDIR)/latex/pygmt.pdf." + server: @echo @echo "Running a server on port 8009." @@ -55,7 +62,5 @@ server: clean: rm -rf $(BUILDDIR) rm -rf api/generated - rm -rf intro - rm -rf tutorials - rm -rf gallery - rm -rf projections + rm -rf intro tutorials gallery projections + rm -rf sg_execution_times.rst diff --git a/doc/_static/version_switch.js b/doc/_static/version_switch.js index acedd8c1d4c..c904d9c107b 100644 --- a/doc/_static/version_switch.js +++ b/doc/_static/version_switch.js @@ -12,6 +12,8 @@ var all_versions = { 'latest': 'latest', 'dev': 'dev', + 'v0.14.1': 'v0.14.1', + 'v0.14.0': 'v0.14.0', 'v0.13.0': 'v0.13.0', 'v0.12.0': 'v0.12.0', 'v0.11.0': 'v0.11.0', diff --git a/doc/_templates/autosummary/enums.rst b/doc/_templates/autosummary/enums.rst new file mode 100644 index 00000000000..26883239f1b --- /dev/null +++ b/doc/_templates/autosummary/enums.rst @@ -0,0 +1,7 @@ +{{ fullname | escape | underline }} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :member-order: bysource diff --git a/doc/api/index.rst b/doc/api/index.rst index 01fac7d7a89..25de6d44adf 100644 --- a/doc/api/index.rst +++ b/doc/api/index.rst @@ -29,12 +29,14 @@ Plotting map elements Figure.basemap Figure.coast Figure.colorbar + Figure.hlines Figure.inset Figure.legend Figure.logo Figure.solar Figure.text Figure.timestamp + Figure.vlines Plotting tabular data ~~~~~~~~~~~~~~~~~~~~~ @@ -195,6 +197,20 @@ Getting metadata from tabular or grid data: info grdinfo +Enums +----- + +.. currentmodule:: pygmt.enums + +.. autosummary:: + :toctree: generated + :nosignatures: + :template: autosummary/enums.rst + + GridRegistration + GridType + +.. currentmodule:: pygmt Miscellaneous ------------- @@ -205,8 +221,6 @@ Miscellaneous which show_versions -.. 
currentmodule:: pygmt - Datasets -------- @@ -221,10 +235,14 @@ and store them in GMT's user data directory. datasets.load_black_marble datasets.load_blue_marble datasets.load_earth_age + datasets.load_earth_deflection + datasets.load_earth_dist datasets.load_earth_free_air_anomaly datasets.load_earth_geoid datasets.load_earth_magnetic_anomaly datasets.load_earth_mask + datasets.load_earth_mean_dynamic_topography + datasets.load_earth_mean_sea_surface datasets.load_earth_relief datasets.load_earth_vertical_gravity_gradient datasets.load_mars_relief @@ -317,7 +335,6 @@ Low level access (these are mostly used by the :mod:`pygmt.clib` package): clib.Session.read_virtualfile clib.Session.extract_region clib.Session.get_libgmt_func - clib.Session.virtualfile_from_data clib.Session.virtualfile_from_grid clib.Session.virtualfile_from_stringio clib.Session.virtualfile_from_matrix diff --git a/doc/changes.md b/doc/changes.md index 2efe67a7f1d..322b09fffa4 100644 --- a/doc/changes.md +++ b/doc/changes.md @@ -1,5 +1,135 @@ # Changelog +## Release v0.14.1 (2025/02/01) + +[![Digital Object Identifier for PyGMT v0.14.1](https://zenodo.org/badge/DOI/10.5281/zenodo.14742338.svg)](https://doi.org/10.5281/zenodo.14742338) + +### Highlights + +- **Patch release fixing critical bugs in PyGMT v0.14.0** +- Fix the bug of converting Python sequence of datetime-like objects ([#3760](https://github.com/GenericMappingTools/pygmt/pull/3760)) + +### Maintenance + +- CI: Separate jobs for publishing to TestPyPI and PyPI ([#3742](https://github.com/GenericMappingTools/pygmt/pull/3742)) +- clib.conversion._to_numpy: Add tests for Python sequence of datetime-like objects ([#3758](https://github.com/GenericMappingTools/pygmt/pull/3758)) +- Fix an image in README.md (broken on PyPI) and rewrap to 88 characters ([#3740](https://github.com/GenericMappingTools/pygmt/pull/3740)) +- Fix the dataset link in the RGB image gallery example ([#3781](https://github.com/GenericMappingTools/pygmt/pull/3781)) +- 
Update License year to 2025 ([#3737](https://github.com/GenericMappingTools/pygmt/pull/3737)) + +**Full Changelog**: + +### Contributors + +* [Dongdong Tian](https://github.com/seisman) +* [Wei Ji Leong](https://github.com/weiji14) + +--- + +## Release v0.14.0 (2024/12/31) + +[![Digital Object Identifier for PyGMT v0.14.0](https://zenodo.org/badge/DOI/10.5281/zenodo.14535921.svg)](https://doi.org/10.5281/zenodo.14535921) + +### Highlights + +* 🎉 **Fourteenth minor release of PyGMT** 🎉 +* Bump minimum supported version to GMT>=6.4.0 ([#3450](https://github.com/GenericMappingTools/pygmt/pull/3450)) +* Two new plotting methods and six new functions to access more GMT remote datasets +* PyArrow as an optional dependency and improved support of PyArrow data types ([#3592](https://github.com/GenericMappingTools/pygmt/pull/3592)) + +### New Features + +* Add Figure.hlines for plotting horizontal lines ([#923](https://github.com/GenericMappingTools/pygmt/pull/923)) +* Add Figure.vlines for plotting vertical lines ([#3726](https://github.com/GenericMappingTools/pygmt/pull/3726)) +* Add load_black_marble to load "Black Marble" dataset ([#3469](https://github.com/GenericMappingTools/pygmt/pull/3469)) +* Add load_blue_marble to load "Blue Marble" dataset ([#2235](https://github.com/GenericMappingTools/pygmt/pull/2235)) +* Add load_earth_deflection to load "IGPP Earth east-west and north-south deflection" datasets ([#3728](https://github.com/GenericMappingTools/pygmt/pull/3728)) +* Add load_earth_dist to load "GSHHG Earth distance to shoreline" dataset ([#3706](https://github.com/GenericMappingTools/pygmt/pull/3706)) +* Add load_earth_mean_dynamic_topography to load "CNES Earth Mean Dynamic Topography" dataset ([#3718](https://github.com/GenericMappingTools/pygmt/pull/3718)) +* Add load_earth_mean_sea_surface to load "CNES Earth Mean Sea Surface" dataset ([#3717](https://github.com/GenericMappingTools/pygmt/pull/3717)) +* load_earth_free_air_anomaly: Add "uncertainty" parameter 
to load the "IGPP Earth free-air anomaly uncertainty" dataset ([#3727](https://github.com/GenericMappingTools/pygmt/pull/3727)) + + +### Enhancements + +* Figure.plot: Add the "symbol" parameter to support plotting data points with varying symbols ([#1117](https://github.com/GenericMappingTools/pygmt/pull/1117)) +* Figure.plot3d: Add the "symbol" parameter to support plotting data points with varying symbols ([#3559](https://github.com/GenericMappingTools/pygmt/pull/3559)) +* Figure.legend: Support passing a StringIO object as the legend specification ([#3438](https://github.com/GenericMappingTools/pygmt/pull/3438)) +* load_tile_map: Add parameter "crs" to set the CRS of the returned dataarray ([#3554](https://github.com/GenericMappingTools/pygmt/pull/3554)) +* PyArrow: Support pyarrow arrays with string/large_string/string_view types ([#3619](https://github.com/GenericMappingTools/pygmt/pull/3619)) +* Support 1-D/2-D numpy arrays with longlong and ulonglong dtype ([#3566](https://github.com/GenericMappingTools/pygmt/pull/3566)) +* GMT_IMAGE: Implement the to_dataarray method for 3-band images ([#3128](https://github.com/GenericMappingTools/pygmt/pull/3128)) +* Ensure non-ASCII characters are typeset correctly even if PS_CHAR_ENCODING is not "ISOLatin1+" ([#3611](https://github.com/GenericMappingTools/pygmt/pull/3611)) +* Add enums GridRegistration and GridType for grid registration and type ([#3693](https://github.com/GenericMappingTools/pygmt/pull/3693)) + +### Deprecations + +* SPEC 0: Bump minimum supported versions to Python 3.11, NumPy 1.25, pandas>=2.0 and xarray>=2023.04 ([#3460](https://github.com/GenericMappingTools/pygmt/pull/3460), [#3606](https://github.com/GenericMappingTools/pygmt/pull/3606), [#3697](https://github.com/GenericMappingTools/pygmt/pull/3697)) +* clib.Session.virtualfile_from_vectors: Now takes a sequence of vectors as its single argument (Passing multiple arguments will be unsupported in v0.16.0) 
([#3522](https://github.com/GenericMappingTools/pygmt/pull/3522)) +* Remove the deprecated build_arg_string function (deprecated since v0.12.0) ([#3427](https://github.com/GenericMappingTools/pygmt/pull/3427)) +* Figure.grdcontour: Remove the deprecated syntax for the 'annotation' parameter (deprecated since v0.12.0) ([#3428](https://github.com/GenericMappingTools/pygmt/pull/3428)) + +### Bug Fixes + +* launch_external_viewer: Use full path when opening the file in a web browser ([#3647](https://github.com/GenericMappingTools/pygmt/pull/3647)) +* PyArrow: Map date32[day]/date64[ms] dtypes in pandas objects to np.datetime64 with correct date/time units ([#3617](https://github.com/GenericMappingTools/pygmt/pull/3617)) +* clib.session: Add the GMT_SESSION_NOGDALCLOSE flag to keep GDAL open ([#3672](https://github.com/GenericMappingTools/pygmt/pull/3672)) +* Set the "Conventions" attribute to "CF-1.7" for netCDF grids only ([#3463](https://github.com/GenericMappingTools/pygmt/pull/3463)) +* Fix the conversion error for pandas.Series with missing values in pandas<=2.1 ([#3505](https://github.com/GenericMappingTools/pygmt/pull/3505), [#3596](https://github.com/GenericMappingTools/pygmt/pull/3596)) +* GeoPandas: Explicitly convert columns with overflow integers to avoid OverflowError with fiona 1.10 ([#3455](https://github.com/GenericMappingTools/pygmt/pull/3455)) +* Figure.plot/Figure.plot3d: Improve the check of the "style" parameter for "v" or "V" ([#3603](https://github.com/GenericMappingTools/pygmt/pull/3603)) +* Correctly reserve the grid data dtype by converting ctypes array to numpy array with np.ctypeslib.as_array ([#3446](https://github.com/GenericMappingTools/pygmt/pull/3446)) +* **Breaking**: Figure.text: Fix typesetting of integers when mixed with floating-point values ([#3493](https://github.com/GenericMappingTools/pygmt/pull/3493)) + +### Documentation + +* Add basic tutorial "Plotting polygons" 
([#3593](https://github.com/GenericMappingTools/pygmt/pull/3593)) +* Update the gallery example for plotting lines with LineString/MultiLineString geometry ([#3711](https://github.com/GenericMappingTools/pygmt/pull/3711)) +* Add the PyGMT ecosystem page ([#3475](https://github.com/GenericMappingTools/pygmt/pull/3475)) +* Document the support policy for optional packages ([#3616](https://github.com/GenericMappingTools/pygmt/pull/3616)) +* Document the environment variables that can affect the behavior of PyGMT ([#3432](https://github.com/GenericMappingTools/pygmt/pull/3432)) +* Document the built-in patterns in the Technical Reference section ([#3466](https://github.com/GenericMappingTools/pygmt/pull/3466)) +* Document Continuous Benchmarking in Maintainers Guides ([#3631](https://github.com/GenericMappingTools/pygmt/pull/3631)) +* Add instructions for installing optional dependencies ([#3506](https://github.com/GenericMappingTools/pygmt/pull/3506)) +* Update "PyData Ecosystem" to "Scientific Python Ecosystem" ([#3447](https://github.com/GenericMappingTools/pygmt/pull/3447)) +* Figure.savefig: Clarify that the "transparent" parameter also works for the PNG file associated with the KML format ([#3579](https://github.com/GenericMappingTools/pygmt/pull/3579)) +* Add the PyGMT talk at AGU24 to the "Overview" section ([#3685](https://github.com/GenericMappingTools/pygmt/pull/3685)) +* Add the GMT/PyGMT pre-conference workshop at AGU24 to the "External resources" section ([#3689](https://github.com/GenericMappingTools/pygmt/pull/3689)) +* Add TODO comments in the maintainers guides and update the release checklist ([#3724](https://github.com/GenericMappingTools/pygmt/pull/3724)) + +### Maintenance + +* **Breaking**: data_kind: data is None and required now returns the "empty" kind ([#3482](https://github.com/GenericMappingTools/pygmt/pull/3482)) +* **Breaking**: data_kind: Now "matrix" represents a 2-D numpy array and unrecognized data types fall back to "vectors" 
([#3351](https://github.com/GenericMappingTools/pygmt/pull/3351)) +* Add Support for Python 3.13 ([#3490](https://github.com/GenericMappingTools/pygmt/pull/3490)) +* Add the Session.virtualfile_from_stringio method to allow StringIO input for certain functions/methods ([#3326](https://github.com/GenericMappingTools/pygmt/pull/3326)) +* Add "geodatasets" as a dependency for docs and update the choropleth example ([#3719](https://github.com/GenericMappingTools/pygmt/pull/3719)) +* PyArrow: Check compatibility of pyarrow.array with string type ([#2933](https://github.com/GenericMappingTools/pygmt/pull/2933)) +* Rename sphinx-gallery's README.txt to GALLERY_HEADER.rst and require Sphinx-Gallery>=0.17.0 ([#3348](https://github.com/GenericMappingTools/pygmt/pull/3348)) +* clib.conversion: Remove the as_c_contiguous function and use np.ascontiguousarray instead ([#3492](https://github.com/GenericMappingTools/pygmt/pull/3492)) +* Use TODO comments to track deprecations and workarounds ([#3722](https://github.com/GenericMappingTools/pygmt/pull/3722)) +* Move Figure.psconvert into a separate file ([#3553](https://github.com/GenericMappingTools/pygmt/pull/3553)) +* Improve the data type checking for 2-D arrays passed to the GMT C API ([#3563](https://github.com/GenericMappingTools/pygmt/pull/3563)) +* Enable ruff's TD (flake8-todos), COM (flake8-commas), TRY (tryceratops), and EM (flake8-errmsg) rules ([#3723](https://github.com/GenericMappingTools/pygmt/pull/3723), [#3531](https://github.com/GenericMappingTools/pygmt/pull/3531), [#3665](https://github.com/GenericMappingTools/pygmt/pull/3665), [#3661](https://github.com/GenericMappingTools/pygmt/pull/3661)) +* CI: Install pyarrow-core instead of pyarrow from conda-forge ([#3698](https://github.com/GenericMappingTools/pygmt/pull/3698)) +* CI: Ensure no hyphens in Python file and directory names in the "Style Checks" workflow ([#3703](https://github.com/GenericMappingTools/pygmt/pull/3703)) +* Bump to ruff>=0.8.0 and rename 
rule TCH to TC ([#3662](https://github.com/GenericMappingTools/pygmt/pull/3662)) +* Bump to Ghostscript 10.04.0 ([#3443](https://github.com/GenericMappingTools/pygmt/pull/3443)) +* Add enums GridFormat for GMT grid format ID ([#3449](https://github.com/GenericMappingTools/pygmt/pull/3449)) + +**Full Changelog**: + +### Contributors + +* [Dongdong Tian](https://github.com/seisman) +* [Yvonne Fröhlich](https://github.com/yvonnefroehlich) +* [Wei Ji Leong](https://github.com/weiji14) +* [Michael Grund](https://github.com/michaelgrund) +* [Will Schlitzer](https://github.com/willschlitzer) +* [Jiayuan Yao](https://github.com/core-man) + +--- + ## Release v0.13.0 (2024/09/05) [![Digital Object Identifier for PyGMT v0.13.0](https://zenodo.org/badge/DOI/10.5281/zenodo.13679420.svg)](https://doi.org/10.5281/zenodo.13679420) @@ -178,6 +308,8 @@ * [Michael Grund](https://github.com/michaelgrund) * [Wei Ji Leong](https://github.com/weiji14) +--- + ## Release v0.11.0 (2024/02/01) [![Digital Object Identifier for PyGMT v0.11.0](https://zenodo.org/badge/DOI/10.5281/zenodo.10578540.svg)](https://doi.org/10.5281/zenodo.10578540) diff --git a/doc/conf.py b/doc/conf.py index 1586601804d..91f032adb71 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -28,16 +28,17 @@ "sphinx.ext.autodoc", "sphinx.ext.autosummary", "sphinx.ext.coverage", - "sphinx.ext.mathjax", "sphinx.ext.doctest", - "sphinx.ext.viewcode", "sphinx.ext.extlinks", "sphinx.ext.intersphinx", + "sphinx.ext.mathjax", "sphinx.ext.napoleon", + "sphinx.ext.viewcode", "sphinx_autodoc_typehints", "sphinx_copybutton", "sphinx_design", "sphinx_gallery.gen_gallery", + "sphinxcontrib.cairosvgconverter", ] # Suppress warnings @@ -55,7 +56,7 @@ myst_enable_extensions = [ "attrs_inline", # Allow inline attributes after images "colon_fence", # Allow code fences using colons - "substitution", # Allow substituitions + "substitution", # Allow substitutions ] # These enable substitutions using {{ key }} in the Markdown files myst_substitutions = 
{ @@ -85,6 +86,7 @@ "contextily": ("https://contextily.readthedocs.io/en/stable/", None), "geopandas": ("https://geopandas.org/en/stable/", None), "numpy": ("https://numpy.org/doc/stable/", None), + "pyarrow": ("https://arrow.apache.org/docs/", None), "python": ("https://docs.python.org/3/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), "rasterio": ("https://rasterio.readthedocs.io/en/stable/", None), @@ -170,7 +172,7 @@ ] source_suffix = ".rst" -needs_sphinx = "1.8" +needs_sphinx = "6.2" # Encoding of source files source_encoding = "utf-8-sig" root_doc = "index" @@ -209,12 +211,10 @@ repository = "GenericMappingTools/pygmt" repository_url = "https://github.com/GenericMappingTools/pygmt" if __commit__: - commit_link = ( - f'{ __commit__[:8] }' - ) + commit_link = f'{__commit__[:8]}' else: commit_link = ( - f'{ __version__ }' + f'{__version__}' ) html_context = { "menu_links": [ @@ -249,3 +249,6 @@ "github_version": "main", "commit": commit_link, } + +# Configurations for LaTeX +latex_engine = "xelatex" diff --git a/doc/contributing.md b/doc/contributing.md index 5dd93418f53..36b0d76e984 100644 --- a/doc/contributing.md +++ b/doc/contributing.md @@ -130,9 +130,9 @@ our tests. This way, the *main* branch is always stable. integrated separately. - Bug fixes should be submitted in separate PRs. * How to write and submit a PR - - Use underscores for all Python (*.py) files as per - [PEP8](https://www.python.org/dev/peps/pep-0008/), not hyphens. Directory - names should also use underscores instead of hyphens. + - Use underscores for all Python (\*.py) files as per + [PEP8](https://www.python.org/dev/peps/pep-0008/), not hyphens. Directory names + should also use underscores instead of hyphens. - Describe what your PR changes and *why* this is a good thing. Be as specific as you can. The PR description is how we keep track of the changes made to the project over time. 
diff --git a/doc/ecosystem.md b/doc/ecosystem.md index 3e265c2c5eb..25490f9f7ad 100644 --- a/doc/ecosystem.md +++ b/doc/ecosystem.md @@ -63,8 +63,7 @@ can be directly used in data processing and plotting functions/methods of PyGMT. add those tiles as basemap to matplotlib figures or write tile maps to disk into geospatial raster files. -In PyGMT, {func}`pygmt.datasets.load_tile_map` and {class}`pygmt.Figure.tilemap` rely -on it. +In PyGMT, {func}`pygmt.datasets.load_tile_map` and {meth}`pygmt.Figure.tilemap` rely on it. ### rioxarray @@ -73,13 +72,16 @@ of rasterio, it enables seamless reading, writing, and manipulation of multi-dim arrays with geospatial attributes such as coordinate reference systems (CRS) and spatial extent (bounds). -Currently, PyGMT relies on [rioxarray][] to save multi-band rasters to temporary files -in GeoTIFF format, to support processing and plotting 3-D {class}`xarray.DataArray` -images. +PyGMT relies on [rioxarray][] in several aspects: + +1. To save multi-band rasters to temporary files in GeoTIFF format, to support processing + and plotting 3-D {class}`xarray.DataArray` images. +2. To write CRS information to the {class}`xarray.DataArray` objects. +3. To reproject raster tiles to the target CRS in {func}`pygmt.datasets.load_tile_map`. ```{note} -We're working towards removing the dependency of the [rioxarray][] package in -[PR #3468](https://github.com/GenericMappingTools/pygmt/pull/3468). +We're working towards avoiding temporary files when processing/plotting multi-band +rasters in [PR #3468](https://github.com/GenericMappingTools/pygmt/pull/3468). ``` ### PyArrow @@ -94,9 +96,9 @@ Python objects. They are based on the C++ implementation of Arrow. ```{note} If you have [PyArrow][] installed, PyGMT does have some initial support for `pandas.Series` and `pandas.DataFrame` objects with Apache Arrow-backed arrays. -Specifically, only uint/int/float and date32/date64 are supported for now. 
-Support for string Array dtypes, Duration types and GeoArrow geometry types is still a work in progress. -For more details, see +Specifically, only uint/int/float, date32/date64 and string types are supported for now. +Support for Duration types and GeoArrow geometry types is still a work in progress. For +more details, see [issue #2800](https://github.com/GenericMappingTools/pygmt/issues/2800). ``` diff --git a/doc/external_resources.md b/doc/external_resources.md index d8be6a8ae5e..30c04764e50 100644 --- a/doc/external_resources.md +++ b/doc/external_resources.md @@ -12,6 +12,16 @@ to submit a pull request with your recommended addition to the :::::{grid} 1 2 2 3 +::::{grid-item-card} 2024 AGU PREWS9: Mastering Geospatial Visualizations with GMT/PyGMT +:link: https://www.generic-mapping-tools.org/agu24workshop/ +:text-align: center +:margin: 0 3 0 0 + +![](https://github.com/user-attachments/assets/9f3ab0ed-83f1-4cff-bc68-387fc13ca61f) ++++ +Wei Ji Leong, Yvonne Fröhlich, Jing-Hui Tong, Federico Esteban, Max Jones, Andre Belem +:::: + ::::{grid-item-card} 2024 PyGMT Webinar using Google Colab (in Portuguese) :link: https://github.com/andrebelem/Oficina_PyGMT :text-align: center @@ -107,7 +117,7 @@ Andre Belem :text-align: center :margin: 0 3 0 0 -![](https://github.com/tktmyd/pygmt-howto-jp/raw/main/docs/_images/inf_on_map_12_0.png) +![](https://github.com/tktmyd/pygmt-howto-jp/raw/main/docs/_images/915fa767426edefb4ec413f6094bafcfe1f3f1410b1d53073a29d2ffe9f8e6c9.png) +++ Takuto Maeda :::: diff --git a/doc/install.md b/doc/install.md index fc9251d5a1e..55d2290295f 100644 --- a/doc/install.md +++ b/doc/install.md @@ -128,14 +128,14 @@ installed (we'll call it `pygmt` but feel free to change it to whatever you want ::: {tab-item} mamba :sync: mamba ``` -mamba create --name pygmt python=3.12 numpy pandas xarray netcdf4 packaging gmt +mamba create --name pygmt python=3.13 numpy pandas xarray netcdf4 packaging gmt ``` ::: ::: {tab-item} conda :sync: conda ``` -conda 
create --name pygmt python=3.12 numpy pandas xarray netcdf4 packaging gmt +conda create --name pygmt python=3.13 numpy pandas xarray netcdf4 packaging gmt ``` ::: :::: @@ -162,19 +162,20 @@ From now on, all commands will take place inside the virtual environment called and won't affect your default `base` installation. ::::: {tip} -You can also enable more PyGMT functionality by installing PyGMT's optional dependencies in the environment. +You can also enable more PyGMT functionalities by installing PyGMT's optional +dependencies in the environment. :::: {tab-set} ::: {tab-item} mamba :sync: mamba ``` -mamba install contextily geopandas ipython pyarrow rioxarray +mamba install contextily geopandas ipython pyarrow-core rioxarray ``` ::: ::: {tab-item} conda :sync: conda ``` -conda install contextily geopandas ipython pyarrow rioxarray +conda install contextily geopandas ipython pyarrow-core rioxarray ``` ::: :::: diff --git a/doc/maintenance.md b/doc/maintenance.md index fd7b2f1d725..0664a2850eb 100644 --- a/doc/maintenance.md +++ b/doc/maintenance.md @@ -120,18 +120,25 @@ made to our documentation website every time we make a commit in a pull request. The service has a configuration file `.readthedocs.yaml`, with a list of options to change the default behaviour at . +## Continuous Benchmarking -## Dependencies Policy +We use the [CodSpeed](https://codspeed.io) service to continuously track PyGMT's +performance. The `pytest-codspeed` plugin collects benchmark data and uploads it to the +CodSpeed server, where results are available at . -PyGMT has adopted [SPEC 0](https://scientific-python.org/specs/spec-0000/) alongside the -rest of the scientific Python ecosystem, and therefore: +Benchmarking is handled through the `benchmarks.yml` GitHub Actions workflow. It's +automatically executed when a pull request is merged into the main branch. To trigger +benchmarking in a pull request, add the `run/benchmark` label to the pull request. 
-* Support for Python versions be dropped 3 years after their initial release. -* Support for core package dependencies (NumPy, pandas, Xarray) be dropped 2 years after - their initial release. +To include a new test in the benchmark suite, apply the `@pytest.mark.benchmark` +decorator to a test function. -Similarly, the PyGMT team has decided to discontinue support for GMT versions 3 years -after their initial release. +## Dependencies Policy + +PyGMT has adopted [SPEC 0](https://scientific-python.org/specs/spec-0000/) alongside the +rest of the scientific Python ecosystem, and made a few extensions based on the needs of +the project. Please see [Minimum Supported Versions](minversions.md) for the detailed +policy and the minimum supported versions of GMT, Python and core package dependencies. In `pyproject.toml`, the `requires-python` key should be set to the minimum supported version of Python. Minimum supported versions of GMT, Python and core package @@ -141,34 +148,35 @@ patch release. ## Backwards Compatibility and Deprecation Policy -PyGMT is still undergoing rapid development. All of the API is subject to change -until the v1.0.0 release. Versioning in PyGMT is based on the +PyGMT is still undergoing rapid development. All of the API is subject to change until +the v1.0.0 release. Versioning in PyGMT is based on the [semantic versioning specification](https://semver.org/spec/v2.0.0.html) -(i.e., v*MAJOR*.*MINOR*.*PATCH*). -Basic policy for backwards compatibility: +(i.e., v*MAJOR*.*MINOR*.*PATCH*). Basic policy for backwards compatibility: - Any incompatible changes should go through the deprecation process below. -- Incompatible changes are only allowed in major and minor releases, not in - patch releases. +- Incompatible changes are only allowed in major and minor releases, not in patch releases. - Incompatible changes should be documented in the release notes. 
When making incompatible changes, we should follow the process: - Discuss whether the incompatible changes are necessary on GitHub. -- Make the changes in a backwards compatible way, and raise a `FutureWarning` - warning for the old usage. At least one test using the old usage should be added. -- The warning message should clearly explain the changes and include the versions - in which the old usage is deprecated and is expected to be removed. -- The `FutureWarning` warning should appear for 2-4 minor versions, depending on - the impact of the changes. It means the deprecation period usually lasts - 3-12 months. +- Make the changes in a backwards compatible way, and raise a `FutureWarning` warning + for the old usage. At least one test using the old usage should be added. +- The warning message should clearly explain the changes and include the versions in + which the old usage is deprecated and is expected to be removed. +- The `FutureWarning` warning should appear for 2-4 minor versions, depending on the + impact of the changes. It means the deprecation period usually lasts 3-12 months. - Remove the old usage and warning when reaching the declared version. -To rename a function parameter, add the `@deprecate_parameter` decorator near -the top after the `@fmt_docstring` decorator but before the `@use_alias` -decorator (if those two exist). Here is an example: +### Deprecating a function parameter -``` +To rename a function parameter, add the `@deprecate_parameter` decorator near the top +after the `@fmt_docstring` decorator but before the `@use_alias` decorator (if those two +exist). A `TODO` comment should also be added to indicate the deprecation period (see below). +Here is an example: + +```python +# TODO(PyGMT>=0.6.0): Remove the deprecated "columns" parameter. 
@fmt_docstring @deprecate_parameter("columns", "incols", "v0.4.0", remove_version="v0.6.0") @use_alias(J="projection", R="region", V="verbose", i="incols") @@ -177,8 +185,30 @@ def plot(self, x=None, y=None, data=None, size=None, direction=None, **kwargs): pass ``` -In this case, the old parameter name `columns` is deprecated since v0.4.0, and -will be fully removed in v0.6.0. The new parameter name is `incols`. +In this case, the old parameter name `columns` is deprecated since v0.4.0, and will be +fully removed in v0.6.0. The new parameter name is `incols`. + +### TODO comments + +Occasionally, we need to implement temporary code that should be removed in the future. +This can occur in situations such as: + +- When a parameter, function, or method is deprecated and scheduled for removal. +- When workarounds are necessary to address issues in older or upcoming versions of GMT + or other dependencies. + +To track these temporary codes or workarounds, we use TODO comments. These comments +should adhere to the following format: + +```python +# TODO(package>=X.Y.Z): A brief description of the TODO item. +# Additional details if necessary. +``` +The TODO comment indicates that we should address the item when *package* version +*X.Y.Z* or later is required. + +It's important not to overuse TODO comments for tracking unimplemented features. +Instead, open issues to monitor these features. ## Making a Release diff --git a/doc/minversions.md b/doc/minversions.md index b51179cdf5f..5fb0d3d2236 100644 --- a/doc/minversions.md +++ b/doc/minversions.md @@ -18,18 +18,31 @@ myst: # Minimum Supported Versions PyGMT has adopted [SPEC 0](https://scientific-python.org/specs/spec-0000/) alongside the -rest of the scientific Python ecosystem, and therefore: +rest of the scientific Python ecosystem, and will therefore: -- Support for Python versions be dropped 3 years after their initial release. 
-- Support for core package dependencies (NumPy, pandas, Xarray) be dropped 2 years after - their initial release. +- Drop support for Python versions 3 years after their initial release. +- Drop support for core package dependencies (NumPy, pandas, Xarray) 2 years after their + initial release. -Similarly, the PyGMT team has decided to discontinue support for GMT versions 3 years -after their initial release. +In addition to the above, the PyGMT team has also decided to: + +- Drop support for GMT versions 3 years after their initial release, while ensuring at + least two latest minor versions remain supported. +- Maintain support for [optional dependencies](/ecosystem.md#pygmt-dependencies) for at + least 1 year after their initial release. Users are encouraged to use the most + up-to-date optional dependencies where possible. + +:::{note} +The SPEC 0 policy is enforced on a best-effort basis, and the PyGMT team may decide to +drop support for core (and optional) package dependencies earlier than recommended for +compatibility reasons. +::: | PyGMT Version | GMT | Python | NumPy | pandas | Xarray | |---|---|---|---|---|---| | [Dev][]* [] | {{ requires.gmt }} | {{ requires.python }} | {{ requires.numpy }} | {{ requires.pandas }} | {{ requires.xarray }} | +| [] | >=6.4.0 | >=3.11 | >=1.25 | >=2.0 | >=2023.04 | +| [] | >=6.4.0 | >=3.11 | >=1.25 | >=2.0 | >=2023.04 | | [] | >=6.3.0 | >=3.10 | >=1.24 | >=1.5 | >=2022.09 | | [] | >=6.3.0 | >=3.10 | >=1.23 | >=1.5 | >=2022.06 | | [] | >=6.3.0 | >=3.9 | >=1.23 | | | diff --git a/doc/overview.md b/doc/overview.md index feef05b9514..f12c4dd2c8f 100644 --- a/doc/overview.md +++ b/doc/overview.md @@ -32,6 +32,14 @@ our [Discourse forum](https://forum.generic-mapping-tools.org/c/questions/pygmt- These are conference presentations about the development of PyGMT (previously "GMT/Python"): +- "Accessing and Integrating GMT with Python and the Scientific Python Ecosystem". + 2024. 
+ Yvonne Fröhlich, Dongdong Tian, Wei Ji Leong, Max Jones, and Michael Grund. + Presented at *AGU 2024*. + doi:[10.6084/m9.figshare.28049495](https://doi.org/10.6084/m9.figshare.28049495) + + ![](https://github.com/user-attachments/assets/19e1391e-648d-43da-b6f6-ecfb4f3e83e8){.align-center width="80%"} + - "Geospatial Analysis & Visualization with PyGMT". 2022. Max Jones, Wei Ji Leong, and Leonardo Uieda. diff --git a/doc/techref/patterns.md b/doc/techref/patterns.md index 17deb045aa3..de7df02d047 100644 --- a/doc/techref/patterns.md +++ b/doc/techref/patterns.md @@ -11,7 +11,7 @@ image raster file. The former will result in one of the 90 predefined 64x64 bit- provided by GMT (see the figure below). The latter allows the user to create customized, repeating images using image raster files. -By specifying upper case **P** instead of **p** the image will be bit-reversed, i.e., +By specifying uppercase **P** instead of **p** the image will be bit-reversed, i.e., white and black areas will be interchanged (only applies to 1-bit images or predefined bit-image patterns). 
For these patterns and other 1-bit images one may specify alternative **b**ackground and **f**oreground colors (by appending **+b**_color_ and/or diff --git a/environment.yml b/environment.yml index 24e27e421e6..616be8a8318 100644 --- a/environment.yml +++ b/environment.yml @@ -3,11 +3,11 @@ channels: - conda-forge - nodefaults dependencies: - - python=3.12 + - python>=3.11 # Required dependencies - gmt=6.5.0 - ghostscript=10.04.0 - - numpy>=1.24 + - numpy>=1.25 - pandas>=2.0 - xarray>=2023.04 - netCDF4 @@ -16,7 +16,7 @@ dependencies: - contextily - geopandas - ipython - - pyarrow + - pyarrow-core - rioxarray # Development dependencies (general) - dvc @@ -27,7 +27,7 @@ dependencies: # Dev dependencies (style checks) - codespell - pre-commit - - ruff>=0.3.0 + - ruff>=0.9.0 # Dev dependencies (unit testing) - matplotlib-base - pytest>=6.0 @@ -35,14 +35,19 @@ dependencies: - pytest-doctestplus - pytest-mpl # Dev dependencies (building documentation) + - geodatasets - myst-nb - panel - - sphinx + - sphinx>=6.2 - sphinx-autodoc-typehints - sphinx-copybutton - sphinx-design - sphinx-gallery>=0.17.0 - sphinx_rtd_theme<3.0 + # Dev dependencies (building PDF documentation) + - cairosvg + - sphinxcontrib-svg2pdfconverter + - tectonic # Dev dependencies (type hints) - mypy - pandas-stubs diff --git a/examples/gallery/GALLERY_HEADER.rst b/examples/gallery/GALLERY_HEADER.rst index 2fee3e77c68..cf7fdf8d938 100644 --- a/examples/gallery/GALLERY_HEADER.rst +++ b/examples/gallery/GALLERY_HEADER.rst @@ -1,5 +1,3 @@ -.. _gallery: - Gallery ======= diff --git a/examples/gallery/basemaps/double_y_axes.py b/examples/gallery/basemaps/double_y_axes.py index aa8ba8a6815..c6f970e59ca 100644 --- a/examples/gallery/basemaps/double_y_axes.py +++ b/examples/gallery/basemaps/double_y_axes.py @@ -5,7 +5,7 @@ The ``frame`` parameter of the plotting methods of the :class:`pygmt.Figure` class can control which axes should be plotted and optionally show annotations, tick marks, and gridlines. 
By default, all 4 axes are plotted, along with -annotations and tick marks (denoted **W**, **S**, **E**, **N**). Lower case +annotations and tick marks (denoted **W**, **S**, **E**, **N**). Lowercase versions (**w**, **s**, **e**, **n**) can be used to denote to only plot the axes with tick marks. We can also only plot the axes without annotations and tick marks using **l** (left axis), **r** (right axis), **t** (top axis), diff --git a/examples/gallery/embellishments/scalebar.py b/examples/gallery/embellishments/scalebar.py index bca7d0b9703..4b829165fe5 100644 --- a/examples/gallery/embellishments/scalebar.py +++ b/examples/gallery/embellishments/scalebar.py @@ -12,8 +12,9 @@ - **g**: Give map coordinates as *longitude*\/\ *latitude*. - **j**\|\ **J**: Specify a two-character (order independent) code. Choose from vertical **T**\(op), **M**\(iddle), or **B**\(ottom) and - horizontal **L**\(eft), **C**\(entre), or **R**\(ight). Lower / upper - case **j** / **J** mean inside / outside of the map bounding box. + horizontal **L**\(eft), **C**\(entre), or **R**\(ight). Lower / + uppercase **j** / **J** mean inside / outside of the map bounding + box. - **n**: Give normalized bounding box coordinates as *nx*\/\ *ny*. - **x**: Give plot coordinates as *x*\/\ *y*. diff --git a/examples/gallery/histograms/scatter_and_histograms.py b/examples/gallery/histograms/scatter_and_histograms.py index b493de63476..7f740452cd9 100644 --- a/examples/gallery/histograms/scatter_and_histograms.py +++ b/examples/gallery/histograms/scatter_and_histograms.py @@ -2,70 +2,75 @@ Scatter plot with histograms ============================ -To create a scatter plot with histograms at the sides of the plot one -can use :meth:`pygmt.Figure.plot` in combination with -:meth:`pygmt.Figure.histogram`. The positions of the histograms are plotted -by offsetting them from the main scatter plot figure using -:meth:`pygmt.Figure.shift_origin`. 
+To create a scatter plot with histograms at the sides of the plot one can use +:meth:`pygmt.Figure.plot` in combination with :meth:`pygmt.Figure.histogram`. The +positions of the histograms are plotted by offsetting them from the main scatter plot +using :meth:`pygmt.Figure.shift_origin`. """ # %% import numpy as np import pygmt -# Generate random data from a standard normal distribution centered on 0 -# with a standard deviation of 1 -rng = np.random.default_rng(seed=19680801) +# Generate random x, y coordinates from a standard normal distribution. +# x values are centered on 0 with a standard deviation of 1, and y values are centered +# on 30 with a standard deviation of 2. +rng = np.random.default_rng() x = rng.normal(loc=0, scale=1, size=1000) -y = rng.normal(loc=0, scale=1, size=1000) +y = rng.normal(loc=30, scale=2, size=1000) -# Get axis limits -xymax = max(np.max(np.abs(x)), np.max(np.abs(y))) +# Get axis limits from the data limits. Extend the limits by 0.5 to add some margin. +xmin = np.floor(x.min()) - 0.5 +xmax = np.ceil(x.max()) + 0.5 +ymin = np.floor(y.min()) - 0.5 +ymax = np.ceil(y.max()) + 0.5 +# Set fill color for symbols and bars. +fill = "seagreen" + +# Set the dimensions of the scatter plot. +width, height = 10, 8 fig = pygmt.Figure() fig.basemap( - region=[-xymax - 0.5, xymax + 0.5, -xymax - 0.5, xymax + 0.5], - projection="X10c/10c", - frame=["WSrt", "a1"], + region=[xmin, xmax, ymin, ymax], + projection=f"X{width}/{height}", + frame=["WSrt", "af"], ) -fillcol = "seagreen" - -# Plot data points as circles with a diameter of 0.15 centimeters -fig.plot(x=x, y=y, style="c0.15c", fill=fillcol, transparency=50) +# Plot data points as circles with a diameter of 0.15 centimeters and set transparency +# level for all circles to deal with overplotting. 
+fig.plot(x=x, y=y, style="c0.15c", fill=fill, transparency=50) -# Shift the plot origin and add top margin histogram -fig.shift_origin(yshift="10.25c") +# Shift the plot origin and add top margin histogram. +fig.shift_origin(yshift=height + 0.25) fig.histogram( - projection="X10c/2c", - frame=["Wsrt", "xf1", "y+lCounts"], - # Give the same value for ymin and ymax to have ymin and ymax - # calculated automatically - region=[-xymax - 0.5, xymax + 0.5, 0, 0], + projection=f"X{width}/3", + frame=["Wsrt", "xf", "yaf+lCounts"], + # Give the same value for ymin and ymax to have them calculated automatically. + region=[xmin, xmax, 0, 0], data=x, - fill=fillcol, + fill=fill, pen="0.1p,white", histtype=0, - series=0.1, + series=0.2, ) -# Shift the plot origin and add right margin histogram -fig.shift_origin(yshift="-10.25c", xshift="10.25c") +# Shift the plot origin and add right margin histogram. +fig.shift_origin(yshift=-height - 0.25, xshift=width + 0.25) +# Plot the horizontal histogram. fig.histogram( horizontal=True, - projection="X2c/10c", - # Note that the y-axis annotation "Counts" is shown in x-axis direction - # due to the rotation caused by horizontal=True - frame=["wSrt", "xf1", "y+lCounts"], - region=[-xymax - 0.5, xymax + 0.5, 0, 0], + projection=f"X3/{height}", + # Note that the x- and y-axis are flipped, with the y-axis plotted horizontally. + frame=["wSrt", "xf", "yaf+lCounts"], + region=[ymin, ymax, 0, 0], data=y, - fill=fillcol, + fill=fill, pen="0.1p,white", histtype=0, - series=0.1, + series=0.2, ) - fig.show() diff --git a/examples/gallery/images/cross_section.py b/examples/gallery/images/cross_section.py index 690b6797ea3..d451c8c3ee8 100644 --- a/examples/gallery/images/cross_section.py +++ b/examples/gallery/images/cross_section.py @@ -2,13 +2,11 @@ Cross-section along a transect ============================== -:func:`pygmt.project` and :func:`pygmt.grdtrack` can be used to focus on -a quantity and its variation along a desired survey line. 
-In this example, the elevation is extracted from a grid provided via -:func:`pygmt.datasets.load_earth_relief`. -The figure consists of two parts, a map of the elevation in the study -area showing the survey line and a Cartesian plot showing the elevation -along the survey line. +:func:`pygmt.project` and :func:`pygmt.grdtrack` can be used to focus on a quantity and +its variation along a desired survey line. In this example, the elevation is extracted +from a grid provided via :func:`pygmt.datasets.load_earth_relief`. The figure consists +of two parts, a map of the elevation in the study area showing the survey line and a +Cartesian plot showing the elevation along the survey line. *This example is orientated on an example in the GMT/China documentation*: https://docs.gmt-china.org/latest/examples/ex026/ @@ -27,33 +25,24 @@ # ---------------------------------------------------------------------------- # Bottom: Map of elevation in study area -# Set up basic map -fig.basemap( - region=region_map, - projection="M12c", # Mercator projection with a width of 12 centimeters - frame="af", -) +# Set up basic map using a Mercator projection with a width of 12 centimeters +fig.basemap(region=region_map, projection="M12c", frame="af") -# Download grid for Earth relief with a resolution of 10 arc-minutes and -# gridline registration [Default] -grid_map = pygmt.datasets.load_earth_relief( - resolution="10m", - region=region_map, -) +# Download grid for Earth relief with a resolution of 10 arc-minutes and gridline +# registration [Default] +grid_map = pygmt.datasets.load_earth_relief(resolution="10m", region=region_map) # Plot the downloaded grid with color-coding based on the elevation fig.grdimage(grid=grid_map, cmap="oleron") # Add a colorbar for the elevation fig.colorbar( - # Place the colorbar inside the plot (lower-case "j") in the Bottom - # Right (BR) corner with an offset ("+o") of 0.7 centimeters and - # 0.3 centimeters in x or y directions, respectively - # Move 
the x label above the horizontal colorbar ("+ml") + # Place the colorbar inside the plot (lowercase "j") in the Bottom Right (BR) + # corner with an offset ("+o") of 0.7 centimeters and 0.3 centimeters in x or y + # directions, respectively; move the x label above the horizontal colorbar ("+ml") position="jBR+o0.7c/0.8c+h+w5c/0.3c+ml", - # Add a box around the colobar with a fill ("+g") in "white" color and - # a transparency ("@") of 30 % and with a 0.8-points thick black - # outline ("+p") + # Add a box around the colobar with a fill ("+g") in "white" color and a + # transparency ("@") of 30 % and with a 0.8-points thick, black, outline ("+p") box="+gwhite@30+p0.8p,black", # Add x and y labels ("+l") frame=["x+lElevation", "y+lm"], @@ -63,7 +52,7 @@ fig.plot( x=[126, 146], # Longitude in degrees East y=[42, 40], # Latitude in degrees North - # Draw a 2-points thick red dashed line for the survey line + # Draw a 2-points thick, red, dashed line for the survey line pen="2p,red,dashed", ) @@ -79,16 +68,15 @@ # ---------------------------------------------------------------------------- # Top: Elevation along survey line -# Shift plot origin 12.5 centimeters to the top -fig.shift_origin(yshift="12.5c") +# Shift plot origin to the top by the height of the map ("+h") plus 1.5 centimeters +fig.shift_origin(yshift="h+1.5c") fig.basemap( region=[0, 15, -8000, 6000], # x_min, x_max, y_min, y_max - # Cartesian projection with a width of 12 centimeters and - # a height of 3 centimeters + # Cartesian projection with a width of 12 centimeters and a height of 3 centimeters projection="X12c/3c", - # Add annotations ("a") and ticks ("f") as well as labels ("+l") - # at the west or left and south or bottom sides ("WSrt") + # Add annotations ("a") and ticks ("f") as well as labels ("+l") at the west or + # left and south or bottom sides ("WSrt") frame=["WSrt", "xa2f1+lDistance+u°", "ya4000+lElevation / m"], ) @@ -101,28 +89,24 @@ font="10p", # Use a font size of 10 points ) -# 
Generate points along a great circle corresponding to the survey line -# and store them in a pandas.DataFrame +# Generate points along a great circle corresponding to the survey line and store them +# in a pandas.DataFrame track_df = pygmt.project( - center="126/42", # Start point of survey line (longitude/latitude) - endpoint="146/40", # End point of survey line (longitude/latitude) - generate="0.1", # Output data in steps of 0.1 degrees + center=[126, 42], # Start point of survey line (longitude, latitude) + endpoint=[146, 40], # End point of survey line (longitude, latitude) + generate=0.1, # Output data in steps of 0.1 degrees ) -# Extract the elevation at the generated points from the downloaded grid -# and add it as new column "elevation" to the pandas.DataFrame -track_df = pygmt.grdtrack( - grid=grid_map, - points=track_df, - newcolname="elevation", -) +# Extract the elevation at the generated points from the downloaded grid and add it as +# new column "elevation" to the pandas.DataFrame +track_df = pygmt.grdtrack(grid=grid_map, points=track_df, newcolname="elevation") # Plot water masses fig.plot( x=[0, 15], y=[0, 0], fill="lightblue", # Fill the polygon in "lightblue" - # Draw a 0.25-points thick black solid outline + # Draw a 0.25-points thick, black, solid outline pen="0.25p,black,solid", close="+y-8000", # Force closed polygon ) @@ -132,7 +116,7 @@ x=track_df.p, y=track_df.elevation, fill="gray", # Fill the polygon in "gray" - # Draw a 1-point thick black solid outline + # Draw a 1-point thick, black, solid outline pen="1p,black,solid", close="+y-8000", # Force closed polygon ) diff --git a/examples/gallery/images/rgb_image.py b/examples/gallery/images/rgb_image.py index 56d7b9d2f70..af548c7c0b3 100644 --- a/examples/gallery/images/rgb_image.py +++ b/examples/gallery/images/rgb_image.py @@ -22,13 +22,13 @@ # %% # Read 3-band data from GeoTIFF into an xarray.DataArray object: with rioxarray.open_rasterio( - 
filename="https://oin-hotosm.s3.us-east-1.amazonaws.com/64d6a49a19cb3a000147a65b/0/64d6a49a19cb3a000147a65c.tif", + filename="https://oin-hotosm-temp.s3.us-east-1.amazonaws.com/64d6a49a19cb3a000147a65b/0/64d6a49a19cb3a000147a65c.tif", overview_level=5, ) as img: # Subset to area of Lāhainā in EPSG:32604 coordinates image = img.rio.clip_box(minx=738000, maxx=755000, miny=2300000, maxy=2318000) image = image.load() # Force loading the DataArray into memory -image # noqa: B018 +image # %% # Plot the RGB imagery: diff --git a/examples/gallery/lines/decorated_lines.py b/examples/gallery/lines/decorated_lines.py index 42ad5dc7be9..cbe6b6b510d 100644 --- a/examples/gallery/lines/decorated_lines.py +++ b/examples/gallery/lines/decorated_lines.py @@ -51,7 +51,7 @@ "~d1c:+sd0.5c+gtan+p1p,black+n-0.2c/0.1c", # Give the number of equally spaced symbols by using "n" instead of "d" "~n6:+sn0.5c+gtan+p1p,black", - # Use upper-case "N" to have symbols at the start and end of the line + # Use uppercase "N" to have symbols at the start and end of the line "~N6:+sh0.5c+gtan+p1p,black", # Suppress the main decorated line by appending "+i" "~d1c:+sg0.5c+gtan+p1p,black+i", diff --git a/examples/gallery/lines/envelope.py b/examples/gallery/lines/envelope.py index cc0e24ba6d5..c5d7e578f9c 100644 --- a/examples/gallery/lines/envelope.py +++ b/examples/gallery/lines/envelope.py @@ -2,18 +2,18 @@ Envelope ======== -The ``close`` parameter of the :meth:`pygmt.Figure.plot` method can be -used to build a symmetrical or an asymmetrical envelope. The user can -give either the deviations or the bounds in y-direction. For the first -case append ``"+d"`` or ``"+D"`` and for the latter case ``"+b"``. +The ``close`` parameter of the :meth:`pygmt.Figure.plot` method can be used to build a +symmetrical or an asymmetrical envelope. The user can give either the deviations or the +bounds in y-direction. For the first case append ``"+d"`` or ``"+D"`` and for the latter +case ``"+b"``. 
""" # %% import pandas as pd import pygmt -# Define a pandas DataFrame with columns for x and y as well as the -# lower and upper deviations +# Define a pandas.DataFrame with columns for x and y as well as the lower and upper +# deviations df_devi = pd.DataFrame( data={ "x": [1, 3, 5, 7, 9], @@ -23,7 +23,7 @@ } ) -# Define the same pandas DataFrame but with lower and upper bounds +# Define the same pandas.DataFrame but with lower and upper bounds df_bound = pd.DataFrame( data={ "x": [1, 3, 5, 7, 9], @@ -34,7 +34,6 @@ ) -# Create Figure instance fig = pygmt.Figure() # ----------------------------------------------------------------------------- @@ -55,15 +54,10 @@ ) # Plot the data points on top -fig.plot( - data=df_devi, - style="c0.2c", # Use circles with a diameter of 0.2 centimeters - pen="1p,gray30", - fill="darkgray", -) +fig.plot(data=df_devi, style="c0.2c", pen="1p,gray30", fill="darkgray") -# Shift plot origin 11 centimeters in x direction -fig.shift_origin(xshift="11c") +# Shift plot origin by the figure width ("w") plus 1 centimeter in x direction +fig.shift_origin(xshift="w+1c") # ----------------------------------------------------------------------------- # Middle @@ -77,18 +71,15 @@ fig.plot( data=df_devi, fill="gray@50", - # Add an outline around the envelope - # Here, a dashed pen ("+p") with 0.5-points thickness and - # "gray30" color is used + # Add an outline around the envelope. 
Here, a dashed pen ("+p") with 0.5-points + # thickness and "gray30" color is used close="+D+p0.5p,gray30,dashed", pen="1p,gray30", ) -# Plot the data points on top fig.plot(data=df_devi, style="c0.2c", pen="1p,gray30", fill="darkgray") -# Shift plot origin 11 centimeters in x-direction -fig.shift_origin(xshift="11c") +fig.shift_origin(xshift="w+1c") # ----------------------------------------------------------------------------- # Right @@ -102,7 +93,6 @@ # Plot an envelope based on the bounds ("+b") fig.plot(data=df_bound, close="+b+p0.5p,gray30,dashed", pen="1p,gray30") -# Plot the data points on top fig.plot(data=df_bound, style="c0.2c", pen="1p,gray30", fill="darkgray") fig.show() diff --git a/examples/gallery/lines/hlines_vlines.py b/examples/gallery/lines/hlines_vlines.py new file mode 100644 index 00000000000..ec0d73dddab --- /dev/null +++ b/examples/gallery/lines/hlines_vlines.py @@ -0,0 +1,117 @@ +""" +Horizontal and vertical lines +============================= + +The :meth:`pygmt.Figure.hlines` and :meth:`pygmt.Figure.vlines` methods allow to plot +horizontal and vertical lines in Cartesian, geographic and polar coordinate systems. +""" + +# %% +# Cartesian coordinate system +# --------------------------- +# In Cartesian coordinate systems lines are plotted as straight lines. 
+ +import pygmt + +fig = pygmt.Figure() + +fig.basemap( + region=[0, 10, 0, 10], projection="X10c/10c", frame=["+tCartesian hlines", "af"] +) + +# Add a horizontal line at y=9 +fig.hlines(y=9, pen="1.5p,red3", label="Line 1") +# Add a horizontal line at y=8 with x from 2 to 8 +fig.hlines(y=8, xmin=2, xmax=8, pen="1.5p,gray30,-", label="Line 2") +# Add two horizontal lines at y=6 and y=7 both with x from 3 to 7 +fig.hlines(y=[6, 7], xmin=3, xmax=7, pen="1.5p,salmon", label="Lines 3 & 4") +# Add two horizontal lines at y=4 and y=5 both with x from 4 to 9 +fig.hlines(y=[4, 5], xmin=4, xmax=9, pen="1.5p,black,.", label="Lines 5 & 6") +# Add two horizontal lines at y=2 and y=3 with different x limits +fig.hlines( + y=[2, 3], xmin=[0, 1], xmax=[7, 7.5], pen="1.5p,dodgerblue3", label="Lines 7 & 8" +) +fig.legend(position="JBR+jBR+o0.2c", box="+gwhite+p1p") + +fig.shift_origin(xshift="w+2c") + +fig.basemap( + region=[0, 10, 0, 10], projection="X10c/10c", frame=["+tCartesian vlines", "af"] +) +# Add a vertical line at x=1 +fig.vlines(x=1, pen="1.5p,red3", label="Line 1") +# Add a vertical line at x=2 with y from 2 to 8 +fig.vlines(x=2, ymin=2, ymax=8, pen="1.5p,gray30,-", label="Line 2") +# Add two vertical lines at x=3 and x=4 both with y from 3 to 7 +fig.vlines(x=[3, 4], ymin=3, ymax=7, pen="1.5p,salmon", label="Lines 3 & 4") +# Add two vertical lines at x=5 and x=6 both with y from 4 to 9 +fig.vlines(x=[5, 6], ymin=4, ymax=9, pen="1.5p,black,.", label="Lines 5 & 6") +# Add two vertical lines at x=7 and x=8 with different y limits +fig.vlines( + x=[7, 8], ymin=[0, 1], ymax=[7, 7.5], pen="1.5p,dodgerblue3", label="Lines 7 & 8" +) +fig.legend() + +fig.show() + +# %% +# Geographic coordinate system +# ---------------------------- +# The same can be done in geographic coordinate systems where "horizontal" means lines +# are plotted along parallels (constant latitude) while "vertical" means lines are +# plotted along meridians (constant longitude). 
+ +fig = pygmt.Figure() + +fig.basemap(region="g", projection="R15c", frame=["+tGeographic hlines", "af"]) +# Add a line at 70°N +fig.hlines(y=70, pen="1.5p,red3", label="Line 1") +# Add a line at 50°N with longitude limits at 20°E and 160°E +fig.hlines(y=50, xmin=20, xmax=160, pen="1.5p,dodgerblue3", label="Line 2") +# Add a line at 30°S with longitude limits at 60°E and 270°E +fig.hlines(y=-30, xmin=60, xmax=270, pen="1.5p,gray30,-", label="Line 3") +fig.legend() + +fig.shift_origin(xshift="w+2c") + +fig.basemap(region="g", projection="R15c", frame=["+tGeographic vlines", "af"]) +# Add a line at 70°E +fig.vlines(x=70, pen="1.5p,red3", label="Line 1") +# Add a line at 20°E with latitude limits at 50°S and 70°N +fig.vlines(x=120, ymin=-50, ymax=70, pen="1.5p,dodgerblue3", label="Line 2") +# Add a line at 230°E with latitude limits at 70°S and 80°N +fig.vlines(x=230, ymin=-70, ymax=80, pen="1.5p,gray30,-", label="Line 3") +fig.legend() + +fig.show() + +# %% +# Polar coordinate system +# ----------------------- +# When using polar coordinate systems "horizontal" means lines are plotted as arcs along +# a constant radius while "vertical" means lines are plotted as straight lines along +# radius at a specified azimuth. 
+ +fig = pygmt.Figure() + +fig.basemap(region=[0, 360, 0, 1], projection="P10c", frame=["+tPolar hlines", "af"]) +# Add a line along radius=0.8 +fig.hlines(y=0.8, pen="1.5p,red3", label="Line 1") +# Add a line along radius=0.5 with azimuth limits at 30° and 160° +fig.hlines(y=0.5, xmin=30, xmax=160, pen="1.5p,dodgerblue3", label="Line 2") +# Add a line along radius=0.25 with azimuth limits at 60° and 270° +fig.hlines(y=0.25, xmin=60, xmax=270, pen="1.5p,gray30,-", label="Line 3") +fig.legend() + +fig.shift_origin(xshift="w+2c") + +fig.basemap(region=[0, 360, 0, 1], projection="P10c", frame=["+tPolar vlines", "af"]) +# Add a line along azimuth=120° +fig.vlines(x=120, pen="1.5p,red3", label="Line 1") +# Add a line along azimuth=190° with radius limits at 0.2 and 0.8 +fig.vlines(x=190, ymin=0.2, ymax=0.8, pen="1.5p,dodgerblue3", label="Line 2") +# Add a line along azimuth=320 with radius limits at 0.5 and 0.9 +fig.vlines(x=320, ymin=0.5, ymax=0.9, pen="1.5p,gray30,-", label="Line 3") +fig.legend() + +fig.show() diff --git a/examples/gallery/lines/linestrings.py b/examples/gallery/lines/linestrings.py new file mode 100644 index 00000000000..18f94502f16 --- /dev/null +++ b/examples/gallery/lines/linestrings.py @@ -0,0 +1,46 @@ +""" +GeoPandas: Plotting lines with LineString or MultiLineString geometry +===================================================================== + +The :meth:`pygmt.Figure.plot` method allows us to plot geographical data such as lines +with LineString or MultiLineString geometry types stored in a +:class:`geopandas.GeoDataFrame` object or any object that implements the +`__geo_interface__ `__ property. + +Use :func:`geopandas.read_file` to load data from any supported OGR format such as a +shapefile (.shp), GeoJSON (.geojson), geopackage (.gpkg), etc. Then, pass the +:class:`geopandas.GeoDataFrame` object as an argument to the ``data`` parameter of +:meth:`pygmt.Figure.plot`, and style the lines using the ``pen`` parameter. 
+""" + +# %% +import geodatasets +import geopandas as gpd +import pygmt + +# Read a sample dataset provided by the geodatasets package. +# The dataset contains large rivers in Europe, stored as LineString/MultiLineString +# geometry types. +gdf = gpd.read_file(geodatasets.get_path("eea large_rivers")) + +# Convert object to EPSG 4326 coordinate system +gdf = gdf.to_crs("EPSG:4326") +gdf.head() + +# %% +fig = pygmt.Figure() + +fig.coast( + projection="M10c", + region=[-10, 30, 35, 57], + resolution="l", + land="gray95", + shorelines="1/0.1p,gray50", + borders="1/0.1,gray30", + frame=True, +) + +# Add rivers to map +fig.plot(data=gdf, pen="1p,steelblue") + +fig.show() diff --git a/examples/gallery/lines/quoted_lines.py b/examples/gallery/lines/quoted_lines.py index 9e70ec15c4a..2ccfb1309a6 100644 --- a/examples/gallery/lines/quoted_lines.py +++ b/examples/gallery/lines/quoted_lines.py @@ -33,7 +33,7 @@ "qd1c:+ltext+i", # Give the number of equally spaced labels by using "n" instead of "d" "qn5:+ltext", - # Use upper-case "N" to have labels at the start and end of the line + # Use uppercase "N" to have labels at the start and end of the line "qN5:+ltext", # To only plot a label at the start of the line use "N-1" "qN-1:+ltext", diff --git a/examples/gallery/lines/roads.py b/examples/gallery/lines/roads.py deleted file mode 100644 index c2a5f69980a..00000000000 --- a/examples/gallery/lines/roads.py +++ /dev/null @@ -1,46 +0,0 @@ -# ruff: noqa: RUF003 -""" -Roads -===== - -The :meth:`pygmt.Figure.plot` method allows us to plot geographical data such -as lines which are stored in a :class:`geopandas.GeoDataFrame` object. Use -:func:`geopandas.read_file` to load data from any supported OGR format such as -a shapefile (.shp), GeoJSON (.geojson), geopackage (.gpkg), etc. Then, pass the -:class:`geopandas.GeoDataFrame` as an argument to the ``data`` parameter of -:meth:`pygmt.Figure.plot`, and style the geometry using the ``pen`` parameter. 
-""" - -# %% -import geopandas as gpd -import pygmt - -# Read shapefile data using geopandas -gdf = gpd.read_file( - "https://www2.census.gov/geo/tiger/TIGER2015/PRISECROADS/tl_2015_15_prisecroads.zip" -) -# The dataset contains different road types listed in the RTTYP column, -# here we select the following ones to plot: -roads_common = gdf[gdf.RTTYP == "M"] # Common name roads -roads_state = gdf[gdf.RTTYP == "S"] # State recognized roads -roads_interstate = gdf[gdf.RTTYP == "I"] # Interstate roads - -fig = pygmt.Figure() - -# Define target region around Oʻahu (Hawaiʻi) -region = [-158.3, -157.6, 21.2, 21.75] # xmin, xmax, ymin, ymax - -title = "Main roads of O`ahu (Hawai`i)" # Approximating the Okina letter ʻ with ` -fig.basemap(region=region, projection="M12c", frame=["af", f"WSne+t{title}"]) -fig.coast(land="gray", water="dodgerblue4", shorelines="1p,black") - -# Plot the individual road types with different pen settings and assign labels -# which are displayed in the legend -fig.plot(data=roads_common, pen="5p,dodgerblue", label="CommonName") -fig.plot(data=roads_state, pen="2p,gold", label="StateRecognized") -fig.plot(data=roads_interstate, pen="2p,red", label="Interstate") - -# Add legend -fig.legend() - -fig.show() diff --git a/examples/gallery/maps/choropleth_map.py b/examples/gallery/maps/choropleth_map.py index 6c43d24d3dd..f1cce8c3014 100644 --- a/examples/gallery/maps/choropleth_map.py +++ b/examples/gallery/maps/choropleth_map.py @@ -2,25 +2,27 @@ Choropleth map ============== -The :meth:`pygmt.Figure.plot` method allows us to plot geographical data such -as polygons which are stored in a :class:`geopandas.GeoDataFrame` object. Use -:func:`geopandas.read_file` to load data from any supported OGR format such as -a shapefile (.shp), GeoJSON (.geojson), geopackage (.gpkg), etc. You can also -use a full URL pointing to your desired data source. 
Then, pass the -:class:`geopandas.GeoDataFrame` as an argument to the ``data`` parameter of -:meth:`pygmt.Figure.plot`, and style the geometry using the ``pen`` parameter. -To fill the polygons based on a corresponding column you need to set -``fill="+z"`` as well as select the appropriate column using the ``aspatial`` -parameter as shown in the example below. +The :meth:`pygmt.Figure.plot` method allows us to plot geographical data such as +polygons which are stored in a :class:`geopandas.GeoDataFrame` object. Use +:func:`geopandas.read_file` to load data from any supported OGR format such as a +shapefile (.shp), GeoJSON (.geojson), geopackage (.gpkg), etc. You can also use a full +URL pointing to your desired data source. Then, pass the :class:`geopandas.GeoDataFrame` +as an argument to the ``data`` parameter of :meth:`pygmt.Figure.plot`, and style the +geometry using the ``pen`` parameter. To fill the polygons based on a corresponding +column you need to set ``fill="+z"`` as well as select the appropriate column using the +``aspatial`` parameter as shown in the example below. """ # %% +import geodatasets import geopandas as gpd import pygmt -# Read polygon data using geopandas -gdf = gpd.read_file("https://geodacenter.github.io/data-and-lab/data/airbnb.zip") +# Read the example dataset provided by geodatasets. +gdf = gpd.read_file(geodatasets.get_path("geoda airbnb")) +print(gdf.head()) +# %% fig = pygmt.Figure() fig.basemap( @@ -29,11 +31,10 @@ frame="+tPopulation of Chicago", ) -# The dataset contains different attributes, here we select -# the "population" column to plot. +# The dataset contains different attributes, here we select the "population" column to +# plot. -# First, we define the colormap to fill the polygons based on -# the "population" column. +# First, we define the colormap to fill the polygons based on the "population" column. 
pygmt.makecpt( cmap="acton", series=[gdf["population"].min(), gdf["population"].max(), 10], @@ -41,8 +42,8 @@ reverse=True, ) -# Next, we plot the polygons and fill them using the defined colormap. -# The target column is defined by the aspatial parameter. +# Next, we plot the polygons and fill them using the defined colormap. The target column +# is defined by the aspatial parameter. fig.plot( data=gdf, pen="0.3p,gray10", @@ -51,7 +52,7 @@ aspatial="Z=population", ) -# Add colorbar legend +# Add colorbar legend. fig.colorbar(frame="x+lPopulation", position="jML+o-0.5c+w3.5c/0.2c") fig.show() diff --git a/examples/gallery/symbols/multi_parameter_symbols.py b/examples/gallery/symbols/multi_parameter_symbols.py index c287235022d..72480aba063 100644 --- a/examples/gallery/symbols/multi_parameter_symbols.py +++ b/examples/gallery/symbols/multi_parameter_symbols.py @@ -2,9 +2,9 @@ Multi-parameter symbols ======================= -The :meth:`pygmt.Figure.plot` method can plot individual multi-parameter -symbols by passing the corresponding shortcuts (**e**, **j**, **r**, **R**, -**w**) to the ``style`` parameter: +The :meth:`pygmt.Figure.plot` method can plot individual multi-parameter symbols by +passing the corresponding shortcuts (**e**, **j**, **r**, **R**, **w**) to the ``style`` +parameter: - **e**: ellipse - **j**: rotated rectangle @@ -18,10 +18,10 @@ import pygmt # %% -# We can plot multi-parameter symbols using the same symbol style. We need to -# define locations (lon, lat) via the ``x`` and ``y`` parameters (scalar for -# a single symbol or 1-D list for several ones) and two or three symbol -# parameters after those shortcuts via the ``style`` parameter. +# We can plot multi-parameter symbols using the same symbol style. We need to define +# locations (lon, lat) via the ``x`` and ``y`` parameters (scalar for a single symbol or +# 1-D list for several ones) and two or three symbol parameters after those shortcuts +# via the ``style`` parameter. 
# # The multi-parameter symbols in the ``style`` parameter are defined as: # @@ -30,13 +30,14 @@ # - **r**: rectangle, ``width/height`` # - **R**: rounded rectangle, ``width/height/radius`` # - **w**: pie wedge, ``diameter/startdir/stopdir``, the last two arguments are -# directions given in degrees counter-clockwise from horizontal +# directions given in degrees counter-clockwise from horizontal. Append **+i** and the +# desired value to apply an inner diameter. # -# Upper-case versions **E**, **J**, and **W** are similar to **e**, **j**, and -# **w** but expect geographic azimuths and distances. +# Uppercase versions **E**, **J**, and **W** are similar to **e**, **j**, and **w** +# but expect geographic azimuths and distances. fig = pygmt.Figure() -fig.basemap(region=[0, 6, 0, 2], projection="x3c", frame=True) +fig.basemap(region=[0, 7, 0, 2], projection="x3c", frame=True) # Ellipse fig.plot(x=0.5, y=1, style="e45/3/1", fill="orange", pen="2p,black") @@ -48,14 +49,16 @@ fig.plot(x=4.5, y=1, style="R1.25/4/0.5", fill="seagreen", pen="2p,black") # Pie wedge fig.plot(x=5.5, y=1, style="w2.5/45/330", fill="lightgray", pen="2p,black") +# Ring sector +fig.plot(x=6.5, y=1, style="w2.5/45/330+i1", fill="lightgray", pen="2p,black") fig.show() # %% -# We can also plot symbols with varying parameters via defining those values in -# a 2-D list or numpy array (``[[parameters]]`` for a single symbol or -# ``[[parameters_1],[parameters_2],[parameters_i]]`` for several ones) or using -# an appropriately formatted input file and passing it to ``data``. +# We can also plot symbols with varying parameters via defining those values in a 2-D +# list or numpy array (``[[parameters]]`` for a single symbol or +# ``[[parameters_1],[parameters_2],[parameters_i]]`` for several ones) or using an +# appropriately formatted input file and passing it to ``data``. 
# # The symbol parameters in the 2-D list or numpy array are defined as: # @@ -63,12 +66,11 @@ # - **j**: rotated rectangle, ``[[lon, lat, direction, width, height]]`` # - **r**: rectangle, ``[[lon, lat, width, height]]`` # - **R**: rounded rectangle, ``[[lon, lat, width, height, radius]]`` -# - **w**: pie wedge, ``[[lon, lat, diameter, startdir, stopdir]]``, the last -# two arguments are directions given in degrees counter-clockwise from -# horizontal +# - **w**: pie wedge, ``[[lon, lat, diameter, startdir, stopdir]]``, the last two +# arguments are directions given in degrees counter-clockwise from horizontal fig = pygmt.Figure() -fig.basemap(region=[0, 6, 0, 4], projection="x3c", frame=["xa1f0.2", "ya0.5f0.1"]) +fig.basemap(region=[0, 7, 0, 4], projection="x3c", frame=["xa1f0.2", "ya0.5f0.1"]) # Ellipse data = [[0.5, 1, 45, 3, 1], [0.5, 3, 135, 2, 1]] @@ -85,6 +87,9 @@ # Pie wedge data = [[5.5, 1, 2.5, 45, 330], [5.5, 3, 1.5, 60, 300]] fig.plot(data=data, style="w", fill="lightgray", pen="2p,black") +# Ring sector +data = [[6.5, 1, 2.5, 45, 330], [6.5, 3, 1.5, 60, 300]] +fig.plot(data=data, style="w+i1", fill="lightgray", pen="2p,black") fig.show() diff --git a/examples/intro/GALLERY_HEADER.rst b/examples/intro/GALLERY_HEADER.rst index 95363dcd2e4..a4b59b27caa 100644 --- a/examples/intro/GALLERY_HEADER.rst +++ b/examples/intro/GALLERY_HEADER.rst @@ -1,15 +1,15 @@ Intro to PyGMT ============== -Welcome to PyGMT! The tutorials in this intro are designed to teach basic -concepts to create maps in PyGMT. +Welcome to PyGMT! The tutorials in this intro are designed to teach basic concepts to +create maps in PyGMT. **About this intro** -It is assumed that PyGMT has been successfully :doc:`installed ` -on your system. To test this, run ``import pygmt`` in a Python IDE or +It is assumed that PyGMT has been successfully :doc:`installed ` on your +system. To test this, run ``import pygmt`` in a Python IDE or `Jupyter `__ notebook. 
-This intro will progressively cover PyGMT data manipulation and plotting -concepts, and later tutorials will use concepts explained in previous ones. -It will not cover all PyGMT functions and methods. +This intro will progressively cover PyGMT data manipulation and plotting concepts, and +later tutorials will use concepts explained in previous ones. It will not cover all +PyGMT functions and methods. diff --git a/examples/projections/cyl/cyl_oblique_mercator.py b/examples/projections/cyl/cyl_oblique_mercator.py index 17bf589d48b..22db4f973da 100644 --- a/examples/projections/cyl/cyl_oblique_mercator.py +++ b/examples/projections/cyl/cyl_oblique_mercator.py @@ -9,7 +9,7 @@ The projection is set with **o** or **O**. There are three different specification ways (**a**\|\ **A**, **b**\|\ **B**, **c**\|\ **C**) available. For all three -definitions, the upper case letter mean the projection pole is set in the southern +definitions, the uppercase letter mean the projection pole is set in the southern hemisphere [Default is northern hemisphere]. Align the y-axis with the optional modifier **+v**. The figure size is set with *scale* or *width*. """ diff --git a/examples/projections/nongeo/cartesian_linear.py b/examples/projections/nongeo/cartesian_linear.py index 021d080ae94..83e986f6739 100644 --- a/examples/projections/nongeo/cartesian_linear.py +++ b/examples/projections/nongeo/cartesian_linear.py @@ -4,7 +4,7 @@ **X**\ *width*\ [/*height*] or **x**\ *x-scale*\ [/*y-scale*] -Give the *width* of the figure and the optional *height*. The lower-case version +Give the *width* of the figure and the optional *height*. The lowercase version **x** is similar to **X** but expects an *x-scale* and an optional *y-scale*. 
The Cartesian linear projection is primarily designed for regular floating point diff --git a/examples/projections/nongeo/cartesian_logarithmic.py b/examples/projections/nongeo/cartesian_logarithmic.py index ef354dba73f..0e4619075ea 100644 --- a/examples/projections/nongeo/cartesian_logarithmic.py +++ b/examples/projections/nongeo/cartesian_logarithmic.py @@ -6,7 +6,7 @@ **x**\ *x-scale*\ [**l**][/*y-scale*\ [**l**]] Give the *width* of the figure and the optional *height*. -The lower-case version **x** is similar to **X** but expects +The lowercase version **x** is similar to **X** but expects an *x-scale* and an optional *y-scale*. Each axis with a logarithmic transformation requires **l** after its size argument. diff --git a/examples/projections/nongeo/cartesian_power.py b/examples/projections/nongeo/cartesian_power.py index 862ceba8595..b5f856713ed 100644 --- a/examples/projections/nongeo/cartesian_power.py +++ b/examples/projections/nongeo/cartesian_power.py @@ -6,7 +6,7 @@ **x**\ *x-scale*\ [**p**\ *pvalue*][/*y-scale*\ [**p**\ *pvalue*]] Give the *width* of the figure and the optional argument *height*. -The lower-case version **x** is similar to **X** but expects +The lowercase version **x** is similar to **X** but expects an *x-scale* and an optional *y-scale*. Each axis with a power transformation requires **p** and the exponent for that axis after its size argument. diff --git a/examples/projections/nongeo/polar.py b/examples/projections/nongeo/polar.py index cbac5567f7f..760838b34ac 100644 --- a/examples/projections/nongeo/polar.py +++ b/examples/projections/nongeo/polar.py @@ -2,8 +2,8 @@ Polar ===== -Polar projections allow plotting polar coordinate data (e.g. angle -:math:`\theta` and radius *r*). +Polar projections allow plotting polar coordinate data (e.g. angle :math:`\theta` and +radius *r*). 
The full syntax for polar projections is: @@ -11,41 +11,39 @@ [**+r**\ *offset*][**+t**\ *origin*][**+z**\ [**p**\|\ *radius*]] Limits are set via the ``region`` parameter -([*theta_min*, *theta_max*, *radius_min*, *radius_max*]). When using -**P**\ *width* you have to give the *width* of the figure. The lower-case -version **p** is similar to **P** but expects a *scale* instead of -a width (**p**\ *scale*). +([*theta_min*, *theta_max*, *radius_min*, *radius_max*]). When using **P**\ *width* you +have to give the *width* of the figure. The lowercase version **p** is similar to **P** +but expects a *scale* instead of a width (**p**\ *scale*). The following customizing modifiers are available: -- **+a**: by default, :math:`\theta` refers to the angle that is equivalent to - a counterclockwise rotation with respect to the east direction (standard - definition); **+a** indicates that the input data are rotated clockwise - relative to the north direction (geographical azimuth angle). +- **+a**: by default, :math:`\theta` refers to the angle that is equivalent to a + counterclockwise rotation with respect to the east direction (standard definition); + **+a** indicates that the input data are rotated clockwise relative to the north + direction (geographical azimuth angle). -- **+r**\ *offset*: represents the offset of the r-axis. This modifier allows - you to offset the center of the circle from r=0. +- **+r**\ *offset*: represents the offset of the r-axis. This modifier allows you to + offset the center of the circle from r=0. - **+t**\ *origin*: sets the angle corresponding to the east direction which is - equivalent to rotating the entire coordinate axis clockwise; if the **+a** - modifier is used, setting the angle corresponding to the north direction is - equivalent to rotating the entire coordinate axis counterclockwise. 
+ equivalent to rotating the entire coordinate axis clockwise; if the **+a** modifier + is used, setting the angle corresponding to the north direction is equivalent to + rotating the entire coordinate axis counterclockwise. - **+f**: reverses the radial direction. - - Append **e** to indicate that the r-axis is an elevation angle, and the - range of the r-axis should be between 0° and 90°. + - Append **e** to indicate that the r-axis is an elevation angle, and the range of the + r-axis should be between 0° and 90°. - Appending **p** sets the current Earth radius (determined by - :gmt-term:`PROJ_ELLIPSOID`) - to the maximum value of the r-axis when the r-axis is reversed. + :gmt-term:`PROJ_ELLIPSOID`) to the maximum value of the r-axis when the r-axis is + reversed. - Append *radius* to set the maximum value of the r-axis. -- **+z**: indicates that the r-axis is marked as depth instead of radius (e.g. +- **+z**: indicates that the r-axis is marked as depth instead of radius (e.g., *r = radius - z*). - Append **p** to set radius to the current Earth radius. - Append *radius* to set the value of the radius. 
- """ # %% @@ -56,60 +54,55 @@ pygmt.config(FONT_TITLE="14p,Courier,black", FORMAT_GEO_MAP="+D") # ============ -# top left +# Top left fig.basemap( - # set map limits to theta_min = 0, theta_max = 360, radius_min = 0, - # radius_max = 1 + # Set map limits to theta_min = 0, theta_max = 360, radius_min = 0, radius_max = 1 region=[0, 360, 0, 1], - # set map width to 5 cm + # Set map width to 5 cm projection="P5c", - # set the frame and title; @^ allows for a line break within the title + # Set the frame and title; @^ allows for a line break within the title frame=["xa45f", "+gbisque+tprojection='P5c' @^ region=[0, 360, 0, 1]"], ) -fig.shift_origin(xshift="8c") +fig.shift_origin(xshift="w+3c") # ============ -# top middle +# Top middle fig.basemap( - # set map limits to theta_min = 0, theta_max = 360, radius_min = 0, - # radius_max = 1 + # Set map limits to theta_min = 0, theta_max = 360, radius_min = 0, radius_max = 1 region=[0, 360, 0, 1], - # set map width to 5 cm and interpret input data as geographic azimuth - # instead of standard angle + # Set map width to 5 cm and interpret input data as geographic azimuth instead of + # standard angle projection="P5c+a", - # set the frame and title; @^ allows for a line break within the title + # Set the frame and title; @^ allows for a line break within the title frame=["xa45f", "+gbisque+tprojection='P5c+a' @^ region=[0, 360, 0, 1]"], ) -fig.shift_origin(xshift="8c") +fig.shift_origin(xshift="w+3c") # ============ -# top right +# Top right fig.basemap( - # set map limits to theta_min = 0, theta_max = 90, radius_min = 0, - # radius_max = 1 + # Set map limits to theta_min = 0, theta_max = 90, radius_min = 0, radius_max = 1 region=[0, 90, 0, 1], - # set map width to 5 cm and interpret input data as geographic azimuth - # instead of standard angle + # Set map width to 5 cm and interpret input data as geographic azimuth instead of + # standard angle projection="P5c+a", - # set the frame and title; @^ allows for a line break 
within the title + # Set the frame and title; @^ allows for a line break within the title frame=["xa45f", "ya0.2", "WNe+gbisque+tprojection='P5c+a' @^ region=[0, 90, 0, 1]"], ) -fig.shift_origin(xshift="-16c", yshift="-7c") +fig.shift_origin(xshift="-2w-6c", yshift="-h-2c") # ============ -# bottom left +# Bottom left fig.basemap( - # set map limits to theta_min = 0, theta_max = 90, radius_min = 0, - # radius_max = 1 + # Set map limits to theta_min = 0, theta_max = 90, radius_min = 0, radius_max = 1 region=[0, 90, 0, 1], - # set map width to 5 cm and interpret input data as geographic azimuth - # instead of standard angle, rotate coordinate system counterclockwise by - # 45 degrees + # Set map width to 5 cm and interpret input data as geographic azimuth instead of + # standard angle, rotate coordinate system counterclockwise by 45 degrees projection="P5c+a+t45", - # set the frame and title; @^ allows for a line break within the title + # Set the frame and title; @^ allows for a line break within the title frame=[ "xa30f", "ya0.2", @@ -117,19 +110,18 @@ ], ) -fig.shift_origin(xshift="8c", yshift="1.3c") +fig.shift_origin(xshift="w+3c", yshift="1.3c") # ============ -# bottom middle +# Bottom middle fig.basemap( - # set map limits to theta_min = 0, theta_max = 90, radius_min = 3480, + # Set map limits to theta_min = 0, theta_max = 90, radius_min = 3480, # radius_max = 6371 (Earth's radius) region=[0, 90, 3480, 6371], - # set map width to 5 cm and interpret input data as geographic azimuth - # instead of standard angle, rotate coordinate system counterclockwise by - # 45 degrees + # Set map width to 5 cm and interpret input data as geographic azimuth instead of + # standard angle, rotate coordinate system counterclockwise by 45 degrees projection="P5c+a+t45", - # set the frame, and title; @^ allows for a line break within the title + # Set the frame, and title; @^ allows for a line break within the title frame=[ "xa30f", "ya", @@ -137,19 +129,19 @@ ], ) 
-fig.shift_origin(xshift="8c") +fig.shift_origin(xshift="w+3c") # ============ -# bottom right +# Bottom right fig.basemap( - # set map limits to theta_min = 0, theta_max = 90, radius_min = 3480, + # Set map limits to theta_min = 0, theta_max = 90, radius_min = 3480, # radius_max = 6371 (Earth's radius) region=[0, 90, 3480, 6371], - # set map width to 5 cm and interpret input data as geographic azimuth - # instead of standard angle, rotate coordinate system counterclockwise by - # 45 degrees, r-axis is marked as depth + # Set map width to 5 cm and interpret input data as geographic azimuth instead of + # standard angle, rotate coordinate system counterclockwise by 45 degrees, r-axis + # is marked as depth projection="P5c+a+t45+z", - # set the frame, and title; @^ allows for a line break within the title + # Set the frame, and title; @^ allows for a line break within the title frame=[ "xa30f", "ya", diff --git a/examples/tutorials/GALLERY_HEADER.rst b/examples/tutorials/GALLERY_HEADER.rst index 4ae40011e79..0a330b6bb2f 100644 --- a/examples/tutorials/GALLERY_HEADER.rst +++ b/examples/tutorials/GALLERY_HEADER.rst @@ -1,5 +1,3 @@ -.. _tutorials: - Tutorials ========= diff --git a/examples/tutorials/advanced/cartesian_histograms.py b/examples/tutorials/advanced/cartesian_histograms.py index c90d1876922..3a547fd92f5 100644 --- a/examples/tutorials/advanced/cartesian_histograms.py +++ b/examples/tutorials/advanced/cartesian_histograms.py @@ -2,8 +2,8 @@ Cartesian histograms ==================== -Cartesian histograms can be generated using the :meth:`pygmt.Figure.histogram` -method. In this tutorial, different histogram related aspects are addressed: +Cartesian histograms can be generated using the :meth:`pygmt.Figure.histogram` method. 
+In this tutorial, different histogram related aspects are addressed: - Using vertical and horizontal bars - Using stair-steps @@ -38,39 +38,36 @@ # Vertical and horizontal bars # ---------------------------- # -# To define the width of the bins, the ``series`` parameter has to be -# specified. The bars can be filled via the ``fill`` parameter with either a -# color or a pattern (see later in this tutorial). Use the ``pen`` parameter -# to adjust width, color, and style of the outlines. By default, a histogram -# with vertical bars is created. Horizontal bars can be achieved via -# ``horizontal=True``. - -# Create new figure instance +# To define the width of the bins, the ``series`` parameter has to be specified. The +# bars can be filled via the ``fill`` parameter with either a color or a pattern (see +# later in this tutorial). Use the ``pen`` parameter to adjust width, color, and style +# of the outlines. By default, a histogram with vertical bars is created. Horizontal +# bars can be achieved via ``horizontal=True``. 
+ fig = pygmt.Figure() # Create histogram for data01 with vertical bars fig.histogram( # Define the plot range as a list of xmin, xmax, ymin, ymax - # Let ymin and ymax determined automatically by setting both to the same - # value + # Let ymin and ymax determined automatically by setting both to the same value region=[0, 200, 0, 0], projection="X10c", # Cartesian projection with a width of 10 centimeters - # Add frame, annotations (a), ticks (f), and y-axis label (+l) "Counts" - # The numbers give the steps of annotations and ticks + # Add frame, annotations ("a"), ticks ("f"), and y-axis label ("+l") "Counts"; the + # numbers give the steps of annotations and ticks frame=["WStr", "xaf10", "ya1f1+lCounts"], data=data01, # Set the bin width via the "series" parameter series=10, # Fill the bars with color "red3" fill="red3", - # Draw a 1-point thick solid outline in "darkgray" around the bars + # Draw a 1-point thick, solid outline in "darkgray" around the bars pen="1p,darkgray,solid", # Choose counts via the "histtype" parameter histtype=0, ) -# Shift plot origin 12 centimeters to the right -fig.shift_origin(xshift="12c") +# Shift plot origin by the figure width ("w") plus 2 centimeters to the right +fig.shift_origin(xshift="w+2c") # Create histogram for data01 with horizontal bars fig.histogram( @@ -82,9 +79,8 @@ fill="red3", pen="1p,darkgray,solid", histtype=0, - # Use horizontal bars - # Please note the flip of the x and y axes regarding annotations, ticks, - # gridlines, and axis labels + # Use horizontal bars. Note that the x- and y-axis are flipped, with the x-axis + # plotted vertically and the y-axis plotted horizontally. horizontal=True, ) @@ -95,10 +91,9 @@ # Stair-steps # ----------- # -# A stair-step diagram can be created by setting ``stairs=True``. Then only -# the outer outlines of the bars are drawn, and no internal bars are visible. +# A stair-step diagram can be created by setting ``stairs=True``. 
Then only the +# outer outlines of the bars are drawn, and no internal bars are visible. -# Create new figure instance fig = pygmt.Figure() # Create histogram for data01 @@ -108,15 +103,14 @@ frame=["WSne", "xaf10", "ya1f1+lCounts"], data=data01, series=10, - # Draw a 1-point thick dotted outline in "red3" + # Draw a 1-point thick, dotted outline in "red3" pen="1p,red3,dotted", histtype=0, # Draw stair-steps in stead of bars stairs=True, ) -# Shift plot origin 12 centimeters to the right -fig.shift_origin(xshift="12c") +fig.shift_origin(xshift="w+2c") # Create histogram for data02 fig.histogram( @@ -125,7 +119,7 @@ frame=["WSne", "xaf10", "ya1f1+lCounts"], data=data02, series=10, - # Draw a 1.5-point thick dashed outline in "orange" + # Draw a 1.5-points thick, dashed outline in "orange" pen="1.5p,orange,dashed", histtype=0, stairs=True, @@ -138,12 +132,10 @@ # Counts and frequency percent # ---------------------------- # -# By default, a histogram showing the counts in each bin is created -# (``histtype=0``). To show the frequency percent set the ``histtpye`` -# parameter to ``1``. For further options please have a look at the -# documentation of :meth:`pygmt.Figure.histogram`. +# By default, a histogram showing the counts in each bin is created (``histtype=0``). +# To show the frequency percent set the ``histtype`` parameter to ``1``. For further +# options please have a look at the documentation of :meth:`pygmt.Figure.histogram`. -# Create new figure instance fig = pygmt.Figure() # Create histogram for data02 showing counts @@ -159,8 +151,7 @@ histtype=0, ) -# Shift plot origin 11 centimeters to the right -fig.shift_origin(xshift="11c") +fig.shift_origin(xshift="w+1c") # Create histogram for data02 showing frequency percent fig.histogram( @@ -183,12 +174,11 @@ # Cumulative values # ----------------- # -# To create a histogram showing the cumulative values set ``cumulative=True``. 
-# Here, the bars of the cumulative histogram are filled with a pattern via -# the ``fill`` parameter. Annotate each bar with the counts it represents -# using the ``annotate`` parameter. +# To create a histogram showing the cumulative values set ``cumulative=True``. Here, +# the bars of the cumulative histogram are filled with a pattern via the ``fill`` +# parameter. Annotate each bar with the counts it represents using the ``annotate`` +# parameter. -# Create new figure instance fig = pygmt.Figure() # Create histogram for data01 showing the counts per bin @@ -205,8 +195,7 @@ annotate=True, ) -# Shift plot origin 11 centimeters to the right -fig.shift_origin(xshift="11c") +fig.shift_origin(xshift="w+1c") # Create histogram for data01 showing the cumulative counts fig.histogram( @@ -215,15 +204,15 @@ frame=["wSnE", "xaf10", "ya5f1+lCumulative counts"], data=data01, series=10, - # Use pattern (p) number 8 as fill for the bars - # Set the background (+b) to white [Default] - # Set the foreground (+f) to black [Default] + # Use pattern ("p") number 8 as fill for the bars + # Set the background ("+b") to white [Default] + # Set the foreground ("+f") to black [Default] fill="p8+bwhite+fblack", pen="1p,darkgray,solid", histtype=0, # Show cumulative counts cumulative=True, - # Offset (+o) the label by 10 points in negative y-direction + # Offset ("+o") the label by 10 points in negative y-direction annotate="+o-10p", ) @@ -234,9 +223,9 @@ # Overlaid bars # ------------- # -# Overlaid or overlapping bars can be achieved by plotting two or several -# histograms, each for one data set, on top of each other. The legend entry -# can be specified via the ``label`` parameter. +# Overlaid or overlapping bars can be achieved by plotting two or several histograms, +# each for one data set, on top of each other. The legend entry can be specified via +# the ``label`` parameter. 
# # Limitations of histograms with overlaid bars are: # @@ -244,7 +233,6 @@ # - Visually more colors or/and patterns than data sets # - Visually a "third histogram" (or more in case of more than two data sets) -# Create new figure instance fig = pygmt.Figure() # Create histogram for data01 @@ -283,10 +271,10 @@ # Stacked bars # ------------ # -# Histograms with stacked bars are not directly supported by PyGMT. Thus, -# before plotting, combined data sets have to be created from the single data -# sets. Then, stacked bars can be achieved similar to overlaid bars via -# plotting two or several histograms on top of each other. +# Histograms with stacked bars are not directly supported by PyGMT. Thus, before +# plotting, combined data sets have to be created from the single data sets. Then, +# stacked bars can be achieved similar to overlaid bars via plotting two or several +# histograms on top of each other. # # Limitations of histograms with stacked bars are: # @@ -296,7 +284,6 @@ # Combine the two data sets to one data set data_merge = np.concatenate((data01, data02), axis=None) -# Create new figure instance fig = pygmt.Figure() # Create histogram for data02 by using the combined data set @@ -309,8 +296,7 @@ fill="orange", pen="1p,darkgray,solid", histtype=0, - # The combined data set appears in the final histogram visually - # as data set data02 + # The combined data set appears in the final histogram visually as data set data02 label="data02", ) @@ -346,7 +332,6 @@ # Width used for binning the data binwidth = 10 -# Create new figure instance fig = pygmt.Figure() # Create histogram for data01 @@ -359,11 +344,11 @@ fill="red3", pen="1p,darkgray,solid", histtype=0, - # Calculate the bar width in respect to the bin width, here for two - # data sets half of the bin width - # Offset (+o) the bars to align each bar with the left limit of the - # corresponding bin - barwidth=f"{binwidth/2}+o-{binwidth/4}", + # Calculate the bar width in respect to the bin width, here for 
two data sets half + # of the bin width + # Offset ("+o") the bars to align each bar with the left limit of the corresponding + # bin + barwidth=f"{binwidth / 2}+o-{binwidth / 4}", label="data01", ) @@ -374,7 +359,7 @@ fill="orange", pen="1p,darkgray,solid", histtype=0, - barwidth=f"{binwidth/2}+o{binwidth/4}", + barwidth=f"{binwidth / 2}+o{binwidth / 4}", label="data02", ) diff --git a/examples/tutorials/advanced/configuration.py b/examples/tutorials/advanced/configuration.py index 03d5523fd4e..f7d6b70c057 100644 --- a/examples/tutorials/advanced/configuration.py +++ b/examples/tutorials/advanced/configuration.py @@ -2,8 +2,7 @@ Configuring PyGMT defaults ========================== -Default GMT parameters can be set globally or locally using -:class:`pygmt.config`. +Default GMT parameters can be set globally or locally using :class:`pygmt.config`. """ # %% @@ -13,9 +12,9 @@ # Configuring default GMT parameters # ---------------------------------- # -# Users can override default parameters either temporarily (locally) or -# permanently (globally) using :class:`pygmt.config`. The full list of default -# parameters that can be changed can be found at :gmt-docs:`gmt.conf.html`. +# Users can override default parameters either temporarily (locally) or permanently +# (globally) using :class:`pygmt.config`. The full list of default parameters that can +# be changed can be found at :gmt-docs:`gmt.conf.html`. # # We demonstrate the usage of :class:`pygmt.config` by configuring a map plot. @@ -31,18 +30,17 @@ # Globally overriding defaults # ---------------------------- # -# The ``MAP_FRAME_TYPE`` parameter specifies the style of map frame to use, of -# which there are 5 options: ``fancy`` (default, see above), ``fancy+``, -# ``plain``, ``graph`` (which does not apply to geographical maps) and -# ``inside``. 
+# The ``MAP_FRAME_TYPE`` parameter specifies the style of map frame to use, of which +# there are 5 options: ``fancy`` (default, see above), ``fancy+``, ``plain``, ``graph`` +# (which does not apply to geographical maps) and ``inside``. # -# The ``FORMAT_GEO_MAP`` parameter controls the format of geographical tick -# annotations. The default uses degrees and minutes. Here we specify the ticks -# to be a decimal number of degrees. +# The ``FORMAT_GEO_MAP`` parameter controls the format of geographical tick annotations. +# The default uses degrees and minutes. Here we specify the ticks to be a decimal number +# of degrees. fig = pygmt.Figure() -# Configuration for the 'current figure'. +# Configuration for the 'current figure' pygmt.config(MAP_FRAME_TYPE="plain") pygmt.config(FORMAT_GEO_MAP="ddd.xx") @@ -56,14 +54,13 @@ # Locally overriding defaults # --------------------------- # -# It is also possible to temporarily override the default parameters, which is -# very useful for limiting the scope of changes to a particular plot. -# :class:`pygmt.config` is implemented as a context manager, which handles the -# setup and teardown of a GMT session. Python users are likely familiar with -# the ``with open(...) as file:`` snippet, which returns a ``file`` context -# manager. In this way, it can be used to override a parameter for a single -# command, or a sequence of commands. An application of :class:`pygmt.config` -# as a context manager is shown below: +# It is also possible to temporarily override the default parameters, which is very +# useful for limiting the scope of changes to a particular plot. :class:`pygmt.config` +# is implemented as a context manager, which handles the setup and teardown of a GMT +# session. Python users are likely familiar with the ``with open(...) as file:`` +# snippet, which returns a ``file`` context manager. In this way, it can be used to +# override a parameter for a single command, or a sequence of commands. 
An application +# of :class:`pygmt.config` as a context manager is shown below: fig = pygmt.Figure() @@ -72,8 +69,8 @@ fig.basemap(region=[115, 119.5, 4, 7.5], projection="M10c", frame=True) fig.coast(land="black", water="skyblue") -# Shift plot origin down by 10cm to plot another map -fig.shift_origin(yshift="-10c") +# Shift plot origin down by the height of the figure to plot another map +fig.shift_origin(yshift="-h") # This figure retains the default "fancy" frame fig.basemap(region=[115, 119.5, 4, 7.5], projection="M10c", frame=True) diff --git a/examples/tutorials/advanced/date_time_charts.py b/examples/tutorials/advanced/date_time_charts.py index 1fcea100384..d198b3c7d70 100644 --- a/examples/tutorials/advanced/date_time_charts.py +++ b/examples/tutorials/advanced/date_time_charts.py @@ -196,7 +196,7 @@ # uses :func:`pandas.date_range` to fill the DataArray with data, but this is not # essential for the creation of a valid DataArray. -x = xr.DataArray(data=pd.date_range(start="2020-01-01", periods=4, freq="Q")) +x = xr.DataArray(data=pd.date_range(start="2020-01-01", periods=4, freq="QE")) y = [4, 7, 5, 6] fig = pygmt.Figure() @@ -331,7 +331,7 @@ # found at :gmt-term:`FORMAT_CLOCK_MAP`, :gmt-term:`FORMAT_CLOCK_IN`, and # :gmt-term:`FORMAT_CLOCK_OUT`. -x = pd.date_range("2021-04-15", periods=8, freq="6H") +x = pd.date_range("2021-04-15", periods=8, freq="6h") y = [2, 5, 3, 1, 5, 7, 9, 6] fig = pygmt.Figure() diff --git a/examples/tutorials/basics/frames.py b/examples/tutorials/basics/frames.py index 56b4e9ac130..b3fb466fa23 100644 --- a/examples/tutorials/basics/frames.py +++ b/examples/tutorials/basics/frames.py @@ -90,9 +90,9 @@ # :meth:`pygmt.Figure.basemap`. The map boundaries (or plot axes) are named as # West/west/left (**W**, **w**, **l**), South/south/bottom # (**S**, **s**, **b**), North/north/top (**N**, **n**, **t**), and -# East/east/right (**E**, **e**, **r**) sides of a figure. 
If an upper-case +# East/east/right (**E**, **e**, **r**) sides of a figure. If an uppercase # letter (**W**, **S**, **N**, **E**) is passed, the axis is plotted with -# tick marks and annotations. The lower-case version +# tick marks and annotations. The lowercase version # (**w**, **s**, **n**, **e**) plots the axis only with tick marks. # To only plot the axis pass **l**, **b**, **t**, **r**. By default # (``frame=True`` or ``frame="af"``), the West and the South axes are diff --git a/examples/tutorials/basics/plot.py b/examples/tutorials/basics/plot.py index babfb60d771..42cb8aad5ae 100644 --- a/examples/tutorials/basics/plot.py +++ b/examples/tutorials/basics/plot.py @@ -18,7 +18,9 @@ # The data are loaded as a :class:`pandas.DataFrame`. data = pygmt.datasets.load_sample_data(name="japan_quakes") +data.head() +# %% # Set the region for the plot to be slightly larger than the data bounds. region = [ data.longitude.min() - 1, @@ -26,9 +28,7 @@ data.latitude.min() - 1, data.latitude.max() + 1, ] - -print(region) -print(data.head()) +region # %% # We'll use the :meth:`pygmt.Figure.plot` method to plot circles on the diff --git a/examples/tutorials/basics/polygons.py b/examples/tutorials/basics/polygons.py new file mode 100644 index 00000000000..edd90fea6d8 --- /dev/null +++ b/examples/tutorials/basics/polygons.py @@ -0,0 +1,100 @@ +""" +Plotting polygons +================= + +Plotting polygons is handled by the :meth:`pygmt.Figure.plot` method. + +This tutorial focuses on input data given as NumPy arrays. Besides NumPy arrays, +array-like objects are supported. Here, a polygon is a closed shape defined by a series +of data points with x and y coordinates, connected by line segments, with the start and +end points being identical. For plotting a :class:`geopandas.GeoDataFrame` object with +polygon geometries, e.g., to create a choropleth map, see the gallery example +:doc:`Choropleth map `. 
+""" + +# %% +import numpy as np +import pygmt + +# %% +# Plot polygons +# ------------- +# +# Set up sample data points as NumPy arrays for the x and y values. + +x = np.array([-2, 1, 3, 0, -4, -2]) +y = np.array([-3, -1, 1, 3, 2, -3]) + +# %% +# Create a Cartesian plot via the :meth:`pygmt.Figure.basemap` method. Pass arrays to +# the ``x`` and ``y`` parameters of the :meth:`pygmt.Figure.plot` method. Without +# further adjustments, lines are drawn between the data points. By default, the lines +# are 0.25-points thick, black, and solid. In this example, the data points are chosen +# to make the lines form a polygon. + +fig = pygmt.Figure() +fig.basemap(region=[-5, 5, -5, 5], projection="X5c", frame=True) +fig.plot(x=x, y=y) +fig.show() + +# %% +# The ``pen`` parameter can be used to adjust the lines or outline of the polygon. The +# argument passed to ``pen`` is one string with the comma-separated optional values +# *width*,\ *color*,\ *style*. + +fig = pygmt.Figure() +fig.basemap(region=[-5, 5, -5, 5], projection="X5c", frame=True) +# Use a 2-points thick, darkred, dashed outline +fig.plot(x=x, y=y, pen="2p,darkred,dashed") +fig.show() + +# %% +# Use the ``fill`` parameter to fill the polygon with a color or +# :doc:`pattern `. Note, that there are no lines drawn between the +# data points by default if ``fill`` is used. Use the ``pen`` parameter to add an +# outline around the polygon. + +fig = pygmt.Figure() +fig.basemap(region=[-5, 5, -5, 5], projection="X5c", frame=True) +# Fill the polygon with color "orange" +fig.plot(x=x, y=y, fill="orange") +fig.show() + + +# %% +# Close polygons +# -------------- +# +# Set up sample data points as NumPy arrays for the x and y values. Now, the data points +# do not form a polygon. + +x = np.array([-2, 1, 3, 0, -4]) +y = np.array([-3, -1, 1, 3, 2]) + +# %% +# The ``close`` parameter can be used to force the polygon to be closed. 
+ +fig = pygmt.Figure() +fig.basemap(region=[-5, 5, -5, 5], projection="X5c", frame=True) +fig.plot(x=x, y=y, pen=True) + +fig.shift_origin(xshift="w+1c") + +fig.basemap(region=[-5, 5, -5, 5], projection="X5c", frame=True) +fig.plot(x=x, y=y, pen=True, close=True) +fig.show() + +# %% +# When using the ``fill`` parameter, the polygon is automatically closed. + +fig = pygmt.Figure() +fig.basemap(region=[-5, 5, -5, 5], projection="X5c", frame=True) +fig.plot(x=x, y=y, pen=True) + +fig.shift_origin(xshift="w+1c") + +fig.basemap(region=[-5, 5, -5, 5], projection="X5c", frame=True) +fig.plot(x=x, y=y, pen=True, fill="orange") +fig.show() + +# sphinx_gallery_thumbnail_number = 5 diff --git a/pygmt/_show_versions.py b/pygmt/_show_versions.py index e529f053e46..f0d4b4e3c2f 100644 --- a/pygmt/_show_versions.py +++ b/pygmt/_show_versions.py @@ -16,7 +16,7 @@ from pygmt.clib import Session, __gmt_version__ # Get semantic version through setuptools-scm -__version__ = f'v{version("pygmt")}' # e.g. v0.1.2.dev3+g0ab3cd78 +__version__ = f"v{version('pygmt')}" # e.g. v0.1.2.dev3+g0ab3cd78 __commit__ = __version__.split("+g")[-1] if "+g" in __version__ else "" # 0ab3cd78 @@ -85,7 +85,7 @@ def _check_ghostscript_version(gs_version: str | None) -> str | None: return None -def show_versions(file: TextIO | None = sys.stdout): +def show_versions(file: TextIO | None = sys.stdout) -> None: """ Print various dependency versions which are useful when submitting bug reports. diff --git a/pygmt/_typing.py b/pygmt/_typing.py index bbc7d596c65..4a57c3c7678 100644 --- a/pygmt/_typing.py +++ b/pygmt/_typing.py @@ -2,7 +2,17 @@ Type aliases for type hints. 
""" +import contextlib +import importlib +from collections.abc import Sequence from typing import Literal +import numpy as np + # Anchor codes AnchorCode = Literal["TL", "TC", "TR", "ML", "MC", "MR", "BL", "BC", "BR"] + +# String array types +StringArrayTypes = Sequence[str] | np.ndarray +with contextlib.suppress(ImportError): + StringArrayTypes |= importlib.import_module(name="pyarrow").StringArray diff --git a/pygmt/accessors.py b/pygmt/accessors.py index 0b401e43256..ec73f69238f 100644 --- a/pygmt/accessors.py +++ b/pygmt/accessors.py @@ -138,10 +138,11 @@ def registration(self): @registration.setter def registration(self, value): if value not in {0, 1}: - raise GMTInvalidInput( + msg = ( f"Invalid grid registration value: {value}, should be either " "0 for Gridline registration or 1 for Pixel registration." ) + raise GMTInvalidInput(msg) self._registration = value @property @@ -154,8 +155,9 @@ def gtype(self): @gtype.setter def gtype(self, value): if value not in {0, 1}: - raise GMTInvalidInput( + msg = ( f"Invalid coordinate system type: {value}, should be " "either 0 for Cartesian or 1 for Geographic." ) + raise GMTInvalidInput(msg) self._gtype = value diff --git a/pygmt/clib/conversion.py b/pygmt/clib/conversion.py index 59fa0d584cd..7823aa32103 100644 --- a/pygmt/clib/conversion.py +++ b/pygmt/clib/conversion.py @@ -2,6 +2,7 @@ Functions to convert data types into ctypes friendly formats. """ +import contextlib import ctypes as ctp import warnings from collections.abc import Sequence @@ -156,25 +157,63 @@ def _to_numpy(data: Any) -> np.ndarray: array The C contiguous NumPy array. """ - # Mapping of unsupported dtypes to the expected NumPy dtype. - dtypes: dict[str, str | type] = { + # Mapping of unsupported dtypes to expected NumPy dtypes. + dtypes: dict[str, type | str] = { + # For string dtypes. 
+ "large_string": np.str_, # pa.large_string and pa.large_utf8 + "string": np.str_, # pa.string, pa.utf8, pd.StringDtype + "string_view": np.str_, # pa.string_view + # For datetime dtypes. "date32[day][pyarrow]": "datetime64[D]", "date64[ms][pyarrow]": "datetime64[ms]", } + # The dtype for the input object. + dtype = getattr(data, "dtype", getattr(data, "type", "")) + # The numpy dtype for the result numpy array, but can be None. + numpy_dtype = dtypes.get(str(dtype)) + + # TODO(pandas>=2.2): Remove the workaround for pandas<2.2. + # + # pandas numeric dtypes were converted to np.object_ dtype prior pandas 2.2, and are + # converted to suitable NumPy dtypes since pandas 2.2. Refer to the following link + # for details: https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#to-numpy-for-numpy-nullable-and-arrow-types-converts-to-suitable-numpy-dtype if ( - hasattr(data, "isna") - and data.isna().any() - and Version(pd.__version__) < Version("2.2") - ): - # Workaround for dealing with pd.NA with pandas < 2.2. - # Bug report at: https://github.com/GenericMappingTools/pygmt/issues/2844 - # Following SPEC0, pandas 2.1 will be dropped in 2025 Q3, so it's likely - # we can remove the workaround in PyGMT v0.17.0. - array = np.ascontiguousarray(data.astype(float)) - else: - vec_dtype = str(getattr(data, "dtype", "")) - array = np.ascontiguousarray(data, dtype=dtypes.get(vec_dtype)) + Version(pd.__version__) < Version("2.2") # pandas < 2.2 only. + and hasattr(data, "dtype") # NumPy array or pandas objects only. + and hasattr(data.dtype, "numpy_dtype") # pandas dtypes only. + and data.dtype.kind in "iuf" # Numeric dtypes only. + ): # pandas Series/Index with pandas nullable numeric dtypes. + # The numpy dtype of the result numpy array. + numpy_dtype = data.dtype.numpy_dtype + if getattr(data, "hasnans", False): + if data.dtype.kind in "iu": + # Integers with missing values are converted to float64. 
+ numpy_dtype = np.float64 + data = data.to_numpy(na_value=np.nan) + + # Deal with timezone-aware datetime dtypes. + if isinstance(dtype, pd.DatetimeTZDtype): # pandas.DatetimeTZDtype + numpy_dtype = getattr(dtype, "base", None) + elif isinstance(dtype, pd.ArrowDtype) and hasattr(dtype.pyarrow_dtype, "tz"): + # pd.ArrowDtype[pa.Timestamp] + numpy_dtype = getattr(dtype, "numpy_dtype", None) + # TODO(pandas>=2.1): Remove the workaround for pandas<2.1. + if Version(pd.__version__) < Version("2.1"): + # In pandas 2.0, dtype.numpy_type is dtype("O"). + numpy_dtype = np.dtype(f"M8[{dtype.pyarrow_dtype.unit}]") # type: ignore[assignment, attr-defined] + + array = np.ascontiguousarray(data, dtype=numpy_dtype) + + # Check if a np.object_ or np.str_ array can be converted to np.datetime64. + if array.dtype.type in {np.object_, np.str_}: + with contextlib.suppress(TypeError, ValueError): + return np.ascontiguousarray(array, dtype=np.datetime64) + + # Check if a np.object_ array can be converted to np.str_. + if array.dtype == np.object_: + with contextlib.suppress(TypeError, ValueError): + return np.ascontiguousarray(array, dtype=np.str_) return array @@ -275,12 +314,13 @@ def sequence_to_ctypes_array( def strings_to_ctypes_array(strings: Sequence[str] | np.ndarray) -> ctp.Array: """ - Convert a sequence (e.g., a list) of strings into a ctypes array. + Convert a sequence (e.g., a list) of strings or numpy.ndarray of strings into a + ctypes array. Parameters ---------- strings - A sequence of strings. + A sequence of strings, or a numpy.ndarray of str dtype. 
Returns ------- @@ -295,6 +335,13 @@ def strings_to_ctypes_array(strings: Sequence[str] | np.ndarray) -> ctp.Array: >>> [s.decode() for s in ctypes_array] ['first', 'second', 'third'] + + >>> strings = np.array(["first", "second", "third"]) + >>> ctypes_array = strings_to_ctypes_array(strings) + >>> type(ctypes_array) + + >>> [s.decode() for s in ctypes_array] + ['first', 'second', 'third'] """ return (ctp.c_char_p * len(strings))(*[s.encode() for s in strings]) diff --git a/pygmt/clib/loading.py b/pygmt/clib/loading.py index ee8c97fcb1a..c6bc446c8e0 100644 --- a/pygmt/clib/loading.py +++ b/pygmt/clib/loading.py @@ -118,7 +118,8 @@ def clib_names(os_name: str) -> list[str]: case "win32": # Windows libnames = ["gmt.dll", "gmt_w64.dll", "gmt_w32.dll"] case _: - raise GMTOSError(f"Operating system '{os_name}' is not supported.") + msg = f"Operating system '{os_name}' is not supported." + raise GMTOSError(msg) return libnames diff --git a/pygmt/clib/session.py b/pygmt/clib/session.py index 10c8770adaa..179737d35f9 100644 --- a/pygmt/clib/session.py +++ b/pygmt/clib/session.py @@ -173,7 +173,8 @@ def session_pointer(self) -> ctp.c_void_p: the context manager). """ if getattr(self, "_session_pointer", None) is None: - raise GMTCLibNoSessionError("No currently open GMT API session.") + msg = "No currently open GMT API session." + raise GMTCLibNoSessionError(msg) return self._session_pointer @session_pointer.setter @@ -276,7 +277,8 @@ def get_enum(self, name: str) -> int: session = None value = c_get_enum(session, name.encode()) if value is None or value == -99999: - raise GMTCLibError(f"Constant '{name}' doesn't exist in libgmt.") + msg = f"Constant '{name}' doesn't exist in libgmt." + raise GMTCLibError(msg) return value def get_libgmt_func( @@ -325,7 +327,7 @@ def get_libgmt_func( function.restype = restype return function - def create(self, name: str): + def create(self, name: str) -> None: """ Create a new GMT C API session. 
@@ -382,7 +384,8 @@ def print_func(file_pointer, message): # noqa: ARG001 We'll capture the messages and print them to stderr so that they will show up on the Jupyter notebook. """ - # Have to use try..except due to upstream GMT bug in GMT <= 6.5.0. + # TODO(GMT>6.5.0): Remove the workaround for upstream bug in GMT<=6.5.0. + # Have to use try..except due to upstream GMT bug in GMT<=6.5.0. # See https://github.com/GenericMappingTools/pygmt/issues/3205. try: message = message.decode().strip() @@ -398,7 +401,9 @@ def print_func(file_pointer, message): # noqa: ARG001 self._print_callback = print_func padding = self["GMT_PAD_DEFAULT"] - session_type = self["GMT_SESSION_EXTERNAL"] + # GMT_SESSION_EXTERNAL: GMT is called by an external wrapper. + # GMT_SESSION_NOGDALCLOSE: Do not call GDALDestroyDriverManager when using GDAL. + session_type = self["GMT_SESSION_EXTERNAL"] + self["GMT_SESSION_NOGDALCLOSE"] session = c_create_session(name.encode(), padding, session_type, print_func) if session is None: @@ -558,7 +563,8 @@ def get_common(self, option: str) -> bool | int | float | np.ndarray: pygmt.exceptions.GMTInvalidInput: Unknown GMT common option flag 'A'. """ if option not in "BIJRUVXYabfghinoprst:": - raise GMTInvalidInput(f"Unknown GMT common option flag '{option}'.") + msg = f"Unknown GMT common option flag '{option}'." + raise GMTInvalidInput(msg) c_get_common = self.get_libgmt_func( "GMT_Get_Common", @@ -588,7 +594,7 @@ def get_common(self, option: str) -> bool | int | float | np.ndarray: case _: # 'status' is the option value (in integer type). return status - def call_module(self, module: str, args: str | list[str]): + def call_module(self, module: str, args: str | list[str]) -> None: """ Call a GMT module with the given arguments. 
@@ -940,7 +946,9 @@ def _check_dtype_and_dim(self, array: np.ndarray, ndim: int) -> int: raise GMTInvalidInput(msg) return self[DTYPES[dtype]] - def put_vector(self, dataset: ctp.c_void_p, column: int, vector: np.ndarray): + def put_vector( + self, dataset: ctp.c_void_p, column: int, vector: np.ndarray + ) -> None: r""" Attach a 1-D numpy array as a column on a GMT dataset. @@ -999,7 +1007,9 @@ def put_vector(self, dataset: ctp.c_void_p, column: int, vector: np.ndarray): ) raise GMTCLibError(msg) - def put_strings(self, dataset: ctp.c_void_p, family: str, strings: np.ndarray): + def put_strings( + self, dataset: ctp.c_void_p, family: str, strings: np.ndarray + ) -> None: """ Attach a 1-D numpy array of dtype str as a column on a GMT dataset. @@ -1053,7 +1063,9 @@ def put_strings(self, dataset: ctp.c_void_p, family: str, strings: np.ndarray): msg = f"Failed to put strings of type {strings.dtype} into dataset." raise GMTCLibError(msg) - def put_matrix(self, dataset: ctp.c_void_p, matrix: np.ndarray, pad: int = 0): + def put_matrix( + self, dataset: ctp.c_void_p, matrix: np.ndarray, pad: int = 0 + ) -> None: """ Attach a 2-D numpy array to a GMT dataset. @@ -1194,10 +1206,11 @@ def read_data( data, ) if data_ptr is None: - raise GMTCLibError(f"Failed to read dataset from '{infile}'.") + msg = f"Failed to read dataset from '{infile}'." + raise GMTCLibError(msg) return ctp.cast(data_ptr, ctp.POINTER(dtype)) - def write_data(self, family, geometry, mode, wesn, output, data): + def write_data(self, family, geometry, mode, wesn, output, data) -> None: """ Write a GMT data container to a file. @@ -1264,7 +1277,8 @@ def write_data(self, family, geometry, mode, wesn, output, data): data, ) if status != 0: - raise GMTCLibError(f"Failed to write dataset to '{output}'") + msg = f"Failed to write dataset to '{output}'." 
+ raise GMTCLibError(msg) @contextlib.contextmanager def open_virtualfile( @@ -1381,23 +1395,6 @@ def open_virtualfile( msg = f"Failed to close virtual file '{vfname}'." raise GMTCLibError(msg) - def open_virtual_file(self, family, geometry, direction, data): - """ - Open a GMT virtual file associated with a data object for reading or writing. - - .. deprecated: 0.11.0 - - Will be removed in v0.15.0. Use :meth:`pygmt.clib.Session.open_virtualfile` - instead. - """ - msg = ( - "API function `Session.open_virtual_file()' has been deprecated " - "since v0.11.0 and will be removed in v0.15.0. " - "Use `Session.open_virtualfile()' instead." - ) - warnings.warn(msg, category=FutureWarning, stacklevel=2) - return self.open_virtualfile(family, geometry, direction, data) - @contextlib.contextmanager def virtualfile_from_vectors( self, vectors: Sequence, *args @@ -1447,9 +1444,9 @@ def virtualfile_from_vectors( ... print(fout.read().strip()) : N = 3 <1/3> <4/6> <7/9> """ + # TODO(PyGMT>=0.16.0): Remove the "*args" parameter and related codes. # "*args" is added in v0.14.0 for backward-compatibility with the deprecated # syntax of passing multiple vectors as positional arguments. - # Remove it in v0.16.0. if len(args) > 0: msg = ( "Passing multiple arguments to Session.virtualfile_from_vectors is " @@ -1475,7 +1472,7 @@ def virtualfile_from_vectors( # 2 columns contains coordinates like longitude, latitude, or datetime string # types. 
for col, array in enumerate(arrays[2:]): - if pd.api.types.is_string_dtype(array.dtype): + if np.issubdtype(array.dtype, np.str_): columns = col + 2 break @@ -1506,9 +1503,9 @@ def virtualfile_from_vectors( strings = string_arrays[0] elif len(string_arrays) > 1: strings = np.array( - [" ".join(vals) for vals in zip(*string_arrays, strict=True)] + [" ".join(vals) for vals in zip(*string_arrays, strict=True)], + dtype=np.str_, ) - strings = np.asanyarray(a=strings, dtype=np.str_) self.put_strings( dataset, family="GMT_IS_VECTOR|GMT_IS_DUPLICATE", strings=strings ) @@ -1856,9 +1853,8 @@ def virtualfile_in( elif check_kind == "vector": valid_kinds += ("empty", "matrix", "vectors", "geojson") if kind not in valid_kinds: - raise GMTInvalidInput( - f"Unrecognized data type for {check_kind}: {type(data)}" - ) + msg = f"Unrecognized data type for {check_kind}: {type(data)}." + raise GMTInvalidInput(msg) # Decide which virtualfile_from_ function to use _virtualfile_from = { @@ -1912,42 +1908,6 @@ def virtualfile_in( file_context = _virtualfile_from(_data) return file_context - def virtualfile_from_data( - self, - check_kind=None, - data=None, - x=None, - y=None, - z=None, - extra_arrays=None, - required_z=False, - required_data=True, - ): - """ - Store any data inside a virtual file. - - .. deprecated: 0.13.0 - - Will be removed in v0.15.0. Use :meth:`pygmt.clib.Session.virtualfile_in` - instead. - """ - msg = ( - "API function 'Session.virtualfile_from_data()' has been deprecated since " - "v0.13.0 and will be removed in v0.15.0. Use 'Session.virtualfile_in()' " - "instead." 
- ) - warnings.warn(msg, category=FutureWarning, stacklevel=2) - return self.virtualfile_in( - check_kind=check_kind, - data=data, - x=x, - y=y, - z=z, - extra_arrays=extra_arrays, - required_z=required_z, - required_data=required_data, - ) - @contextlib.contextmanager def virtualfile_out( self, @@ -2110,7 +2070,8 @@ def read_virtualfile( if kind is None: # Return the ctypes void pointer return pointer if kind == "cube": - raise NotImplementedError(f"kind={kind} is not supported yet.") + msg = f"kind={kind} is not supported yet." + raise NotImplementedError(msg) dtype = {"dataset": _GMT_DATASET, "grid": _GMT_GRID, "image": _GMT_IMAGE}[kind] return ctp.cast(pointer, ctp.POINTER(dtype)) @@ -2378,5 +2339,6 @@ def extract_region(self) -> np.ndarray: region.ctypes.data_as(ctp.POINTER(ctp.c_double)), ) if status != 0: - raise GMTCLibError("Failed to extract region from current figure.") + msg = "Failed to extract region from current figure." + raise GMTCLibError(msg) return region diff --git a/pygmt/conftest.py b/pygmt/conftest.py index bc896d44732..ee491b7f8bc 100644 --- a/pygmt/conftest.py +++ b/pygmt/conftest.py @@ -5,7 +5,7 @@ import numpy as np from packaging.version import Version -# Keep this until we require numpy to be >=2.0 +# TODO(NumPy>=2.0): Remove the conftest.py file. # Address https://github.com/GenericMappingTools/pygmt/issues/2628. 
if Version(np.__version__) >= Version("2.0.0.dev0+git20230726"): np.set_printoptions(legacy="1.25") # type: ignore[arg-type] diff --git a/pygmt/datasets/__init__.py b/pygmt/datasets/__init__.py index d70eec5a1de..3d44a8fe676 100644 --- a/pygmt/datasets/__init__.py +++ b/pygmt/datasets/__init__.py @@ -6,10 +6,16 @@ from pygmt.datasets.earth_age import load_earth_age from pygmt.datasets.earth_day import load_blue_marble +from pygmt.datasets.earth_deflection import load_earth_deflection +from pygmt.datasets.earth_dist import load_earth_dist from pygmt.datasets.earth_free_air_anomaly import load_earth_free_air_anomaly from pygmt.datasets.earth_geoid import load_earth_geoid from pygmt.datasets.earth_magnetic_anomaly import load_earth_magnetic_anomaly from pygmt.datasets.earth_mask import load_earth_mask +from pygmt.datasets.earth_mean_dynamic_topography import ( + load_earth_mean_dynamic_topography, +) +from pygmt.datasets.earth_mean_sea_surface import load_earth_mean_sea_surface from pygmt.datasets.earth_night import load_black_marble from pygmt.datasets.earth_relief import load_earth_relief from pygmt.datasets.earth_vertical_gravity_gradient import ( diff --git a/pygmt/datasets/earth_deflection.py b/pygmt/datasets/earth_deflection.py new file mode 100644 index 00000000000..c0a9cbf406f --- /dev/null +++ b/pygmt/datasets/earth_deflection.py @@ -0,0 +1,119 @@ +""" +Function to download the IGPP Earth east-west and north-south deflection datasets from +the GMT data server, and load as :class:`xarray.DataArray`. + +The grids are available in various resolutions. 
+""" + +from collections.abc import Sequence +from typing import Literal + +import xarray as xr +from pygmt.datasets.load_remote_dataset import _load_remote_dataset + +__doctest_skip__ = ["load_earth_deflection"] + + +def load_earth_deflection( + resolution: Literal[ + "01d", "30m", "20m", "15m", "10m", "06m", "05m", "04m", "03m", "02m", "01m" + ] = "01d", + region: Sequence[float] | str | None = None, + registration: Literal["gridline", "pixel", None] = None, + component: Literal["east", "north"] = "east", +) -> xr.DataArray: + r""" + Load the IGPP Earth east-west and north-south deflection datasets in various + resolutions. + + .. list-table:: + :widths: 50 50 + :header-rows: 1 + + * - IGPP Earth east-west deflection + - IGPP Earth north-south deflection + * - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_edefl.jpg + - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_ndefl.jpg + + The grids are downloaded to a user data directory (usually + ``~/.gmt/server/earth/earth_edefl/`` and ``~/.gmt/server/earth/earth_ndefl/`` the + first time you invoke this function. Afterwards, it will load the grid from the + data directory. So you'll need an internet connection the first time around. + + These grids can also be accessed by passing in the file name + **@**\ *earth_defl_type*\_\ *res*\[_\ *reg*] to any grid processing function or + plotting method. *earth_defl_type* is the GMT name for the dataset. The available + options are **earth_edefl** and **earth_ndefl**. *res* is the grid resolution (see + below), and *reg* is the grid registration type (**p** for pixel registration or + **g** for gridline registration). + + The default color palette table (CPTs) for this dataset is *@earth_defl.cpt*. It's + implicitly used when passing in the file name of the dataset to any grid plotting + method if no CPT is explicitly specified. 
When the dataset is loaded and plotted as + an :class:`xarray.DataArray` object, the default CPT is ignored, and GMT's default + CPT (*turbo*) is used. To use the dataset-specific CPT, you need to explicitly set + ``cmap="@earth_defl.cpt"``. + + Refer to :gmt-datasets:`earth-edefl.html` and :gmt-datasets:`earth-ndefl.html` for + more details about available datasets, including version information and references. + + Parameters + ---------- + resolution + The grid resolution. The suffix ``d`` and ``m`` stand for arc-degrees and + arc-minutes. + region + The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, + *ymin*, *ymax*] or an ISO country code. Required for grids with resolutions + higher than 5 arc-minutes (i.e., ``"05m"``). + registration + Grid registration type. Either ``"pixel"`` for pixel registration or + ``"gridline"`` for gridline registration. Default is ``None``, which means + ``"gridline"`` for all resolutions except ``"01m"`` which is ``"pixel"`` only. + component + By default, the east-west deflection (``component="east"``) is returned, + set ``component="north"`` to return the north-south deflection. + + Returns + ------- + grid + The Earth east-west or north-south deflection grid. Coordinates are latitude + and longitude in degrees. Deflection values are in micro-radians, where + positive (negative) values indicate a deflection to the east or north (west + or south). + + Note + ---- + The registration and coordinate system type of the returned + :class:`xarray.DataArray` grid can be accessed via the GMT accessors (i.e., + ``grid.gmt.registration`` and ``grid.gmt.gtype`` respectively). However, these + properties may be lost after specific grid operations (such as slicing) and will + need to be manually set before passing the grid to any PyGMT data processing or + plotting functions. Refer to :class:`pygmt.GMTDataArrayAccessor` for detailed + explanations and workarounds. 
+ + Examples + -------- + + >>> from pygmt.datasets import load_earth_deflection + >>> # load the default grid for east-west deflection (gridline-registered + >>> # 1 arc-degree grid) + >>> grid = load_earth_deflection() + >>> # load the default grid for north-south deflection + >>> grid = load_earth_deflection(component="north") + >>> # load the 30 arc-minutes grid with "gridline" registration + >>> grid = load_earth_deflection(resolution="30m", registration="gridline") + >>> # load high-resolution (5 arc-minutes) grid for a specific region + >>> grid = load_earth_deflection( + ... resolution="05m", region=[120, 160, 30, 60], registration="gridline" + ... ) + """ + prefix = "earth_ndefl" if component == "north" else "earth_edefl" + grid = _load_remote_dataset( + name=prefix, + prefix=prefix, + resolution=resolution, + region=region, + registration=registration, + ) + return grid diff --git a/pygmt/datasets/earth_dist.py b/pygmt/datasets/earth_dist.py new file mode 100644 index 00000000000..4897c475b43 --- /dev/null +++ b/pygmt/datasets/earth_dist.py @@ -0,0 +1,105 @@ +""" +Function to download the GSHHG Earth distance to shoreline dataset from the GMT data +server, and load as :class:`xarray.DataArray`. + +The grids are available in various resolutions. +""" + +from collections.abc import Sequence +from typing import Literal + +import xarray as xr +from pygmt.datasets.load_remote_dataset import _load_remote_dataset + +__doctest_skip__ = ["load_earth_dist"] + + +def load_earth_dist( + resolution: Literal[ + "01d", "30m", "20m", "15m", "10m", "06m", "05m", "04m", "03m", "02m", "01m" + ] = "01d", + region: Sequence[float] | str | None = None, + registration: Literal["gridline", "pixel"] = "gridline", +) -> xr.DataArray: + r""" + Load the GSHHG Earth distance to shoreline dataset in various resolutions. + + .. 
figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_dist.jpg + :width: 80 % + :align: center + + GSHHG Earth distance to shoreline dataset. + + The grids are downloaded to a user data directory (usually + ``~/.gmt/server/earth/earth_dist/``) the first time you invoke this function. + Afterwards, it will load the grid from the data directory. So you'll need an + internet connection the first time around. + + These grids can also be accessed by passing in the file name + **@earth_dist**\_\ *res*\[_\ *reg*] to any grid processing function or plotting + method. *res* is the grid resolution (see below), and *reg* is the grid registration + type (**p** for pixel registration or **g** for gridline registration). + + The default color palette table (CPT) for this dataset is *@earth_dist.cpt*. It's + implicitly used when passing in the file name of the dataset to any grid plotting + method if no CPT is explicitly specified. When the dataset is loaded and plotted + as an :class:`xarray.DataArray` object, the default CPT is ignored, and GMT's + default CPT (*turbo*) is used. To use the dataset-specific CPT, you need to + explicitly set ``cmap="@earth_dist.cpt"``. + + Refer to :gmt-datasets:`earth-dist.html` for more details about available datasets, + including version information and references. + + Parameters + ---------- + resolution + The grid resolution. The suffix ``d`` and ``m`` stand for arc-degrees and + arc-minutes. + region + The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, + *ymin*, *ymax*] or an ISO country code. Required for grids with resolutions + higher than 5 arc-minutes (i.e., ``"05m"``). + registration + Grid registration type. Either ``"pixel"`` for pixel registration or + ``"gridline"`` for gridline registration. + + Returns + ------- + grid + The GSHHG Earth distance to shoreline grid. Coordinates are latitude and + longitude in degrees. 
Distances are in kilometers, where positive (negative) + values mean land to coastline (ocean to coastline). + + Note + ---- + The registration and coordinate system type of the returned + :class:`xarray.DataArray` grid can be accessed via the GMT accessors (i.e., + ``grid.gmt.registration`` and ``grid.gmt.gtype`` respectively). However, these + properties may be lost after specific grid operations (such as slicing) and will + need to be manually set before passing the grid to any PyGMT data processing or + plotting functions. Refer to :class:`pygmt.GMTDataArrayAccessor` for detailed + explanations and workarounds. + + Examples + -------- + + >>> from pygmt.datasets import load_earth_dist + >>> # load the default grid (gridline-registered 1 arc-degree grid) + >>> grid = load_earth_dist() + >>> # load the 30 arc-minutes grid with "gridline" registration + >>> grid = load_earth_dist(resolution="30m", registration="gridline") + >>> # load high-resolution (5 arc-minutes) grid for a specific region + >>> grid = load_earth_dist( + ... resolution="05m", + ... region=[120, 160, 30, 60], + ... registration="gridline", + ... ) + """ + grid = _load_remote_dataset( + name="earth_dist", + prefix="earth_dist", + resolution=resolution, + region=region, + registration=registration, + ) + return grid diff --git a/pygmt/datasets/earth_free_air_anomaly.py b/pygmt/datasets/earth_free_air_anomaly.py index da48977d688..d85911496d6 100644 --- a/pygmt/datasets/earth_free_air_anomaly.py +++ b/pygmt/datasets/earth_free_air_anomaly.py @@ -1,6 +1,6 @@ """ -Function to download the IGPP Earth free-air anomaly dataset from the GMT data server, -and load as :class:`xarray.DataArray`. +Function to download the IGPP Earth free-air anomaly and uncertainty datasets from +the GMT data server, and load as :class:`xarray.DataArray`. The grids are available in various resolutions. 
""" @@ -20,36 +20,43 @@ def load_earth_free_air_anomaly( ] = "01d", region: Sequence[float] | str | None = None, registration: Literal["gridline", "pixel", None] = None, + uncertainty: bool = False, ) -> xr.DataArray: r""" - Load the IGPP Earth free-air anomaly dataset in various resolutions. + Load the IGPP Earth free-air anomaly and uncertainty datasets in various + resolutions. - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_faa.jpg - :width: 80 % - :align: center + .. list-table:: + :widths: 50 50 + :header-rows: 1 - IGPP Earth free-air anomaly dataset. + * - IGPP Earth free-air anomaly + - IGPP Earth free-air anomaly uncertainty + * - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_faa.jpg + - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_faaerror.jpg - The grids are downloaded to a user data directory - (usually ``~/.gmt/server/earth/earth_faa/``) the first time you invoke - this function. Afterwards, it will load the grid from the data directory. - So you'll need an internet connection the first time around. + The grids are downloaded to a user data directory (usually + ``~/.gmt/server/earth/earth_faa/`` or ``~/.gmt/server/earth/earth_faaerror/``) the + first time you invoke this function. Afterwards, it will load the grid from data + directory. So you'll need an internet connection the first time around. These grids can also be accessed by passing in the file name - **@earth_faa**\_\ *res*\[_\ *reg*] to any grid processing function or - plotting method. *res* is the grid resolution (see below), and *reg* is - the grid registration type (**p** for pixel registration or **g** for - gridline registration). - - The default color palette table (CPT) for this dataset is *@earth_faa.cpt*. - It's implicitly used when passing in the file name of the dataset to any - grid plotting method if no CPT is explicitly specified. 
When the dataset - is loaded and plotted as an :class:`xarray.DataArray` object, the default - CPT is ignored, and GMT's default CPT (*turbo*) is used. To use the - dataset-specific CPT, you need to explicitly set ``cmap="@earth_faa.cpt"``. - - Refer to :gmt-datasets:`earth-faa.html` for more details about available - datasets, including version information and references. + **@earth_faa_type**\_\ *res*\[_\ *reg*] to any grid processing function or + plotting method. *earth_faa_type* is the GMT name for the dataset. The available + options are **earth_faa** and **earth_faaerror**. *res* is the grid resolution (see + below), and *reg* is the grid registration type (**p** for pixel registration or + **g** for gridline registration). + + The default color palette tables (CPTs) for these datasets are *@earth_faa.cpt* and + *@earth_faaerror.cpt*. The dataset-specific CPT is implicitly used when passing in + the file name of the dataset to any grid plotting method if no CPT is explicitly + specified. When the dataset is loaded and plotted as an :class:`xarray.DataArray` + object, the default CPT is ignored, and GMT's default CPT (*turbo*) is used. To use + the dataset-specific CPT, you need to explicitly set ``cmap="@earth_faa.cpt"`` or + ``cmap="@earth_faaerror.cpt"``. + + Refer to :gmt-datasets:`earth-faa.html` and :gmt-datasets:`earth-faaerror.html` for + more details about available datasets, including version information and references. Parameters ---------- @@ -62,26 +69,28 @@ def load_earth_free_air_anomaly( higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means - ``"gridline"`` for all resolutions except ``"01m"`` which is - ``"pixel"`` only. + ``"gridline"`` for gridline registration. Default is ``None``, which means + ``"gridline"`` for all resolutions except ``"01m"`` which is ``"pixel"`` + only. 
+ uncertainty + By default, the Earth free-air anomaly values are returned. Set to ``True`` to + return the related uncertainties instead. Returns ------- grid - The Earth free-air anomaly grid. Coordinates are latitude and - longitude in degrees. Units are in mGal. + The Earth free-air anomaly (uncertainty) grid. Coordinates are latitude and + longitude in degrees. Values and uncertainties are in mGal. Note ---- The registration and coordinate system type of the returned - :class:`xarray.DataArray` grid can be accessed via the GMT accessors - (i.e., ``grid.gmt.registration`` and ``grid.gmt.gtype`` respectively). - However, these properties may be lost after specific grid operations (such - as slicing) and will need to be manually set before passing the grid to any - PyGMT data processing or plotting functions. Refer to - :class:`pygmt.GMTDataArrayAccessor` for detailed explanations and - workarounds. + :class:`xarray.DataArray` grid can be accessed via the GMT accessors (i.e., + ``grid.gmt.registration`` and ``grid.gmt.gtype`` respectively). However, these + properties may be lost after specific grid operations (such as slicing) and will + need to be manually set before passing the grid to any PyGMT data processing or + plotting functions. Refer to :class:`pygmt.GMTDataArrayAccessor` for detailed + explanations and workarounds. Examples -------- @@ -89,18 +98,19 @@ def load_earth_free_air_anomaly( >>> from pygmt.datasets import load_earth_free_air_anomaly >>> # load the default grid (gridline-registered 1 arc-degree grid) >>> grid = load_earth_free_air_anomaly() + >>> # load the uncertainties related to the default grid + >>> grid = load_earth_free_air_anomaly(uncertainty=True) >>> # load the 30 arc-minutes grid with "gridline" registration >>> grid = load_earth_free_air_anomaly(resolution="30m", registration="gridline") >>> # load high-resolution (5 arc-minutes) grid for a specific region >>> grid = load_earth_free_air_anomaly( - ... resolution="05m", - ... 
region=[120, 160, 30, 60], - ... registration="gridline", + ... resolution="05m", region=[120, 160, 30, 60], registration="gridline" ... ) """ + prefix = "earth_faaerror" if uncertainty is True else "earth_faa" grid = _load_remote_dataset( - name="earth_faa", - prefix="earth_faa", + name=prefix, + prefix=prefix, resolution=resolution, region=region, registration=registration, diff --git a/pygmt/datasets/earth_magnetic_anomaly.py b/pygmt/datasets/earth_magnetic_anomaly.py index 61646a9743a..463f7cf93b9 100644 --- a/pygmt/datasets/earth_magnetic_anomaly.py +++ b/pygmt/datasets/earth_magnetic_anomaly.py @@ -76,10 +76,10 @@ def load_earth_magnetic_anomaly( higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means + ``"gridline"`` for gridline registration. Default is ``None``, which means ``"gridline"`` for all resolutions except ``"02m"`` for - ``data_source="emag2"`` or ``data_source="emag2_4km"``, which are - ``"pixel"`` only. + ``data_source="emag2"`` or ``data_source="emag2_4km"``, which are ``"pixel"`` + only. data_source Select the source of the magnetic anomaly data. Available options are: @@ -139,10 +139,11 @@ def load_earth_magnetic_anomaly( "wdmam": "earth_wdmam", }.get(data_source) if prefix is None: - raise GMTInvalidInput( + msg = ( f"Invalid earth magnetic anomaly data source '{data_source}'. " "Valid values are 'emag2', 'emag2_4km', and 'wdmam'." 
) + raise GMTInvalidInput(msg) grid = _load_remote_dataset( name="earth_wdmam" if data_source == "wdmam" else "earth_mag", prefix=prefix, diff --git a/pygmt/datasets/earth_mean_dynamic_topography.py b/pygmt/datasets/earth_mean_dynamic_topography.py new file mode 100644 index 00000000000..4ca50e476be --- /dev/null +++ b/pygmt/datasets/earth_mean_dynamic_topography.py @@ -0,0 +1,103 @@ +""" +Function to download the CNES Earth mean dynamic topography dataset from the GMT data +server, and load as :class:`xarray.DataArray`. + +The grids are available in various resolutions. +""" + +from collections.abc import Sequence +from typing import Literal + +import xarray as xr +from pygmt.datasets.load_remote_dataset import _load_remote_dataset + +__doctest_skip__ = ["load_earth_mean_dynamic_topography"] + + +def load_earth_mean_dynamic_topography( + resolution: Literal["01d", "30m", "20m", "15m", "10m", "07m"] = "01d", + region: Sequence[float] | str | None = None, + registration: Literal["gridline", "pixel"] = "gridline", +) -> xr.DataArray: + r""" + Load the CNES Earth mean dynamic topography dataset in various resolutions. + + .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_mdt.jpg + :width: 80 % + :align: center + + CNES Earth mean dynamic topography dataset. + + The grids are downloaded to a user data directory (usually + ``~/.gmt/server/earth/earth_mdt/``) the first time you invoke this function. + Afterwards, it will load the grid from the data directory. So you'll need an + internet connection the first time around. + + These grids can also be accessed by passing in the file name + **@earth_mdt**\_\ *res*\[_\ *reg*] to any grid processing function or plotting + method. *res* is the grid resolution (see below), and *reg* is the grid registration + type (**p** for pixel registration or **g** for gridline registration). + + The default color palette table (CPT) for this dataset is *@earth_mdt.cpt*. 
It's + implicitly used when passing in the file name of the dataset to any grid plotting + method if no CPT is explicitly specified. When the dataset is loaded and plotted + as an :class:`xarray.DataArray` object, the default CPT is ignored, and GMT's + default CPT (*turbo*) is used. To use the dataset-specific CPT, you need to + explicitly set ``cmap="@earth_mdt.cpt"``. + + Refer to :gmt-datasets:`earth-mdt.html` for more details about available datasets, + including version information and references. + + Parameters + ---------- + resolution + The grid resolution. The suffix ``d`` and ``m`` stand for arc-degrees and + arc-minutes. Note that ``"07m"`` refers to a resolution of 7.5 arc-minutes. + region + The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, + *ymin*, *ymax*] or an ISO country code. + registration + Grid registration type. Either ``"pixel"`` for pixel registration or + ``"gridline"`` for gridline registration. + + Returns + ------- + grid + The CNES Earth mean dynamic topography grid. Coordinates are latitude and + longitude in degrees. Values are in meters. + + Note + ---- + The registration and coordinate system type of the returned + :class:`xarray.DataArray` grid can be accessed via the GMT accessors (i.e., + ``grid.gmt.registration`` and ``grid.gmt.gtype`` respectively). However, these + properties may be lost after specific grid operations (such as slicing) and will + need to be manually set before passing the grid to any PyGMT data processing or + plotting functions. Refer to :class:`pygmt.GMTDataArrayAccessor` for detailed + explanations and workarounds. + + Examples + -------- + + >>> from pygmt.datasets import load_earth_mean_dynamic_topography + >>> # load the default grid (gridline-registered 1 arc-degree grid) + >>> grid = load_earth_mean_dynamic_topography() + >>> # load the 30 arc-minutes grid with "gridline" registration + >>> grid = load_earth_mean_dynamic_topography( + ... 
resolution="30m", registration="gridline" + ... ) + >>> # load high-resolution (7 arc-minutes) grid for a specific region + >>> grid = load_earth_mean_dynamic_topography( + ... resolution="07m", + ... region=[120, 160, 30, 60], + ... registration="gridline", + ... ) + """ + grid = _load_remote_dataset( + name="earth_mdt", + prefix="earth_mdt", + resolution=resolution, + region=region, + registration=registration, + ) + return grid diff --git a/pygmt/datasets/earth_mean_sea_surface.py b/pygmt/datasets/earth_mean_sea_surface.py new file mode 100644 index 00000000000..f4856d98626 --- /dev/null +++ b/pygmt/datasets/earth_mean_sea_surface.py @@ -0,0 +1,104 @@ +""" +Function to download the CNES Earth mean sea surface dataset from the GMT data +server, and load as :class:`xarray.DataArray`. + +The grids are available in various resolutions. +""" + +from collections.abc import Sequence +from typing import Literal + +import xarray as xr +from pygmt.datasets.load_remote_dataset import _load_remote_dataset + +__doctest_skip__ = ["load_earth_mean_sea_surface"] + + +def load_earth_mean_sea_surface( + resolution: Literal[ + "01d", "30m", "20m", "15m", "10m", "06m", "05m", "04m", "03m", "02m", "01m" + ] = "01d", + region: Sequence[float] | str | None = None, + registration: Literal["gridline", "pixel"] = "gridline", +) -> xr.DataArray: + r""" + Load the CNES Earth mean sea surface dataset in various resolutions. + + .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_mss.jpg + :width: 80 % + :align: center + + CNES Earth mean sea surface dataset. + + The grids are downloaded to a user data directory (usually + ``~/.gmt/server/earth/earth_mss/``) the first time you invoke this function. + Afterwards, it will load the grid from the data directory. So you'll need an + internet connection the first time around. 
+ + These grids can also be accessed by passing in the file name + **@earth_mss**\_\ *res*\[_\ *reg*] to any grid processing function or plotting + method. *res* is the grid resolution (see below), and *reg* is the grid registration + type (**p** for pixel registration or **g** for gridline registration). + + The default color palette table (CPT) for this dataset is *@earth_mss.cpt*. It's + implicitly used when passing in the file name of the dataset to any grid plotting + method if no CPT is explicitly specified. When the dataset is loaded and plotted + as an :class:`xarray.DataArray` object, the default CPT is ignored, and GMT's + default CPT (*turbo*) is used. To use the dataset-specific CPT, you need to + explicitly set ``cmap="@earth_mss.cpt"``. + + Refer to :gmt-datasets:`earth-mss.html` for more details about available datasets, + including version information and references. + + Parameters + ---------- + resolution + The grid resolution. The suffix ``d`` and ``m`` stand for arc-degrees and + arc-minutes. + region + The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, + *ymin*, *ymax*] or an ISO country code. Required for grids with resolutions + higher than 5 arc-minutes (i.e., ``"05m"``). + registration + Grid registration type. Either ``"pixel"`` for pixel registration or + ``"gridline"`` for gridline registration. + + Returns + ------- + grid + The CNES Earth mean sea surface grid. Coordinates are latitude and + longitude in degrees. Values are in meters. + + Note + ---- + The registration and coordinate system type of the returned + :class:`xarray.DataArray` grid can be accessed via the GMT accessors (i.e., + ``grid.gmt.registration`` and ``grid.gmt.gtype`` respectively). However, these + properties may be lost after specific grid operations (such as slicing) and will + need to be manually set before passing the grid to any PyGMT data processing or + plotting functions. 
Refer to :class:`pygmt.GMTDataArrayAccessor` for detailed + explanations and workarounds. + + Examples + -------- + + >>> from pygmt.datasets import load_earth_mean_sea_surface + >>> # load the default grid (gridline-registered 1 arc-degree grid) + >>> grid = load_earth_mean_sea_surface() + >>> # load the 30 arc-minutes grid with "gridline" registration + >>> grid = load_earth_mean_sea_surface(resolution="30m", registration="gridline") + >>> # load high-resolution (5 arc-minutes) grid for a specific region + >>> grid = load_earth_mean_sea_surface( + ... resolution="05m", + ... region=[120, 160, 30, 60], + ... registration="gridline", + ... ) + """ + grid = _load_remote_dataset( + name="earth_mss", + prefix="earth_mss", + resolution=resolution, + region=region, + registration=registration, + ) + return grid diff --git a/pygmt/datasets/earth_relief.py b/pygmt/datasets/earth_relief.py index 4a00b1e52dc..f0090ca3bd2 100644 --- a/pygmt/datasets/earth_relief.py +++ b/pygmt/datasets/earth_relief.py @@ -83,9 +83,9 @@ def load_earth_relief( higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means - ``"gridline"`` for all resolutions except ``"15s"`` which is - ``"pixel"`` only. + ``"gridline"`` for gridline registration. Default is ``None``, which means + ``"gridline"`` for all resolutions except ``"15s"`` which is ``"pixel"`` + only. data_source Select the source for the Earth relief data. Available options are: @@ -155,17 +155,19 @@ def load_earth_relief( "synbath": "earth_synbath", }.get(data_source) if prefix is None: - raise GMTInvalidInput( + msg = ( f"Invalid earth relief data source '{data_source}'. " "Valid values are 'igpp', 'gebco', 'gebcosi', and 'synbath'." ) + raise GMTInvalidInput(msg) # Use SRTM or not. 
if use_srtm and resolution in land_only_srtm_resolutions: if data_source != "igpp": - raise GMTInvalidInput( - f"Option 'use_srtm=True' doesn't work with data source '{data_source}'." - " Please set 'data_source' to 'igpp'." + msg = ( + f"Option 'use_srtm=True' doesn't work with data source '{data_source}'. " + "Please set 'data_source' to 'igpp'." ) + raise GMTInvalidInput(msg) prefix = "srtm_relief" # Choose earth relief dataset match data_source: diff --git a/pygmt/datasets/earth_vertical_gravity_gradient.py b/pygmt/datasets/earth_vertical_gravity_gradient.py index 2ebba4563bc..bcf00099fc2 100644 --- a/pygmt/datasets/earth_vertical_gravity_gradient.py +++ b/pygmt/datasets/earth_vertical_gravity_gradient.py @@ -62,9 +62,9 @@ def load_earth_vertical_gravity_gradient( higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means - ``"gridline"`` for all resolutions except ``"01m"`` which is - ``"pixel"`` only. + ``"gridline"`` for gridline registration. Default is ``None``, which means + ``"gridline"`` for all resolutions except ``"01m"`` which is ``"pixel"`` + only. Returns ------- diff --git a/pygmt/datasets/load_remote_dataset.py b/pygmt/datasets/load_remote_dataset.py index 168a93583b2..41fe729d0e0 100644 --- a/pygmt/datasets/load_remote_dataset.py +++ b/pygmt/datasets/load_remote_dataset.py @@ -39,7 +39,9 @@ class GMTRemoteDataset(NamedTuple): Attributes ---------- description - The name assigned as an attribute to the DataArray. + The name assigned as an attribute to the DataArray. + kind + The kind of the dataset source. Valid values are ``"grid"`` and ``"image"``. units The units of the values in the DataArray. 
resolutions @@ -49,6 +51,7 @@ class GMTRemoteDataset(NamedTuple): """ description: str + kind: Literal["grid", "image"] units: str | None resolutions: dict[str, Resolution] extra_attributes: dict[str, Any] @@ -57,6 +60,7 @@ class GMTRemoteDataset(NamedTuple): datasets = { "earth_age": GMTRemoteDataset( description="EarthByte Earth seafloor crustal age", + kind="grid", units="Myr", extra_attributes={"horizontal_datum": "WGS84"}, resolutions={ @@ -75,6 +79,7 @@ class GMTRemoteDataset(NamedTuple): ), "earth_day": GMTRemoteDataset( description="NASA Day Images", + kind="image", units=None, extra_attributes={"long_name": "blue_marble", "horizontal_datum": "WGS84"}, resolutions={ @@ -92,8 +97,66 @@ class GMTRemoteDataset(NamedTuple): "30s": Resolution("30s", registrations=["pixel"]), }, ), + "earth_dist": GMTRemoteDataset( + description="GSHHG Earth distance to shoreline", + kind="grid", + units="kilometers", + extra_attributes={"horizontal_datum": "WGS84"}, + resolutions={ + "01d": Resolution("01d"), + "30m": Resolution("30m"), + "20m": Resolution("20m"), + "15m": Resolution("15m"), + "10m": Resolution("10m"), + "06m": Resolution("06m"), + "05m": Resolution("05m", tiled=True), + "04m": Resolution("04m", tiled=True), + "03m": Resolution("03m", tiled=True), + "02m": Resolution("02m", tiled=True), + "01m": Resolution("01m", registrations=["gridline"], tiled=True), + }, + ), + "earth_edefl": GMTRemoteDataset( + description="IGPP Earth east-west deflection", + kind="grid", + units="micro-radians", + extra_attributes={"horizontal_datum": "WGS84"}, + resolutions={ + "01d": Resolution("01d"), + "30m": Resolution("30m"), + "20m": Resolution("20m"), + "15m": Resolution("15m"), + "10m": Resolution("10m"), + "06m": Resolution("06m"), + "05m": Resolution("05m", tiled=True), + "04m": Resolution("04m", tiled=True), + "03m": Resolution("03m", tiled=True), + "02m": Resolution("02m", tiled=True), + "01m": Resolution("01m", registrations=["pixel"], tiled=True), + }, + ), "earth_faa": 
GMTRemoteDataset( description="IGPP Earth free-air anomaly", + kind="grid", + units="mGal", + extra_attributes={"horizontal_datum": "WGS84"}, + resolutions={ + "01d": Resolution("01d"), + "30m": Resolution("30m"), + "20m": Resolution("20m"), + "15m": Resolution("15m"), + "10m": Resolution("10m"), + "06m": Resolution("06m"), + "05m": Resolution("05m", tiled=True), + "04m": Resolution("04m", tiled=True), + "03m": Resolution("03m", tiled=True), + "02m": Resolution("02m", tiled=True), + "01m": Resolution("01m", registrations=["pixel"], tiled=True), + }, + ), + "earth_faaerror": GMTRemoteDataset( + description="IGPP Earth free-air anomaly errors", + kind="grid", units="mGal", extra_attributes={"horizontal_datum": "WGS84"}, resolutions={ @@ -112,6 +175,7 @@ class GMTRemoteDataset(NamedTuple): ), "earth_gebco": GMTRemoteDataset( description="GEBCO Earth relief", + kind="grid", units="meters", extra_attributes={"vertical_datum": "EGM96", "horizontal_datum": "WGS84"}, resolutions={ @@ -134,7 +198,8 @@ class GMTRemoteDataset(NamedTuple): ), "earth_geoid": GMTRemoteDataset( description="EGM2008 Earth geoid", - units="m", + kind="grid", + units="meters", extra_attributes={"horizontal_datum": "WGS84"}, resolutions={ "01d": Resolution("01d"), @@ -152,6 +217,7 @@ class GMTRemoteDataset(NamedTuple): ), "earth_igpp": GMTRemoteDataset( description="IGPP Earth relief", + kind="grid", units="meters", extra_attributes={"vertical_datum": "EGM96", "horizontal_datum": "WGS84"}, resolutions={ @@ -174,6 +240,7 @@ class GMTRemoteDataset(NamedTuple): ), "earth_mag": GMTRemoteDataset( description="EMAG2 Earth Magnetic Anomaly Model", + kind="grid", units="nT", extra_attributes={"horizontal_datum": "WGS84"}, resolutions={ @@ -191,6 +258,7 @@ class GMTRemoteDataset(NamedTuple): ), "earth_mask": GMTRemoteDataset( description="GSHHG Earth mask", + kind="grid", units=None, extra_attributes={"horizontal_datum": "WGS84"}, resolutions={ @@ -209,8 +277,28 @@ class GMTRemoteDataset(NamedTuple): "15s": 
Resolution("15s"), }, ), + "earth_mss": GMTRemoteDataset( + description="CNES Earth mean sea surface", + kind="grid", + units="meters", + extra_attributes={"horizontal_datum": "WGS84"}, + resolutions={ + "01d": Resolution("01d"), + "30m": Resolution("30m"), + "20m": Resolution("20m"), + "15m": Resolution("15m"), + "10m": Resolution("10m"), + "06m": Resolution("06m"), + "05m": Resolution("05m", tiled=True), + "04m": Resolution("04m", tiled=True), + "03m": Resolution("03m", tiled=True), + "02m": Resolution("02m", tiled=True), + "01m": Resolution("01m", tiled=True, registrations=["gridline"]), + }, + ), "earth_night": GMTRemoteDataset( description="NASA Night Images", + kind="image", units=None, extra_attributes={"long_name": "black_marble", "horizontal_datum": "WGS84"}, resolutions={ @@ -228,8 +316,42 @@ class GMTRemoteDataset(NamedTuple): "30s": Resolution("30s", registrations=["pixel"]), }, ), + "earth_mdt": GMTRemoteDataset( + description="CNES Earth mean dynamic topography", + kind="grid", + units="meters", + extra_attributes={"horizontal_datum": "WGS84"}, + resolutions={ + "01d": Resolution("01d"), + "30m": Resolution("30m"), + "20m": Resolution("20m"), + "15m": Resolution("15m"), + "10m": Resolution("10m"), + "07m": Resolution("07m", registrations=["gridline"]), + }, + ), + "earth_ndefl": GMTRemoteDataset( + description="IGPP Earth north-south deflection", + kind="grid", + units="micro-radians", + extra_attributes={"horizontal_datum": "WGS84"}, + resolutions={ + "01d": Resolution("01d"), + "30m": Resolution("30m"), + "20m": Resolution("20m"), + "15m": Resolution("15m"), + "10m": Resolution("10m"), + "06m": Resolution("06m"), + "05m": Resolution("05m", tiled=True), + "04m": Resolution("04m", tiled=True), + "03m": Resolution("03m", tiled=True), + "02m": Resolution("02m", tiled=True), + "01m": Resolution("01m", registrations=["pixel"], tiled=True), + }, + ), "earth_vgg": GMTRemoteDataset( description="IGPP Earth vertical gravity gradient", + kind="grid", 
units="Eotvos", extra_attributes={"horizontal_datum": "WGS84"}, resolutions={ @@ -248,6 +370,7 @@ class GMTRemoteDataset(NamedTuple): ), "earth_wdmam": GMTRemoteDataset( description="WDMAM World Digital Magnetic Anomaly Map", + kind="grid", units="nT", extra_attributes={"horizontal_datum": "WGS84"}, resolutions={ @@ -264,6 +387,7 @@ class GMTRemoteDataset(NamedTuple): ), "mars_relief": GMTRemoteDataset( description="NASA Mars (MOLA) relief", + kind="grid", units="meters", extra_attributes={}, resolutions={ @@ -285,6 +409,7 @@ class GMTRemoteDataset(NamedTuple): ), "moon_relief": GMTRemoteDataset( description="USGS Moon (LOLA) relief", + kind="grid", units="meters", extra_attributes={}, resolutions={ @@ -306,6 +431,7 @@ class GMTRemoteDataset(NamedTuple): ), "mercury_relief": GMTRemoteDataset( description="USGS Mercury relief", + kind="grid", units="meters", extra_attributes={}, resolutions={ @@ -325,6 +451,7 @@ class GMTRemoteDataset(NamedTuple): ), "pluto_relief": GMTRemoteDataset( description="USGS Pluto relief", + kind="grid", units="meters", extra_attributes={}, resolutions={ @@ -344,6 +471,7 @@ class GMTRemoteDataset(NamedTuple): ), "venus_relief": GMTRemoteDataset( description="NASA Magellan Venus relief", + kind="grid", units="meters", extra_attributes={}, resolutions={ @@ -442,15 +570,16 @@ def _load_remote_dataset( raise GMTInvalidInput(msg) fname = f"@{prefix}_{resolution}_{reg}" - kind = "image" if name in {"earth_day", "earth_night"} else "grid" - kwdict = {"R": region, "T": {"grid": "g", "image": "i"}[kind]} + kwdict = {"R": region, "T": {"grid": "g", "image": "i"}[dataset.kind]} with Session() as lib: - with lib.virtualfile_out(kind=kind) as voutgrd: + with lib.virtualfile_out(kind=dataset.kind) as voutgrd: lib.call_module( module="read", args=[fname, voutgrd, *build_arg_list(kwdict)], ) - grid = lib.virtualfile_to_raster(kind=kind, outgrid=None, vfname=voutgrd) + grid = lib.virtualfile_to_raster( + kind=dataset.kind, outgrid=None, vfname=voutgrd + ) 
# Full path to the grid if not tiled grids. source = which(fname, download="a") if not resinfo.tiled else None diff --git a/pygmt/datasets/mars_relief.py b/pygmt/datasets/mars_relief.py index 1d2cb631fd9..e04f048d42f 100644 --- a/pygmt/datasets/mars_relief.py +++ b/pygmt/datasets/mars_relief.py @@ -67,14 +67,15 @@ def load_mars_relief( ---------- resolution The grid resolution. The suffix ``d``, ``m`` and ``s`` stand for arc-degrees, - arc-minutes and arc-seconds. + arc-minutes and arc-seconds. Note that ``"12s"`` refers to a resolution of + 12.1468873601 arc-seconds. region The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, *ymin*, *ymax*] or an ISO country code. Required for grids with resolutions higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means + ``"gridline"`` for gridline registration. Default is ``None``, which means ``"gridline"`` for all resolutions except for ``"12s"`` which is ``"pixel"`` only. diff --git a/pygmt/datasets/mercury_relief.py b/pygmt/datasets/mercury_relief.py index f3c360c356a..06da8194e4a 100644 --- a/pygmt/datasets/mercury_relief.py +++ b/pygmt/datasets/mercury_relief.py @@ -65,14 +65,15 @@ def load_mercury_relief( ---------- resolution The grid resolution. The suffix ``d``, ``m`` and ``s`` stand for arc-degrees, - arc-minutes and arc-seconds. + arc-minutes and arc-seconds. Note that ``"56s"`` refers to a resolution of + 56.25 arc-seconds. region The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, *ymin*, *ymax*] or an ISO country code. Required for grids with resolutions higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means + ``"gridline"`` for gridline registration. 
Default is ``None``, which means ``"gridline"`` for all resolutions except for ``"56s"`` which is ``"pixel"`` only. diff --git a/pygmt/datasets/moon_relief.py b/pygmt/datasets/moon_relief.py index 9daab0f47a5..66817f42d08 100644 --- a/pygmt/datasets/moon_relief.py +++ b/pygmt/datasets/moon_relief.py @@ -67,14 +67,15 @@ def load_moon_relief( ---------- resolution The grid resolution. The suffix ``d``, ``m`` and ``s`` stand for arc-degrees, - arc-minutes and arc-seconds. + arc-minutes and arc-seconds. Note that ``"14s"`` refers to a resolution of + 14.0625 arc-seconds. region The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, *ymin*, *ymax*] or an ISO country code. Required for grids with resolutions higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means + ``"gridline"`` for gridline registration. Default is ``None``, which means ``"gridline"`` for all resolutions except for ``"14s"`` which is ``"pixel"`` only. diff --git a/pygmt/datasets/pluto_relief.py b/pygmt/datasets/pluto_relief.py index 620545899da..9a9998d228a 100644 --- a/pygmt/datasets/pluto_relief.py +++ b/pygmt/datasets/pluto_relief.py @@ -65,14 +65,15 @@ def load_pluto_relief( ---------- resolution The grid resolution. The suffix ``d``, ``m`` and ``s`` stand for arc-degrees, - arc-minutes and arc-seconds. + arc-minutes and arc-seconds. Note that ``"52s"`` refers to a resolution of + 52.0732883317 arc-seconds. region The subregion of the grid to load, in the form of a sequence [*xmin*, *xmax*, *ymin*, *ymax*] or an ISO country code. Required for grids with resolutions higher than 5 arc-minutes (i.e., ``"05m"``). registration Grid registration type. Either ``"pixel"`` for pixel registration or - ``"gridline"`` for gridline registration. Default is ``None``, means + ``"gridline"`` for gridline registration. 
Default is ``None``, which means ``"gridline"`` for all resolutions except for ``"52s"`` which is ``"pixel"`` only. diff --git a/pygmt/datasets/samples.py b/pygmt/datasets/samples.py index bf0864c14d5..3739ee630b8 100644 --- a/pygmt/datasets/samples.py +++ b/pygmt/datasets/samples.py @@ -347,5 +347,6 @@ def load_sample_data( >>> data = load_sample_data("bathymetry") """ # noqa: W505 if name not in datasets: - raise GMTInvalidInput(f"Invalid dataset name '{name}'.") + msg = f"Invalid dataset name '{name}'." + raise GMTInvalidInput(msg) return datasets[name].func() diff --git a/pygmt/datasets/tile_map.py b/pygmt/datasets/tile_map.py index c3322c39c36..caa18dacd84 100644 --- a/pygmt/datasets/tile_map.py +++ b/pygmt/datasets/tile_map.py @@ -3,7 +3,6 @@ :class:`xarray.DataArray`. """ -import contextlib from collections.abc import Sequence from typing import Literal @@ -11,17 +10,22 @@ try: import contextily + from rasterio.crs import CRS from xyzservices import TileProvider _HAS_CONTEXTILY = True except ImportError: + CRS = None TileProvider = None _HAS_CONTEXTILY = False -with contextlib.suppress(ImportError): - # rioxarray is needed to register the rio accessor +try: import rioxarray # noqa: F401 + _HAS_RIOXARRAY = True +except ImportError: + _HAS_RIOXARRAY = False + import numpy as np import xarray as xr @@ -33,6 +37,7 @@ def load_tile_map( zoom: int | Literal["auto"] = "auto", source: TileProvider | str | None = None, lonlat: bool = True, + crs: str | CRS = "EPSG:3857", wait: int = 0, max_retries: int = 2, zoom_adjust: int | None = None, @@ -42,7 +47,8 @@ def load_tile_map( The tiles that compose the map are merged and georeferenced into an :class:`xarray.DataArray` image with 3 bands (RGB). Note that the returned image is - in a Spherical Mercator (EPSG:3857) coordinate reference system. + in a Spherical Mercator (EPSG:3857) coordinate reference system (CRS) by default, + but can be customized using the ``crs`` parameter. 
Parameters ---------- @@ -80,6 +86,10 @@ def load_tile_map( lonlat If ``False``, coordinates in ``region`` are assumed to be Spherical Mercator as opposed to longitude/latitude. + crs + Coordinate reference system (CRS) of the returned :class:`xarray.DataArray` + image. Default is ``"EPSG:3857"`` (i.e., Spherical Mercator). The CRS can be in + either string or :class:`rasterio.crs.CRS` format. wait If the tile API is rate-limited, the number of seconds to wait between a failed request and the next try. @@ -119,15 +129,19 @@ def load_tile_map( Frozen({'band': 3, 'y': 256, 'x': 512}) >>> raster.coords # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE Coordinates: - * band (band) uint8 ... 1 2 3 - * y (y) float64 ... -7.081e-10 -7.858e+04 ... -1.996e+07 -2.004e+07 - * x (x) float64 ... -2.004e+07 -1.996e+07 ... 1.996e+07 2.004e+07 + * band (band) uint8... 1 2 3 + * y (y) float64... -7.081e-10 -7.858e+04 ... -1.996e+07 -2.004e+07 + * x (x) float64... -2.004e+07 -1.996e+07 ... 1.996e+07 2.004e+07 spatial_ref int... 0 >>> # CRS is set only if rioxarray is available >>> if hasattr(raster, "rio"): - ... raster.rio.crs - CRS.from_wkt(...) + ... raster.rio.crs.to_string() + 'EPSG:3857' """ + # The CRS of the source tile provider. If the source is a TileProvider object, use + # its crs attribute if available. Otherwise, default to EPSG:3857. + _source_crs = getattr(source, "crs", "EPSG:3857") + if not _HAS_CONTEXTILY: msg = ( "Package `contextily` is required to be installed to use this function. " @@ -136,28 +150,35 @@ def load_tile_map( ) raise ImportError(msg) - contextily_kwargs = {} + if crs != _source_crs and not _HAS_RIOXARRAY: + msg = ( + f"Package `rioxarray` is required if CRS is not '{_source_crs}'. " + "Please use `python -m pip install rioxarray` or " + "`mamba install -c conda-forge rioxarray` to install the package." 
+ ) + raise ImportError(msg) + + # Keyword arguments for contextily.bounds2img + contextily_kwargs = { + "zoom": zoom, + "source": source, + "ll": lonlat, + "wait": wait, + "max_retries": max_retries, + } + # TODO(contextily>=1.5.0): Remove the check for the 'zoom_adjust' parameter. if zoom_adjust is not None: - contextily_kwargs["zoom_adjust"] = zoom_adjust if Version(contextily.__version__) < Version("1.5.0"): msg = ( "The `zoom_adjust` parameter requires `contextily>=1.5.0` to work. " "Please upgrade contextily, or manually set the `zoom` level instead." ) - raise TypeError(msg) + raise ValueError(msg) + contextily_kwargs["zoom_adjust"] = zoom_adjust west, east, south, north = region image, extent = contextily.bounds2img( - w=west, - s=south, - e=east, - n=north, - zoom=zoom, - source=source, - ll=lonlat, - wait=wait, - max_retries=max_retries, - **contextily_kwargs, + w=west, s=south, e=east, n=north, **contextily_kwargs ) # Turn RGBA img from channel-last to channel-first and get 3-band RGB only @@ -176,8 +197,12 @@ def load_tile_map( dims=("band", "y", "x"), ) - # If rioxarray is installed, set the coordinate reference system + # If rioxarray is installed, set the coordinate reference system. if hasattr(dataarray, "rio"): - dataarray = dataarray.rio.write_crs(input_crs="EPSG:3857") + dataarray = dataarray.rio.write_crs(input_crs=_source_crs) + + # Reproject raster image from the source CRS to the specified CRS. + if crs != _source_crs: + dataarray = dataarray.rio.reproject(dst_crs=crs) return dataarray diff --git a/pygmt/datatypes/dataset.py b/pygmt/datatypes/dataset.py index 354eefe543e..6f07f1a6092 100644 --- a/pygmt/datatypes/dataset.py +++ b/pygmt/datatypes/dataset.py @@ -11,6 +11,60 @@ import pandas as pd +class _GMT_DATASEGMENT(ctp.Structure): # noqa: N801 + """ + GMT datasegment structure for holding a segment with multiple columns. 
+ """ + + _fields_: ClassVar = [ + # Number of rows/records in this segment + ("n_rows", ctp.c_uint64), + # Number of fields in each record + ("n_columns", ctp.c_uint64), + # Minimum coordinate for each column + ("min", ctp.POINTER(ctp.c_double)), + # Maximum coordinate for each column + ("max", ctp.POINTER(ctp.c_double)), + # Data x, y, and possibly other columns + ("data", ctp.POINTER(ctp.POINTER(ctp.c_double))), + # Label string (if applicable) + ("label", ctp.c_char_p), + # Segment header (if applicable) + ("header", ctp.c_char_p), + # text beyond the data + ("text", ctp.POINTER(ctp.c_char_p)), + # Book-keeping variables "hidden" from the API + ("hidden", ctp.c_void_p), + ] + + +class _GMT_DATATABLE(ctp.Structure): # noqa: N801 + """ + GMT datatable structure for holding a table with multiple segments. + """ + + _fields_: ClassVar = [ + # Number of file header records (0 if no header) + ("n_headers", ctp.c_uint), + # Number of columns (fields) in each record + ("n_columns", ctp.c_uint64), + # Number of segments in the array + ("n_segments", ctp.c_uint64), + # Total number of data records across all segments + ("n_records", ctp.c_uint64), + # Minimum coordinate for each column + ("min", ctp.POINTER(ctp.c_double)), + # Maximum coordinate for each column + ("max", ctp.POINTER(ctp.c_double)), + # Array with all file header records, if any + ("header", ctp.POINTER(ctp.c_char_p)), + # Pointer to array of segments + ("segment", ctp.POINTER(ctp.POINTER(_GMT_DATASEGMENT))), + # Book-keeping variables "hidden" from the API + ("hidden", ctp.c_void_p), + ] + + class _GMT_DATASET(ctp.Structure): # noqa: N801 """ GMT dataset structure for holding multiple tables (files). @@ -67,58 +121,6 @@ class _GMT_DATASET(ctp.Structure): # noqa: N801 [b'TEXT8 TEXT90', b'TEXT123 TEXT456789'] """ - class _GMT_DATATABLE(ctp.Structure): # noqa: N801 - """ - GMT datatable structure for holding a table with multiple segments. 
- """ - - class _GMT_DATASEGMENT(ctp.Structure): # noqa: N801 - """ - GMT datasegment structure for holding a segment with multiple columns. - """ - - _fields_: ClassVar = [ - # Number of rows/records in this segment - ("n_rows", ctp.c_uint64), - # Number of fields in each record - ("n_columns", ctp.c_uint64), - # Minimum coordinate for each column - ("min", ctp.POINTER(ctp.c_double)), - # Maximum coordinate for each column - ("max", ctp.POINTER(ctp.c_double)), - # Data x, y, and possibly other columns - ("data", ctp.POINTER(ctp.POINTER(ctp.c_double))), - # Label string (if applicable) - ("label", ctp.c_char_p), - # Segment header (if applicable) - ("header", ctp.c_char_p), - # text beyond the data - ("text", ctp.POINTER(ctp.c_char_p)), - # Book-keeping variables "hidden" from the API - ("hidden", ctp.c_void_p), - ] - - _fields_: ClassVar = [ - # Number of file header records (0 if no header) - ("n_headers", ctp.c_uint), - # Number of columns (fields) in each record - ("n_columns", ctp.c_uint64), - # Number of segments in the array - ("n_segments", ctp.c_uint64), - # Total number of data records across all segments - ("n_records", ctp.c_uint64), - # Minimum coordinate for each column - ("min", ctp.POINTER(ctp.c_double)), - # Maximum coordinate for each column - ("max", ctp.POINTER(ctp.c_double)), - # Array with all file header records, if any - ("header", ctp.POINTER(ctp.c_char_p)), - # Pointer to array of segments - ("segment", ctp.POINTER(ctp.POINTER(_GMT_DATASEGMENT))), - # Book-keeping variables "hidden" from the API - ("hidden", ctp.c_void_p), - ] - _fields_: ClassVar = [ # The total number of tables (files) contained ("n_tables", ctp.c_uint64), diff --git a/pygmt/encodings.py b/pygmt/encodings.py index 44ed3153e85..09c749c4c82 100644 --- a/pygmt/encodings.py +++ b/pygmt/encodings.py @@ -1,8 +1,9 @@ +# noqa: A005 """ Character encodings supported by GMT. 
Currently, Adobe Symbol, Adobe ZapfDingbats, Adobe ISOLatin1+ and ISO-8859-x (x can be -1-11, 13-16) encodings are supported. Adobe Standard encoding is not supported. +1-11, 13-16) encodings are supported. Adobe Standard+ encoding is not supported. The corresponding Unicode characters in each Adobe character encoding are generated from the mapping tables and conversion scripts in the diff --git a/pygmt/enums.py b/pygmt/enums.py index 7c0984eccb9..34419a57e06 100644 --- a/pygmt/enums.py +++ b/pygmt/enums.py @@ -37,3 +37,21 @@ class GridFormat(IntEnum): GD = 22 #: Import through GDAL EI = 23 #: ESRI Arc/Info ASCII Grid Interchange format (ASCII integer) EF = 24 #: ESRI Arc/Info ASCII Grid Interchange format (ASCII float) + + +class GridRegistration(IntEnum): + """ + Enum for the grid registration. + """ + + GRIDLINE = 0 #: Gridline registration + PIXEL = 1 #: Pixel registration + + +class GridType(IntEnum): + """ + Enum for the grid type. + """ + + CARTESIAN = 0 #: Cartesian grid + GEOGRAPHIC = 1 #: Geographic grid diff --git a/pygmt/figure.py b/pygmt/figure.py index 4163ab52eb1..5c5d4734ce6 100644 --- a/pygmt/figure.py +++ b/pygmt/figure.py @@ -6,7 +6,7 @@ import os from pathlib import Path, PurePath from tempfile import TemporaryDirectory -from typing import Literal +from typing import Literal, overload try: import IPython @@ -95,19 +95,19 @@ class Figure: 122.94, 145.82, 20.53, 45.52 """ - def __init__(self): + def __init__(self) -> None: self._name = unique_name() self._preview_dir = TemporaryDirectory(prefix=f"{self._name}-preview-") self._activate_figure() - def __del__(self): + def __del__(self) -> None: """ Clean up the temporary directory that stores the previews. """ if hasattr(self, "_preview_dir"): self._preview_dir.cleanup() - def _activate_figure(self): + def _activate_figure(self) -> None: """ Start and/or activate the current figure. 
@@ -144,7 +144,7 @@ def savefig( show: bool = False, worldfile: bool = False, **kwargs, - ): + ) -> None: """ Save the figure to an image file. @@ -248,6 +248,7 @@ def savefig( kwargs.pop("metadata", None) self.psconvert(prefix=prefix, fmt=fmts[ext], crop=crop, **kwargs) + # TODO(GMT>=6.5.0): Remove the workaround for upstream bug in GMT<6.5.0. # Remove the .pgw world file if exists. Not necessary after GMT 6.5.0. # See upstream fix https://github.com/GenericMappingTools/gmt/pull/7865 if ext == "tiff": @@ -267,7 +268,7 @@ def show( width: int = 500, waiting: float = 0.5, **kwargs, - ): + ) -> None: """ Display a preview of the figure. @@ -353,6 +354,14 @@ def show( ) raise GMTInvalidInput(msg) + @overload + def _preview( + self, fmt: str, dpi: int, as_bytes: Literal[True] = True, **kwargs + ) -> bytes: ... + @overload + def _preview( + self, fmt: str, dpi: int, as_bytes: Literal[False] = False, **kwargs + ) -> str: ... def _preview(self, fmt: str, dpi: int, as_bytes: bool = False, **kwargs): """ Grab a preview of the figure. @@ -380,7 +389,7 @@ def _preview(self, fmt: str, dpi: int, as_bytes: bool = False, **kwargs): return fname.read_bytes() return fname - def _repr_png_(self): + def _repr_png_(self) -> bytes: """ Show a PNG preview if the object is returned in an interactive shell. @@ -389,7 +398,7 @@ def _repr_png_(self): png = self._preview(fmt="png", dpi=70, anti_alias=True, as_bytes=True) return png - def _repr_html_(self): + def _repr_html_(self) -> str: """ Show the PNG image embedded in HTML with a controlled width. @@ -409,6 +418,7 @@ def _repr_html_(self): grdimage, grdview, histogram, + hlines, image, inset, legend, @@ -427,11 +437,12 @@ def _repr_html_(self): tilemap, timestamp, velo, + vlines, wiggle, ) -def set_display(method: Literal["external", "notebook", "none", None] = None): +def set_display(method: Literal["external", "notebook", "none", None] = None) -> None: """ Set the display method when calling :meth:`pygmt.Figure.show`. 
diff --git a/pygmt/helpers/caching.py b/pygmt/helpers/caching.py index 26648b17060..ea6bed8d4cf 100644 --- a/pygmt/helpers/caching.py +++ b/pygmt/helpers/caching.py @@ -5,7 +5,7 @@ from pygmt.src import which -def cache_data(): +def cache_data() -> None: """ Download GMT remote data files used in PyGMT tests and docs to cache folder. """ @@ -14,7 +14,10 @@ def cache_data(): # List of GMT remote datasets. "@earth_age_01d_g", "@earth_day_01d", + "@earth_dist_01d", + "@earth_edefl_01d", "@earth_faa_01d_g", + "@earth_faaerror_01d_g", "@earth_gebco_01d_g", "@earth_gebcosi_01d_g", "@earth_gebcosi_15m_p", @@ -22,6 +25,10 @@ def cache_data(): "@earth_mag_01d_g", "@earth_mag4km_01d_g", "@earth_mask_01d_g", + "@earth_mdt_01d_g", + "@earth_mdt_07m_g", + "@earth_mss_01d_g", + "@earth_ndefl_01d", "@earth_night_01d", "@earth_relief_01d_g", "@earth_relief_01d_p", @@ -45,10 +52,15 @@ def cache_data(): "@N00W030.earth_age_01m_g.nc", "@N30E060.earth_age_01m_g.nc", "@N30E090.earth_age_01m_g.nc", + "@N00W030.earth_dist_01m_g.nc", + "@N00W030.earth_edefl_01m_p.nc", "@N00W030.earth_faa_01m_p.nc", + "@N00W030.earth_faaerror_01m_p.nc", "@N00W030.earth_geoid_01m_g.nc", "@S30W060.earth_mag_02m_p.nc", "@S30W120.earth_mag4km_02m_p.nc", + "@N30E090.earth_mss_01m_g.nc", + "@N30E090.earth_ndefl_01m_p.nc", "@N00W090.earth_relief_03m_p.nc", "@N00E135.earth_relief_30s_g.nc", "@N00W010.earth_relief_15s_p.nc", diff --git a/pygmt/helpers/decorators.py b/pygmt/helpers/decorators.py index 525cc611a54..3c4f9dd5510 100644 --- a/pygmt/helpers/decorators.py +++ b/pygmt/helpers/decorators.py @@ -37,17 +37,17 @@ (using ``binary="o"``), where *ncols* is the number of data columns of *type*, which must be one of: - - **c** - int8_t (1-byte signed char) - - **u** - uint8_t (1-byte unsigned char) - - **h** - int16_t (2-byte signed int) - - **H** - uint16_t (2-byte unsigned int) - - **i** - int32_t (4-byte signed int) - - **I** - uint32_t (4-byte unsigned int) - - **l** - int64_t (8-byte signed int) - - **L** - 
uint64_t (8-byte unsigned int) - - **f** - 4-byte single-precision float - - **d** - 8-byte double-precision float - - **x** - use to skip *ncols* anywhere in the record + - **c**: int8_t (1-byte signed char) + - **u**: uint8_t (1-byte unsigned char) + - **h**: int16_t (2-byte signed int) + - **H**: uint16_t (2-byte unsigned int) + - **i**: int32_t (4-byte signed int) + - **I**: uint32_t (4-byte unsigned int) + - **l**: int64_t (8-byte signed int) + - **L**: uint64_t (8-byte unsigned int) + - **f**: 4-byte single-precision float + - **d**: 8-byte double-precision float + - **x**: use to skip *ncols* anywhere in the record For records with mixed types, append additional comma-separated combinations of *ncols* *type* (no space). The following modifiers @@ -84,9 +84,9 @@ **e**\|\ **f**\|\ **g**. Determine how spherical distances are calculated. - - **e** - Ellipsoidal (or geodesic) mode - - **f** - Flat Earth mode - - **g** - Great circle distance [Default] + - **e**: Ellipsoidal (or geodesic) mode + - **f**: Flat Earth mode + - **g**: Great circle distance [Default] All spherical distance calculations depend on the current ellipsoid (:gmt-term:`PROJ_ELLIPSOID`), the definition of the mean radius @@ -118,16 +118,16 @@ a list with each item containing a string describing one set of criteria. - - **x**\|\ **X** - define a gap when there is a large enough - change in the x coordinates (upper case to use projected + - **x**\|\ **X**: define a gap when there is a large enough + change in the x coordinates (uppercase to use projected coordinates). - - **y**\|\ **Y** - define a gap when there is a large enough - change in the y coordinates (upper case to use projected + - **y**\|\ **Y**: define a gap when there is a large enough + change in the y coordinates (uppercase to use projected coordinates). 
- - **d**\|\ **D** - define a gap when there is a large enough - distance between coordinates (upper case to use projected + - **d**\|\ **D**: define a gap when there is a large enough + distance between coordinates (uppercase to use projected coordinates). - - **z** - define a gap when there is a large enough change in + - **z**: define a gap when there is a large enough change in the z data. Use **+c**\ *col* to change the z data column [Default *col* is 2 (i.e., 3rd column)]. @@ -146,9 +146,9 @@ One of the following modifiers can be appended: - - **+n** - specify that the previous value minus the current + - **+n**: specify that the previous value minus the current column value must exceed *gap* for a break to be imposed. - - **+p** - specify that the current value minus the previous + - **+p**: specify that the current value minus the previous value must exceed *gap* for a break to be imposed.""", "grid": r""" grid : str or xarray.DataArray @@ -367,13 +367,13 @@ Select verbosity level [Default is **w**], which modulates the messages written to stderr. Choose among 7 levels of verbosity: - - **q** - Quiet, not even fatal error messages are produced - - **e** - Error messages only - - **w** - Warnings [Default] - - **t** - Timings (report runtimes for time-intensive algorithms) - - **i** - Informational messages (same as ``verbose=True``) - - **c** - Compatibility warnings - - **d** - Debugging messages""", + - **q**: Quiet, not even fatal error messages are produced + - **e**: Error messages only + - **w**: Warnings [Default] + - **t**: Timings (report runtimes for time-intensive algorithms) + - **i**: Informational messages (same as ``verbose=True``) + - **c**: Compatibility warnings + - **d**: Debugging messages""", "wrap": r""" wrap : str **y**\|\ **a**\|\ **w**\|\ **d**\|\ **h**\|\ **m**\|\ **s**\|\ @@ -382,14 +382,14 @@ different column if selected via **+c**\ *col*. 
The following cyclical coordinate transformations are supported: - - **y** - yearly cycle (normalized) - - **a** - annual cycle (monthly) - - **w** - weekly cycle (day) - - **d** - daily cycle (hour) - - **h** - hourly cycle (minute) - - **m** - minute cycle (second) - - **s** - second cycle (second) - - **c** - custom cycle (normalized) + - **y**: yearly cycle (normalized) + - **a**: annual cycle (monthly) + - **w**: weekly cycle (day) + - **d**: daily cycle (hour) + - **h**: hourly cycle (minute) + - **m**: minute cycle (second) + - **s**: second cycle (second) + - **c**: custom cycle (normalized) Full documentation is at :gmt-docs:`gmt.html#w-full`.""", } @@ -456,18 +456,22 @@ def fmt_docstring(module_func): Select map :doc:`projection `. **Aliases:** + .. hlist:: + :columns: 3 - - J = projection - - R = region + - J = projection + - R = region """ # noqa: D410,D411 filler_text = {} if hasattr(module_func, "aliases"): aliases = ["**Aliases:**\n"] + aliases.append(".. hlist::") + aliases.append(" :columns: 3\n") for arg in sorted(module_func.aliases): alias = module_func.aliases[arg] - aliases.append(f"- {arg} = {alias}") + aliases.append(f" - {arg} = {alias}") filler_text["aliases"] = "\n".join(aliases) filler_text["table-like"] = ( @@ -569,10 +573,11 @@ def new_module(*args, **kwargs): """ for short_param, long_alias in aliases.items(): if long_alias in kwargs and short_param in kwargs: - raise GMTInvalidInput( + msg = ( f"Parameters in short-form ({short_param}) and " f"long-form ({long_alias}) can't coexist." ) + raise GMTInvalidInput(msg) if long_alias in kwargs: kwargs[short_param] = kwargs.pop(long_alias) elif short_param in kwargs: @@ -721,9 +726,8 @@ def kwargs_to_strings(**conversions): for arg, fmt in conversions.items(): if fmt not in separators: - raise GMTInvalidInput( - f"Invalid conversion type '{fmt}' for argument '{arg}'." - ) + msg = f"Invalid conversion type '{fmt}' for argument '{arg}'." 
+ raise GMTInvalidInput(msg) # Make the actual decorator function def converter(module_func): @@ -837,9 +841,8 @@ def new_module(*args, **kwargs): """ if oldname in kwargs: if newname in kwargs: - raise GMTInvalidInput( - f"Can't provide both '{newname}' and '{oldname}'." - ) + msg = f"Can't provide both '{newname}' and '{oldname}'." + raise GMTInvalidInput(msg) msg = ( f"The '{oldname}' parameter has been deprecated since {deprecate_version}" f" and will be removed in {remove_version}." diff --git a/pygmt/helpers/tempfile.py b/pygmt/helpers/tempfile.py index b32e422f28d..70cc688156a 100644 --- a/pygmt/helpers/tempfile.py +++ b/pygmt/helpers/tempfile.py @@ -1,3 +1,4 @@ +# noqa: A005 """ Utilities for dealing with temporary file management. """ @@ -59,7 +60,7 @@ class GMTTempFile: [0. 0. 0.] [1. 1. 1.] [2. 2. 2.] """ - def __init__(self, prefix: str = "pygmt-", suffix: str = ".txt"): + def __init__(self, prefix: str = "pygmt-", suffix: str = ".txt") -> None: """ Initialize the object. """ @@ -144,6 +145,8 @@ def tempfile_from_geojson(geojson): # https://github.com/geopandas/geopandas/issues/967#issuecomment-842877704 # https://github.com/GenericMappingTools/pygmt/issues/2497 int32_info = np.iinfo(np.int32) + # TODO(GeoPandas>=1.0): Remove the workaround for GeoPandas < 1. + # The default engine is "fiona" in v0.x and "pyogrio" in v1.x. if Version(gpd.__version__).major < 1: # GeoPandas v0.x # The default engine 'fiona' supports the 'schema' parameter. if geojson.index.name is None: @@ -203,10 +206,11 @@ def tempfile_from_image(image): try: image.rio.to_raster(raster_path=tmpfile.name) except AttributeError as e: # object has no attribute 'rio' - raise ImportError( + msg = ( "Package `rioxarray` is required to be installed to use this function. " "Please use `python -m pip install rioxarray` or " "`mamba install -c conda-forge rioxarray` " "to install the package." 
- ) from e + ) + raise ImportError(msg) from e yield tmpfile.name diff --git a/pygmt/helpers/testing.py b/pygmt/helpers/testing.py index 77e2a952bf3..29dfd08df19 100644 --- a/pygmt/helpers/testing.py +++ b/pygmt/helpers/testing.py @@ -112,11 +112,12 @@ def wrapper(*args, ext="png", request=None, **kwargs): else: # Images are not the same for key in ["actual", "expected", "diff"]: err[key] = Path(err[key]).relative_to(".") - raise GMTImageComparisonFailure( + msg = ( f"images not close (RMS {err['rms']:.3f}):\n" f"\t{err['actual']}\n" f"\t{err['expected']}" ) + raise GMTImageComparisonFailure(msg) finally: del fig_ref del fig_test diff --git a/pygmt/helpers/utils.py b/pygmt/helpers/utils.py index beb3da630d0..e32f5bbe03f 100644 --- a/pygmt/helpers/utils.py +++ b/pygmt/helpers/utils.py @@ -12,16 +12,38 @@ import time import webbrowser from collections.abc import Iterable, Mapping, Sequence +from pathlib import Path from typing import Any, Literal import xarray as xr from pygmt.encodings import charset from pygmt.exceptions import GMTInvalidInput +# Type hints for the list of encodings supported by PyGMT. +Encoding = Literal[ + "ascii", + "ISOLatin1+", + "ISO-8859-1", + "ISO-8859-2", + "ISO-8859-3", + "ISO-8859-4", + "ISO-8859-5", + "ISO-8859-6", + "ISO-8859-7", + "ISO-8859-8", + "ISO-8859-9", + "ISO-8859-10", + "ISO-8859-11", + "ISO-8859-13", + "ISO-8859-14", + "ISO-8859-15", + "ISO-8859-16", +] + def _validate_data_input( data=None, x=None, y=None, z=None, required_z=False, required_data=True, kind=None -): +) -> None: """ Check if the combination of data/x/y/z is valid. 
@@ -123,27 +145,7 @@ def _validate_data_input( raise GMTInvalidInput(msg) -def _check_encoding( - argstr: str, -) -> Literal[ - "ascii", - "ISOLatin1+", - "ISO-8859-1", - "ISO-8859-2", - "ISO-8859-3", - "ISO-8859-4", - "ISO-8859-5", - "ISO-8859-6", - "ISO-8859-7", - "ISO-8859-8", - "ISO-8859-9", - "ISO-8859-10", - "ISO-8859-11", - "ISO-8859-13", - "ISO-8859-14", - "ISO-8859-15", - "ISO-8859-16", -]: +def _check_encoding(argstr: str) -> Encoding: """ Check the charset encoding of a string. @@ -185,10 +187,9 @@ def _check_encoding( adobe_chars = set(charset["Symbol"].values()) | set( charset["ZapfDingbats"].values() ) - for encoding in ["ISOLatin1+"] + [f"ISO-8859-{i}" for i in range(1, 17)]: - if encoding == "ISO-8859-12": # ISO-8859-12 was abandoned. Skip it. - continue - if all(c in (set(charset[encoding].values()) | adobe_chars) for c in argstr): + for encoding in ["ISOLatin1+"] + [f"ISO-8859-{i}" for i in range(1, 17) if i != 12]: + chars = set(charset[encoding].values()) | adobe_chars + if all(c in chars for c in argstr): return encoding # type: ignore[return-value] # Return the "ISOLatin1+" encoding if the string contains characters from multiple # charset encodings or contains characters that are not in any charset encoding. @@ -340,34 +341,13 @@ def data_kind( return kind # type: ignore[return-value] -def non_ascii_to_octal( - argstr: str, - encoding: Literal[ - "ascii", - "ISOLatin1+", - "ISO-8859-1", - "ISO-8859-2", - "ISO-8859-3", - "ISO-8859-4", - "ISO-8859-5", - "ISO-8859-6", - "ISO-8859-7", - "ISO-8859-8", - "ISO-8859-9", - "ISO-8859-10", - "ISO-8859-11", - "ISO-8859-13", - "ISO-8859-14", - "ISO-8859-15", - "ISO-8859-16", - ] = "ISOLatin1+", -) -> str: +def non_ascii_to_octal(argstr: str, encoding: Encoding = "ISOLatin1+") -> str: r""" Translate non-ASCII characters to their corresponding octal codes. 
Currently, only non-ASCII characters in the Adobe ISOLatin1+, Adobe Symbol, Adobe ZapfDingbats, and ISO-8850-x (x can be in 1-11, 13-17) encodings are supported. - The Adobe Standard encoding is not supported yet. + The Adobe Standard+ encoding is not supported. Parameters ---------- @@ -496,7 +476,8 @@ def build_arg_list( # noqa: PLR0912 gmt_args = [] for key, value in kwdict.items(): if len(key) > 2: # Raise an exception for unrecognized options - raise GMTInvalidInput(f"Unrecognized parameter '{key}'.") + msg = f"Unrecognized parameter '{key}'." + raise GMTInvalidInput(msg) if value is None or value is False: # Exclude arguments that are None or False pass elif value is True: @@ -506,17 +487,14 @@ def build_arg_list( # noqa: PLR0912 else: gmt_args.append(f"-{key}{value}") - # Convert non-ASCII characters (if any) in the arguments to octal codes - encoding = _check_encoding("".join(gmt_args)) - if encoding != "ascii": - gmt_args = [non_ascii_to_octal(arg, encoding=encoding) for arg in gmt_args] gmt_args = sorted(gmt_args) - # Set --PS_CHAR_ENCODING=encoding if necessary - if encoding not in {"ascii", "ISOLatin1+"} and not ( - confdict and "PS_CHAR_ENCODING" in confdict - ): - gmt_args.append(f"--PS_CHAR_ENCODING={encoding}") + # Convert non-ASCII characters (if any) in the arguments to octal codes and set + # --PS_CHAR_ENCODING=encoding if necessary + if (encoding := _check_encoding("".join(gmt_args))) != "ascii": + gmt_args = [non_ascii_to_octal(arg, encoding=encoding) for arg in gmt_args] + if not (confdict and "PS_CHAR_ENCODING" in confdict): + gmt_args.append(f"--PS_CHAR_ENCODING={encoding}") if confdict: gmt_args.extend(f"--{key}={value}" for key, value in confdict.items()) @@ -532,7 +510,8 @@ def build_arg_list( # noqa: PLR0912 or str(outfile) in {"", ".", ".."} or str(outfile).endswith(("/", "\\")) ): - raise GMTInvalidInput(f"Invalid output file name '{outfile}'.") + msg = f"Invalid output file name '{outfile}'." 
+ raise GMTInvalidInput(msg) gmt_args.append(f"->{outfile}") return gmt_args @@ -573,7 +552,7 @@ def is_nonstr_iter(value): return isinstance(value, Iterable) and not isinstance(value, str) -def launch_external_viewer(fname: str, waiting: float = 0): +def launch_external_viewer(fname: str, waiting: float = 0) -> None: """ Open a file in an external viewer program. @@ -595,9 +574,8 @@ def launch_external_viewer(fname: str, waiting: float = 0): } match sys.platform: - case name if ( - (name == "linux" or name.startswith("freebsd")) - and (xdgopen := shutil.which("xdg-open")) + case name if (name == "linux" or name.startswith("freebsd")) and ( + xdgopen := shutil.which("xdg-open") ): # Linux/FreeBSD subprocess.run([xdgopen, fname], check=False, **run_args) # type:ignore[call-overload] case "darwin": # macOS @@ -605,7 +583,7 @@ def launch_external_viewer(fname: str, waiting: float = 0): case "win32": # Windows os.startfile(fname) # type:ignore[attr-defined] # noqa: S606 case _: # Fall back to the browser if can't recognize the operating system. - webbrowser.open_new_tab(f"file://{fname}") + webbrowser.open_new_tab(f"file://{Path(fname).resolve()}") if waiting > 0: # Preview images will be deleted when a GMT modern-mode session ends, but the # external viewer program may take a few seconds to open the images. diff --git a/pygmt/helpers/validators.py b/pygmt/helpers/validators.py index 879cc023b1f..7138dc1d6cb 100644 --- a/pygmt/helpers/validators.py +++ b/pygmt/helpers/validators.py @@ -50,11 +50,11 @@ def validate_output_table_type( 'file' """ if output_type not in {"file", "numpy", "pandas"}: - raise GMTInvalidInput( - "Must specify 'output_type' either as 'file', 'numpy', or 'pandas'." - ) + msg = "Must specify 'output_type' either as 'file', 'numpy', or 'pandas'." + raise GMTInvalidInput(msg) if output_type == "file" and outfile is None: - raise GMTInvalidInput("Must specify 'outfile' for output_type='file'.") + msg = "Must specify 'outfile' for output_type='file'." 
+ raise GMTInvalidInput(msg) if output_type != "file" and outfile is not None: msg = ( f"Changing 'output_type' from '{output_type}' to 'file' " diff --git a/pygmt/io.py b/pygmt/io.py index 33f3bfdbb12..a4ba289c7d9 100644 --- a/pygmt/io.py +++ b/pygmt/io.py @@ -1,3 +1,4 @@ +# noqa: A005 """ PyGMT input/output (I/O) utilities. """ @@ -38,7 +39,8 @@ def load_dataarray(filename_or_obj, **kwargs): xarray.open_dataarray """ if "cache" in kwargs: - raise TypeError("cache has no effect in this context") + msg = "'cache' has no effect in this context." + raise TypeError(msg) with xr.open_dataarray(filename_or_obj, **kwargs) as dataarray: result = dataarray.load() diff --git a/pygmt/session_management.py b/pygmt/session_management.py index 87055bb44e8..ac18218c858 100644 --- a/pygmt/session_management.py +++ b/pygmt/session_management.py @@ -9,7 +9,7 @@ from pygmt.helpers import unique_name -def begin(): +def begin() -> None: """ Initiate a new GMT modern mode session. @@ -17,25 +17,23 @@ def begin(): Only meant to be used once for creating the global session. """ - # On Windows, need to set GMT_SESSION_NAME to a unique value + # On Windows, need to set GMT_SESSION_NAME to a unique value. if sys.platform == "win32": os.environ["GMT_SESSION_NAME"] = unique_name() prefix = "pygmt-session" with Session() as lib: lib.call_module(module="begin", args=[prefix]) - # pygmt relies on GMT modern mode with GMT_COMPATIBILITY at version 6 + # PyGMT relies on GMT modern mode with GMT_COMPATIBILITY at version 6. lib.call_module(module="set", args=["GMT_COMPATIBILITY=6"]) -def end(): +def end() -> None: """ - Terminate GMT modern mode session and optionally produce the figure files. + Terminate the GMT modern mode session created by :func:`pygmt.begin`. - Called after :func:`pygmt.begin` and all commands that you want included in - a session. 
Will finalize any PostScript plots that were made in the - background, convert them to the desired format (specified in - ``pygmt.begin``), and bring the figures to the working directory. + Called after :func:`pygmt.begin` and all commands that you want included in a + session. Will clean up the session directory completely. """ with Session() as lib: lib.call_module(module="end", args=[]) diff --git a/pygmt/src/__init__.py b/pygmt/src/__init__.py index e4db7321963..8905124f917 100644 --- a/pygmt/src/__init__.py +++ b/pygmt/src/__init__.py @@ -29,6 +29,7 @@ from pygmt.src.grdview import grdview from pygmt.src.grdvolume import grdvolume from pygmt.src.histogram import histogram +from pygmt.src.hlines import hlines from pygmt.src.image import image from pygmt.src.info import info from pygmt.src.inset import inset @@ -56,6 +57,7 @@ from pygmt.src.timestamp import timestamp from pygmt.src.triangulate import triangulate from pygmt.src.velo import velo +from pygmt.src.vlines import vlines from pygmt.src.which import which from pygmt.src.wiggle import wiggle from pygmt.src.x2sys_cross import x2sys_cross diff --git a/pygmt/src/binstats.py b/pygmt/src/binstats.py index 6430d1cf787..ba9b205b3ef 100644 --- a/pygmt/src/binstats.py +++ b/pygmt/src/binstats.py @@ -43,38 +43,36 @@ def binstats(data, outgrid: str | None = None, **kwargs) -> xr.DataArray | None: Parameters ---------- data : str, {table-like} - A file name of an ASCII data table or a 2-D - {table-classes}. + A file name of an ASCII data table or a 2-D {table-classes}. {outgrid} statistic : str **a**\|\ **d**\|\ **g**\|\ **i**\|\ **l**\|\ **L**\|\ **m**\|\ **n**\ \|\ **o**\|\ **p**\|\ **q**\ [*quant*]\|\ **r**\|\ **s**\|\ **u**\ \|\ **U**\|\ **z**. Choose the statistic that will be computed per node based on the - points that are within *radius* distance of the node. Select one of: + points that are within *radius* distance of the node. 
Select one of: - - **a** for mean (average) - - **d** for median absolute deviation (MAD) - - **g** for full (max-min) range - - **i** for 25-75% interquartile range - - **l** for minimum (low) - - **L** for minimum of positive values only - - **m** for median - - **n** the number of values - - **o** for LMS scale - - **p** for mode (maximum likelihood) - - **q** for selected quantile (append desired quantile in - 0-100% range [50]) - - **r** for the r.m.s. - - **s** for standard deviation - - **u** for maximum (upper) - - **U** for maximum of negative values only - - **z** for the sum + - **a**: mean (average) + - **d**: median absolute deviation (MAD) + - **g**: full (max-min) range + - **i**: 25-75% interquartile range + - **l**: minimum (low) + - **L**: minimum of positive values only + - **m**: median + - **n**: number of values + - **o**: LMS scale + - **p**: mode (maximum likelihood) + - **q**: selected quantile (append desired quantile in 0-100% range [50]) + - **r**: root mean square (RMS) + - **s**: standard deviation + - **u**: maximum (upper) + - **U**: maximum of negative values only + - **z**: sum empty : float Set the value assigned to empty nodes [Default is NaN]. normalize : bool Normalize the resulting grid values by the area represented by the - search *radius* [no normalization]. + search *radius* [Default is no normalization]. search_radius : float or str Set the *search_radius* that determines which data points are considered close to a node. Append the distance unit. 
@@ -100,7 +98,7 @@ def binstats(data, outgrid: str | None = None, **kwargs) -> xr.DataArray | None: Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) """ with Session() as lib: diff --git a/pygmt/src/blockm.py b/pygmt/src/blockm.py index a8b35d6c942..581167bcd5d 100644 --- a/pygmt/src/blockm.py +++ b/pygmt/src/blockm.py @@ -126,10 +126,10 @@ def blockmean( [**m**\|\ **n**\|\ **s**\|\ **w**]. Type of summary values calculated by blockmean. - - **m** - reports mean value [Default] - - **n** - report the number of input points inside each block - - **s** - report the sum of all z-values inside a block - - **w** - report the sum of weights + - **m**: reports mean value [Default] + - **n**: report the number of input points inside each block + - **s**: report the sum of all z-values inside a block + - **w**: report the sum of weights {region} {verbose} {aspatial} @@ -244,7 +244,7 @@ def blockmedian( ret Return type depends on ``outfile`` and ``output_type``: - - ``None`` if ``outfile`` is set (output will be stored in file set by + - ``None`` if ``outfile`` is set (output will be stored in the file set by ``outfile``) - :class:`pandas.DataFrame` or :class:`numpy.ndarray` if ``outfile`` is not set (depends on ``output_type``) @@ -342,7 +342,7 @@ def blockmode( ret Return type depends on ``outfile`` and ``output_type``: - - ``None`` if ``outfile`` is set (output will be stored in file set by + - ``None`` if ``outfile`` is set (output will be stored in the file set by ``outfile``) - :class:`pandas.DataFrame` or :class:`numpy.ndarray` if ``outfile`` is not set (depends on ``output_type``) diff --git a/pygmt/src/coast.py b/pygmt/src/coast.py index 87a99692541..066c3b00de5 100644 --- a/pygmt/src/coast.py +++ b/pygmt/src/coast.py @@ -87,26 
+87,26 @@ def coast(self, **kwargs): Choose from the list of river types below; pass a list to ``rivers`` to use multiple arguments. - - ``0``: Double-lined rivers (river-lakes) - - ``1``: Permanent major rivers - - ``2``: Additional major rivers - - ``3``: Additional rivers - - ``4``: Minor rivers - - ``5``: Intermittent rivers - major - - ``6``: Intermittent rivers - additional - - ``7``: Intermittent rivers - minor - - ``8``: Major canals - - ``9``: Minor canals - - ``10``: Irrigation canals + - ``0``: double-lined rivers (river-lakes) + - ``1``: permanent major rivers + - ``2``: additional major rivers + - ``3``: additional rivers + - ``4``: minor rivers + - ``5``: intermittent rivers - major + - ``6``: intermittent rivers - additional + - ``7``: intermittent rivers - minor + - ``8``: major canals + - ``9``: minor canals + - ``10``: irrigation canals You can also choose from several preconfigured river groups: - - ``"a"``: All rivers and canals (0-10) - - ``"A"``: All rivers and canals except river-lakes (1-10) - - ``"r"``: All permanent rivers (0-4) - - ``"R"``: All permanent rivers except river-lakes (1-4) - - ``"i"``: All intermittent rivers (5-7) - - ``"c"``: All canals (8-10) + - ``"a"``: rivers and canals (``0`` - ``10``) + - ``"A"``: rivers and canals except river-lakes (``1`` - ``10``) + - ``"r"``: permanent rivers (``0`` - ``4``) + - ``"R"``: permanent rivers except river-lakes (``1`` - ``4``) + - ``"i"``: intermittent rivers (``5`` - ``7``) + - ``"c"``: canals (``8`` - ``10``) map_scale : str [**g**\|\ **j**\|\ **J**\|\ **n**\|\ **x**]\ *refpoint*\ **+w**\ *length*. @@ -137,10 +137,10 @@ def coast(self, **kwargs): Choose from the list of boundaries below. Pass a list to ``borders`` to use multiple arguments. 
- - ``1``: National boundaries - - ``2``: State boundaries within the Americas - - ``3``: Marine boundaries - - ``"a"``: All boundaries (1-3) + - ``1``: national boundaries + - ``2``: state boundaries within the Americas + - ``3``: marine boundaries + - ``"a"``: all boundaries (``1`` - ``3``) water : str Select filling "wet" areas. @@ -195,9 +195,10 @@ def coast(self, **kwargs): """ kwargs = self._preprocess(**kwargs) if not args_in_kwargs(args=["C", "G", "S", "I", "N", "E", "Q", "W"], kwargs=kwargs): - raise GMTInvalidInput( - """At least one of the following parameters must be specified: - lakes, land, water, rivers, borders, dcw, Q, or shorelines""" + msg = ( + "At least one of the following parameters must be specified: " + "lakes, land, water, rivers, borders, dcw, Q, or shorelines." ) + raise GMTInvalidInput(msg) with Session() as lib: lib.call_module(module="coast", args=build_arg_list(kwargs)) diff --git a/pygmt/src/dimfilter.py b/pygmt/src/dimfilter.py index 496f35644e9..3149a55212c 100644 --- a/pygmt/src/dimfilter.py +++ b/pygmt/src/dimfilter.py @@ -51,34 +51,32 @@ def dimfilter(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None distance : int or str Distance flag tells how grid (x,y) relates to filter width, as follows: - - **0**\ : grid (x,y) in same units as *width*, Cartesian distances. - - **1**\ : grid (x,y) in degrees, *width* in kilometers, Cartesian - distances. - - **2**\ : grid (x,y) in degrees, *width* in km, dx scaled by - cos(middle y), Cartesian distances. + - **0**: grid (x,y) in same units as *width*, Cartesian distances. + - **1**: grid (x,y) in degrees, *width* in kilometers, Cartesian distances. + - **2**: grid (x,y) in degrees, *width* in km, dx scaled by cos(middle y), + Cartesian distances. The above options are fastest because they allow weight matrix to be computed only once. The next two options are slower because they recompute weights for each latitude. 
- - **3**\ : grid (x,y) in degrees, *width* in km, dx scaled by
- cosine(y), Cartesian distance calculation.
- - **4**\ : grid (x,y) in degrees, *width* in km, Spherical distance
- calculation.
+ - **3**: grid (x,y) in degrees, *width* in km, dx scaled by cosine(y),
+ Cartesian distance calculation.
+ - **4**: grid (x,y) in degrees, *width* in km, Spherical distance calculation.
filter : str
**x**\ *width*\ [**+l**\|\ **u**].
Set the primary filter type. Choose among convolution and
non-convolution filters. Use the filter code **x** followed by
the full diameter *width*. Available convolution filters are:
- - (**b**) Boxcar: All weights are equal.
- - (**c**) Cosine Arch: Weights follow a cosine arch curve.
- - (**g**) Gaussian: Weights are given by the Gaussian function.
+ - **b**: boxcar. All weights are equal.
+ - **c**: cosine arch. Weights follow a cosine arch curve.
+ - **g**: Gaussian. Weights are given by the Gaussian function.
Non-convolution filters are:
- - (**m**) Median: Returns median value.
- - (**p**) Maximum likelihood probability (a mode estimator): Return
+ - **m**: median. Returns median value.
+ - **p**: maximum likelihood probability (a mode estimator). Return
modal value. If more than one mode is found we return their
average value. Append **+l** or **+h** to the filter width if
you want to return the smallest or largest of each sector's
modal values.
@@ -89,23 +87,23 @@ def dimfilter(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None
set to 1, the secondary filter is not effective. Available
secondary filters **x** are:
- - (**l**) Lower: Return the minimum of all filtered values.
- - (**u**) Upper: Return the maximum of all filtered values.
- - (**a**) Average: Return the mean of all filtered values.
- - (**m**) Median: Return the median of all filtered values.
- - (**p**) Mode: Return the mode of all filtered values:
+ - **l**: lower. Return the minimum of all filtered values.
+ - **u**: upper. 
Return the maximum of all filtered values. + - **a**: average. Return the mean of all filtered values. + - **m**: median. Return the median of all filtered values. + - **p**: mode. Return the mode of all filtered values. If more than one mode is found we return their average value. Append **+l** or **+h** to the sectors if you rather want to return the smallest or largest of the modal values. spacing : str or list *x_inc* [and optionally *y_inc*] is the output increment. Append **m** to indicate minutes, or **c** to indicate seconds. If the new - *x_inc*, *y_inc* are NOT integer multiples of the old ones (in the + *x_inc*, *y_inc* are **not** integer multiples of the old ones (in the input data), filtering will be considerably slower. [Default is same - as input.] + as the input.] region : str or list [*xmin*, *xmax*, *ymin*, *ymax*]. - Define the region of the output points [Default is same as input]. + Define the region of the output points [Default is the same as the input]. {verbose} Returns @@ -114,7 +112,7 @@ def dimfilter(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example @@ -137,11 +135,11 @@ def dimfilter(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None ... ) """ if not all(arg in kwargs for arg in ["D", "F", "N"]) and "Q" not in kwargs: - raise GMTInvalidInput( - """At least one of the following parameters must be specified: - distance, filters, or sectors.""" + msg = ( + "At least one of the following parameters must be specified: " + "distance, filters, or sectors." 
) - + raise GMTInvalidInput(msg) with Session() as lib: with ( lib.virtualfile_in(check_kind="raster", data=grid) as vingrd, diff --git a/pygmt/src/filter1d.py b/pygmt/src/filter1d.py index 0469385dcf9..6d0e97938cf 100644 --- a/pygmt/src/filter1d.py +++ b/pygmt/src/filter1d.py @@ -57,25 +57,25 @@ def filter1d( Available convolution filter types are: - - (**b**) Boxcar: All weights are equal. - - (**c**) Cosine Arch: Weights follow a cosine arch curve. - - (**g**) Gaussian: Weights are given by the Gaussian function. - - (**f**) Custom: Instead of *width* give name of a one-column file + - **b**: boxcar. All weights are equal. + - **c**: cosine arch. Weights follow a cosine arch curve. + - **g**: Gaussian. Weights are given by the Gaussian function. + - **f**: custom. Instead of *width* give name of a one-column file with your own weight coefficients. Non-convolution filter types are: - - (**m**) Median: Returns median value. - - (**p**) Maximum likelihood probability (a mode estimator): Return + - **m**: median. Returns median value. + - **p**: maximum likelihood probability (a mode estimator). Return modal value. If more than one mode is found we return their average value. Append **+l** or **+u** if you rather want to return the lowermost or uppermost of the modal values. - - (**l**) Lower: Return the minimum of all values. - - (**L**) Lower: Return minimum of all positive values only. - - (**u**) Upper: Return maximum of all values. - - (**U**) Upper: Return maximum of all negative values only. + - **l**: lower (absolute). Return the minimum of all values. + - **L**: lower. Return minimum of all positive values only. + - **u**: upper (absolute). Return maximum of all values. + - **U**: upper. Return maximum of all negative values only. 
- Upper case type **B**, **C**, **G**, **M**, **P**, **F** will use + Uppercase type **B**, **C**, **G**, **M**, **P**, **F** will use robust filter versions: i.e., replace outliers (2.5 L1 scale off median, using 1.4826 \* median absolute deviation [MAD]) with median during filtering. @@ -105,12 +105,14 @@ def filter1d( ret Return type depends on ``outfile`` and ``output_type``: - - None if ``outfile`` is set (output will be stored in file set by ``outfile``) + - ``None`` if ``outfile`` is set (output will be stored in the file set by + ``outfile``) - :class:`pandas.DataFrame` or :class:`numpy.ndarray` if ``outfile`` is not set (depends on ``output_type``) """ if kwargs.get("F") is None: - raise GMTInvalidInput("Pass a required argument to 'filter_type'.") + msg = "Pass a required argument to 'filter_type'." + raise GMTInvalidInput(msg) output_type = validate_output_table_type(output_type, outfile=outfile) diff --git a/pygmt/src/grd2cpt.py b/pygmt/src/grd2cpt.py index 240d47d30c7..44b582b3269 100644 --- a/pygmt/src/grd2cpt.py +++ b/pygmt/src/grd2cpt.py @@ -182,7 +182,8 @@ def grd2cpt(grid, **kwargs): >>> fig.show() """ if kwargs.get("W") is not None and kwargs.get("Ww") is not None: - raise GMTInvalidInput("Set only categorical or cyclic to True, not both.") + msg = "Set only 'categorical' or 'cyclic' to True, not both." + raise GMTInvalidInput(msg) if (output := kwargs.pop("H", None)) is not None: kwargs["H"] = True diff --git a/pygmt/src/grd2xyz.py b/pygmt/src/grd2xyz.py index f6c046137f1..b31d0013a25 100644 --- a/pygmt/src/grd2xyz.py +++ b/pygmt/src/grd2xyz.py @@ -96,17 +96,17 @@ def grd2xyz( appending **y**. If the byte-order needs to be swapped, append **w**. 
Select one of several data types (all binary except **a**): - * **a** ASCII representation of a single item per record - * **c** int8_t, signed 1-byte character - * **u** uint8_t, unsigned 1-byte character - * **h** int16_t, short 2-byte integer - * **H** uint16_t, unsigned short 2-byte integer - * **i** int32_t, 4-byte integer - * **I** uint32_t, unsigned 4-byte integer - * **l** int64_t, long (8-byte) integer - * **L** uint64_t, unsigned long (8-byte) integer - * **f** 4-byte floating point single precision - * **d** 8-byte floating point double precision + - **a**: ASCII representation of a single item per record + - **c**: int8_t, signed 1-byte character + - **u**: uint8_t, unsigned 1-byte character + - **h**: int16_t, short 2-byte integer + - **H**: uint16_t, unsigned short 2-byte integer + - **i**: int32_t, 4-byte integer + - **I**: uint32_t, unsigned 4-byte integer + - **l**: int64_t, long (8-byte) integer + - **L**: uint64_t, unsigned long (8-byte) integer + - **f**: 4-byte floating point single precision + - **d**: 8-byte floating point double precision Default format is scanline orientation of ASCII numbers: **TLa**. {binary} @@ -121,7 +121,8 @@ def grd2xyz( ret Return type depends on ``outfile`` and ``output_type``: - - None if ``outfile`` is set (output will be stored in file set by ``outfile``) + - ``None`` if ``outfile`` is set (output will be stored in the file set by + ``outfile``) - :class:`pandas.DataFrame` or :class:`numpy.ndarray` if ``outfile`` is not set (depends on ``output_type``) @@ -133,7 +134,7 @@ def grd2xyz( >>> grid = pygmt.datasets.load_earth_relief( ... resolution="30m", region=[10, 30, 15, 25] ... 
) - >>> # Create a pandas DataFrame with the xyz data from an input grid + >>> # Create a pandas.DataFrame with the xyz data from an input grid >>> xyz_dataframe = pygmt.grd2xyz(grid=grid, output_type="pandas") >>> xyz_dataframe.head(n=2) lon lat z @@ -143,12 +144,11 @@ def grd2xyz( output_type = validate_output_table_type(output_type, outfile=outfile) if kwargs.get("o") is not None and output_type == "pandas": - raise GMTInvalidInput( - "If 'outcols' is specified, 'output_type' must be either 'numpy'" - "or 'file'." + msg = ( + "If 'outcols' is specified, 'output_type' must be either 'numpy' or 'file'." ) - - # Set the default column names for the pandas dataframe header. + raise GMTInvalidInput(msg) + # Set the default column names for the pandas DataFrame header. column_names: list[str] = ["x", "y", "z"] # Let output pandas column names match input DataArray dimension names if output_type == "pandas" and isinstance(grid, xr.DataArray): diff --git a/pygmt/src/grdclip.py b/pygmt/src/grdclip.py index 912f66a5edc..558b9532a9d 100644 --- a/pygmt/src/grdclip.py +++ b/pygmt/src/grdclip.py @@ -67,7 +67,7 @@ def grdclip(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None: Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example diff --git a/pygmt/src/grdcontour.py b/pygmt/src/grdcontour.py index 53c328f8264..86d16aafa19 100644 --- a/pygmt/src/grdcontour.py +++ b/pygmt/src/grdcontour.py @@ -40,7 +40,7 @@ def grdcontour(self, grid, **kwargs): r""" Convert grids or images to contours and plot them on maps. - Takes a grid file name or an xarray.DataArray object as input. + Takes a grid file name or an :class:`xarray.DataArray` object as input. 
Full option list at :gmt-docs:`grdcontour.html` diff --git a/pygmt/src/grdcut.py b/pygmt/src/grdcut.py index e2e4bc8c91c..d248d69ae27 100644 --- a/pygmt/src/grdcut.py +++ b/pygmt/src/grdcut.py @@ -85,7 +85,7 @@ def grdcut(grid, **kwargs) -> xr.DataArray | None: Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example diff --git a/pygmt/src/grdfill.py b/pygmt/src/grdfill.py index eb24bb2bfdd..e59d2c03296 100644 --- a/pygmt/src/grdfill.py +++ b/pygmt/src/grdfill.py @@ -56,7 +56,7 @@ def grdfill(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None: Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example @@ -69,7 +69,8 @@ def grdfill(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None: >>> filled_grid = pygmt.grdfill(grid=earth_relief_holes, mode="c20") """ if kwargs.get("A") is None and kwargs.get("L") is None: - raise GMTInvalidInput("At least parameter 'mode' or 'L' must be specified.") + msg = "At least parameter 'mode' or 'L' must be specified." + raise GMTInvalidInput(msg) with Session() as lib: with ( diff --git a/pygmt/src/grdfilter.py b/pygmt/src/grdfilter.py index e5a6ea04703..786e280dd61 100644 --- a/pygmt/src/grdfilter.py +++ b/pygmt/src/grdfilter.py @@ -48,13 +48,13 @@ def grdfilter(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None [/*width2*\][*modifiers*]. 
Name of the filter type you wish to apply, followed by the *width*: - - **b** - Box Car - - **c** - Cosine Arch - - **g** - Gaussian - - **o** - Operator - - **m** - Median - - **p** - Maximum Likelihood probability - - **h** - Histogram + - **b**: Box Car + - **c**: Cosine Arch + - **g**: Gaussian + - **o**: Operator + - **m**: Median + - **p**: Maximum Likelihood probability + - **h**: Histogram distance : str State how the grid (x,y) relates to the filter *width*: @@ -102,7 +102,7 @@ def grdfilter(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Examples diff --git a/pygmt/src/grdgradient.py b/pygmt/src/grdgradient.py index d90960dd8c1..96c4c61937f 100644 --- a/pygmt/src/grdgradient.py +++ b/pygmt/src/grdgradient.py @@ -69,11 +69,11 @@ def grdgradient(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | No Find the direction of the positive (up-slope) gradient of the data. The following options are supported: - - **a** - Find the aspect (i.e., the down-slope direction) - - **c** - Use the conventional Cartesian angles measured + - **a**: Find the aspect (i.e., the down-slope direction) + - **c**: Use the conventional Cartesian angles measured counterclockwise from the positive x (east) direction. - - **o** - Report orientations (0-180) rather than directions (0-360). - - **n** - Add 90 degrees to all angles (e.g., to give local strikes of + - **o**: Report orientations (0-180) rather than directions (0-360). + - **n**: Add 90 degrees to all angles (e.g., to give local strikes of the surface). 
radiance : str or list [**m**\|\ **s**\|\ **p**]\ *azim/elev*\ [**+a**\ *ambient*][**+d**\ @@ -102,14 +102,14 @@ def grdgradient(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | No given, it is set to the average of :math:`g`. The following forms are supported: - - **True** - Normalize using :math:`g_n = \mbox{{amp}}\ + - **True**: Normalize using :math:`g_n = \mbox{{amp}}\ (\frac{{g - \mbox{{offset}}}}{{max(|g - \mbox{{offset}}|)}})` - - **e** - Normalize using a cumulative Laplace distribution yielding: + - **e**: Normalize using a cumulative Laplace distribution yielding: :math:`g_n = \mbox{{amp}}(1 - \ \exp{{(\sqrt{{2}}\frac{{g - \mbox{{offset}}}}{{\sigma}}))}}`, where :math:`\sigma` is estimated using the L1 norm of :math:`(g - \mbox{{offset}})` if it is not given. - - **t** - Normalize using a cumulative Cauchy distribution yielding: + - **t**: Normalize using a cumulative Cauchy distribution yielding: :math:`g_n = \ \frac{{2(\mbox{{amp}})}}{{\pi}}(\tan^{{-1}}(\frac{{g - \ \mbox{{offset}}}}{{\sigma}}))` where :math:`\sigma` is estimated @@ -143,7 +143,7 @@ def grdgradient(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | No Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) @@ -159,12 +159,14 @@ def grdgradient(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | No >>> new_grid = pygmt.grdgradient(grid=grid, azimuth=10) """ if kwargs.get("Q") is not None and kwargs.get("N") is None: - raise GMTInvalidInput("""Must specify normalize if tiles is specified.""") + msg = "Must specify normalize if tiles is specified." 
+ raise GMTInvalidInput(msg) if not args_in_kwargs(args=["A", "D", "E"], kwargs=kwargs): - raise GMTInvalidInput( + msg = ( "At least one of the following parameters must be specified: " "azimuth, direction, or radiance." ) + raise GMTInvalidInput(msg) with Session() as lib: with ( lib.virtualfile_in(check_kind="raster", data=grid) as vingrd, diff --git a/pygmt/src/grdhisteq.py b/pygmt/src/grdhisteq.py index 359eb0f589e..9f9b3e89320 100644 --- a/pygmt/src/grdhisteq.py +++ b/pygmt/src/grdhisteq.py @@ -99,9 +99,8 @@ def equalize_grid( ret Return type depends on the ``outgrid`` parameter: - - xarray.DataArray if ``outgrid`` is None - - None if ``outgrid`` is a str (grid output is stored in - ``outgrid``) + - :class:`xarray.DataArray` if ``outgrid`` is ``None`` + - ``None`` if ``outgrid`` is a str (grid output is stored in ``outgrid``) Example ------- @@ -227,7 +226,8 @@ def compute_bins( output_type = validate_output_table_type(output_type, outfile=outfile) if kwargs.get("h") is not None and output_type != "file": - raise GMTInvalidInput("'header' is only allowed with output_type='file'.") + msg = "'header' is only allowed with output_type='file'." + raise GMTInvalidInput(msg) with Session() as lib: with ( diff --git a/pygmt/src/grdimage.py b/pygmt/src/grdimage.py index a1ba783ab6c..198e76b41cb 100644 --- a/pygmt/src/grdimage.py +++ b/pygmt/src/grdimage.py @@ -103,7 +103,7 @@ def grdimage(self, grid, **kwargs): paint the mask with the given color. Append **+b** to paint the background pixels (1) or **+f** for the foreground pixels [Default is **+f**]. - shading : str or xarray.DataArray + shading : str or :class:`xarray.DataArray` [*intensfile*\|\ *intensity*\|\ *modifiers*]. 
Give the name of a grid file or a DataArray with intensities in the (-1,+1) range, or a constant intensity to apply everywhere (affects the @@ -158,10 +158,11 @@ def grdimage(self, grid, **kwargs): # Do not support -A option if any(kwargs.get(arg) is not None for arg in ["A", "img_out"]): - raise GMTInvalidInput( + msg = ( "Parameter 'img_out'/'A' is not implemented. " "Please consider submitting a feature request to us." ) + raise GMTInvalidInput(msg) with Session() as lib: with ( diff --git a/pygmt/src/grdinfo.py b/pygmt/src/grdinfo.py index 7ebb8bfdd03..3194165e528 100644 --- a/pygmt/src/grdinfo.py +++ b/pygmt/src/grdinfo.py @@ -30,7 +30,7 @@ def grdinfo(grid, **kwargs): r""" Get information about a grid. - Can read the grid from a file or given as an xarray.DataArray grid. + Can read the grid from a file or given as an :class:`xarray.DataArray` grid. Full option list at :gmt-docs:`grdinfo.html` diff --git a/pygmt/src/grdlandmask.py b/pygmt/src/grdlandmask.py index 5244c89eca2..99b64f799b8 100644 --- a/pygmt/src/grdlandmask.py +++ b/pygmt/src/grdlandmask.py @@ -85,7 +85,7 @@ def grdlandmask(outgrid: str | None = None, **kwargs) -> xr.DataArray | None: Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example @@ -96,7 +96,8 @@ def grdlandmask(outgrid: str | None = None, **kwargs) -> xr.DataArray | None: >>> landmask = pygmt.grdlandmask(spacing=1, region=[125, 130, 30, 35]) """ if kwargs.get("I") is None or kwargs.get("R") is None: - raise GMTInvalidInput("Both 'region' and 'spacing' must be specified.") + msg = "Both 'region' and 'spacing' must be specified." 
+ raise GMTInvalidInput(msg) with Session() as lib: with lib.virtualfile_out(kind="grid", fname=outgrid) as voutgrd: diff --git a/pygmt/src/grdproject.py b/pygmt/src/grdproject.py index 617b62d07f1..d49300b3ea5 100644 --- a/pygmt/src/grdproject.py +++ b/pygmt/src/grdproject.py @@ -53,8 +53,8 @@ def grdproject(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | Non {grid} {outgrid} inverse : bool - When set to ``True`` transforms grid from rectangular to - geographical [Default is False]. + When set to ``True`` transforms grid from rectangular to geographical + [Default is ``False``]. {projection} {region} center : str or list @@ -63,7 +63,7 @@ def grdproject(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | Non is relative to lower left corner]. Optionally, add offsets in the projected units to be added (or subtracted when ``inverse`` is set) to (from) the projected coordinates, such as false eastings and - northings for particular projection zones [0/0]. + northings for particular projection zones [Default is ``[0, 0]``]. {spacing} dpi : int Set the resolution for the new grid in dots per inch. @@ -89,7 +89,7 @@ def grdproject(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | Non Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example @@ -103,7 +103,8 @@ def grdproject(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | Non >>> new_grid = pygmt.grdproject(grid=grid, projection="M10c", region=region) """ if kwargs.get("J") is None: - raise GMTInvalidInput("The projection must be specified.") + msg = "The projection must be specified." 
+ raise GMTInvalidInput(msg) with Session() as lib: with ( diff --git a/pygmt/src/grdsample.py b/pygmt/src/grdsample.py index dfa77ced476..2e822dd939e 100644 --- a/pygmt/src/grdsample.py +++ b/pygmt/src/grdsample.py @@ -65,7 +65,7 @@ def grdsample(grid, outgrid: str | None = None, **kwargs) -> xr.DataArray | None Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray` if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example diff --git a/pygmt/src/grdtrack.py b/pygmt/src/grdtrack.py index c8693308edf..521448b1867 100644 --- a/pygmt/src/grdtrack.py +++ b/pygmt/src/grdtrack.py @@ -191,7 +191,7 @@ def grdtrack( {region} no_skip : bool Do *not* skip points that fall outside the domain of the grid(s) - [Default only output points within grid domain]. + [Default only output points within the grid domain]. stack : str or list *method*/*modifiers*. In conjunction with ``crossprofile``, compute a single stacked profile @@ -290,13 +290,16 @@ def grdtrack( ... ) """ if points is not None and kwargs.get("E") is not None: - raise GMTInvalidInput("Can't set both 'points' and 'profile'.") + msg = "Can't set both 'points' and 'profile'." + raise GMTInvalidInput(msg) if points is None and kwargs.get("E") is None: - raise GMTInvalidInput("Must give 'points' or set 'profile'.") + msg = "Must give 'points' or set 'profile'." + raise GMTInvalidInput(msg) if hasattr(points, "columns") and newcolname is None: - raise GMTInvalidInput("Please pass in a str to 'newcolname'") + msg = "Please pass in a str to 'newcolname'." 
+ raise GMTInvalidInput(msg) output_type = validate_output_table_type(output_type, outfile=outfile) diff --git a/pygmt/src/grdview.py b/pygmt/src/grdview.py index 5e6256a0b1f..77ace573a30 100644 --- a/pygmt/src/grdview.py +++ b/pygmt/src/grdview.py @@ -61,7 +61,7 @@ def grdview(self, grid, **kwargs): {frame} cmap : str The name of the color palette table to use. - drapegrid : str or xarray.DataArray + drapegrid : str or :class:`xarray.DataArray` The file name or a DataArray of the image grid to be draped on top of the relief provided by ``grid`` [Default determines colors from grid]. Note that ``zscale`` and ``plane`` always refer to the grid. @@ -76,12 +76,12 @@ def grdview(self, grid, **kwargs): Specify cover type of the grid. Select one of following settings: - - **m** - mesh plot [Default]. - - **mx** or **my** - waterfall plots (row or column profiles). - - **s** - surface plot, and optionally append **m** to have mesh lines + - **m**: mesh plot [Default]. + - **mx** or **my**: waterfall plots (row or column profiles). + - **s**: surface plot, and optionally append **m** to have mesh lines drawn on top of the surface. - - **i** - image plot. - - **c** - Same as **i** but will make nodes with z = NaN transparent. + - **i**: image plot. + - **c**: Same as **i** but will make nodes with z = NaN transparent. For any of these choices, you may force a monochrome image by appending the modifier **+m**. diff --git a/pygmt/src/histogram.py b/pygmt/src/histogram.py index 54cc94283c9..c791ee4c093 100644 --- a/pygmt/src/histogram.py +++ b/pygmt/src/histogram.py @@ -1,5 +1,5 @@ """ -Histogram - Create a histogram +Histogram - Create a histogram. """ from pygmt.clib import Session @@ -104,8 +104,9 @@ def histogram(self, data, **kwargs): Draw a stairs-step diagram which does not include the internal bars of the default histogram. horizontal : bool - Plot the histogram using horizontal bars instead of the - default vertical bars. 
+ Plot the histogram horizontally from x = 0 [Default is vertically from y = 0]. + The plot dimensions remain the same, but the two axes are flipped, i.e., the + x-axis is plotted vertically and the y-axis is plotted horizontally. series : int, str, or list [*min*\ /*max*\ /]\ *inc*\ [**+n**\ ]. Set the interval for the width of each bar in the histogram. diff --git a/pygmt/src/hlines.py b/pygmt/src/hlines.py new file mode 100644 index 00000000000..e2c390b4eea --- /dev/null +++ b/pygmt/src/hlines.py @@ -0,0 +1,138 @@ +""" +hlines - Plot horizontal lines. +""" + +from collections.abc import Sequence + +import numpy as np +from pygmt.exceptions import GMTInvalidInput + +__doctest_skip__ = ["hlines"] + + +def hlines( + self, + y: float | Sequence[float], + xmin: float | Sequence[float] | None = None, + xmax: float | Sequence[float] | None = None, + pen: str | None = None, + label: str | None = None, + no_clip: bool = False, + perspective: str | bool | None = None, +): + """ + Plot one or multiple horizontal line(s). + + This method is a high-level wrapper around :meth:`pygmt.Figure.plot` that focuses on + plotting horizontal lines at Y-coordinates specified by the ``y`` parameter. The + ``y`` parameter can be a single value (for a single horizontal line) or a sequence + of values (for multiple horizontal lines). + + By default, the X-coordinates of the start and end points of the lines are set to + be the X-limits of the current plot, but this can be overridden by specifying the + ``xmin`` and ``xmax`` parameters. ``xmin`` and ``xmax`` can be either a single + value or a sequence of values. If a single value is provided, it is applied to all + lines. If a sequence is provided, the length of ``xmin`` and ``xmax`` must match + the length of ``y``. + + The term "horizontal" lines can be interpreted differently in different coordinate + systems: + + - **Cartesian**: lines are plotted as straight lines. + - **Polar**: lines are plotted as arcs along a constant radius. 
+ - **Geographic**: lines are plotted as arcs along parallels (i.e., constant + latitude). + + Parameters + ---------- + y + Y-coordinates to plot the lines. It can be a single value (for a single line) + or a sequence of values (for multiple lines). + xmin/xmax + X-coordinates of the start/end point(s) of the line(s). If ``None``, defaults to + the X-limits of the current plot. ``xmin`` and ``xmax`` can be either a single + value or a sequence of values. If a single value is provided, it is applied to + all lines. If a sequence is provided, the length of ``xmin`` and ``xmax`` must + match the length of ``y``. + pen + Pen attributes for the line(s), in the format of *width,color,style*. + label + Label for the line(s), to be displayed in the legend. + no_clip + If ``True``, do not clip lines outside the plot region. Only makes sense in the + Cartesian coordinate system. + perspective + Select perspective view and set the azimuth and elevation angle of the + viewpoint. Refer to :meth:`pygmt.Figure.plot` for details. + + Examples + -------- + >>> import pygmt + >>> fig = pygmt.Figure() + >>> fig.basemap(region=[0, 10, 0, 10], projection="X10c/10c", frame=True) + >>> fig.hlines(y=1, pen="1p,black", label="Line at y=1") + >>> fig.hlines(y=2, xmin=2, xmax=8, pen="1p,red,-", label="Line at y=2") + >>> fig.hlines(y=[3, 4], xmin=3, xmax=7, pen="1p,black,.", label="Lines at y=3,4") + >>> fig.hlines(y=[5, 6], xmin=4, xmax=9, pen="1p,red", label="Lines at y=5,6") + >>> fig.hlines( + ... y=[7, 8], xmin=[0, 1], xmax=[7, 8], pen="1p,blue", label="Lines at y=7,8" + ... ) + >>> fig.legend() + >>> fig.show() + """ + self._preprocess() + + # Determine the x limits from the current plot region if not specified. + if xmin is None or xmax is None: + xlimits = self.region[:2] + if xmin is None: + xmin = xlimits[0] + if xmax is None: + xmax = xlimits[1] + + # Ensure y/xmin/xmax are 1-D arrays. 
+ _y = np.atleast_1d(y) + _xmin = np.atleast_1d(xmin) + _xmax = np.atleast_1d(xmax) + + nlines = len(_y) # Number of lines to plot. + + # Check if xmin/xmax are scalars or have the expected length. + if _xmin.size not in {1, nlines} or _xmax.size not in {1, nlines}: + msg = ( + f"'xmin' and 'xmax' are expected to be scalars or have lengths '{nlines}', " + f"but lengths '{_xmin.size}' and '{_xmax.size}' are given." + ) + raise GMTInvalidInput(msg) + + # Repeat xmin/xmax to match the length of y if they are scalars. + if nlines != 1: + if _xmin.size == 1: + _xmin = np.repeat(_xmin, nlines) + if _xmax.size == 1: + _xmax = np.repeat(_xmax, nlines) + + # Call the Figure.plot method to plot the lines. + for i in range(nlines): + # Special handling for label. + # 1. Only specify a label when plotting the first line. + # 2. The -l option can accept comma-separated labels for labeling multiple lines + # with auto-coloring enabled. We don't need this feature here, so we need to + # replace comma with \054 if the label contains commas. + _label = label.replace(",", "\\054") if label and i == 0 else None + + # By default, points are connected as great circle arcs in geographic coordinate + # systems and straight lines in Cartesian coordinate systems (including polar + # projection). To plot "horizontal" lines along constant latitude (in geographic + # coordinate systems) or constant radius (in polar projection), we need to + # resample the line to at least 4 points. + npoints = 4 # 2 for Cartesian, at least 4 for geographic and polar projections. + self.plot( + x=np.linspace(_xmin[i], _xmax[i], npoints), + y=[_y[i]] * npoints, + pen=pen, + label=_label, + no_clip=no_clip, + perspective=perspective, + straight_line="x", + ) diff --git a/pygmt/src/info.py b/pygmt/src/info.py index 308a84256bf..46b41fa3c3b 100644 --- a/pygmt/src/info.py +++ b/pygmt/src/info.py @@ -34,12 +34,12 @@ def info(data, **kwargs): the number of columns vary from record to record. 
As an option, it will find the extent of the first two columns rounded up and down to the nearest multiple of the supplied increments given by ``spacing``. Such output will - be in a numpy.ndarray form [*w*, *e*, *s*, *n*], which can be used + be in a :class:`numpy.ndarray` form [*w*, *e*, *s*, *n*], which can be used directly as the ``region`` parameter for other modules (hence only *dx* and *dy* are needed). If the ``per_column`` parameter is combined with - ``spacing``, then the numpy.ndarray output will be rounded up/down for as + ``spacing``, then the :class:`numpy.ndarray` output will be rounded up/down for as many columns as there are increments provided in ``spacing``. A similar - parameter ``nearest_multiple`` will provide a numpy.ndarray in the form + parameter ``nearest_multiple`` will provide a :class:`numpy.ndarray` in the form of [*zmin*, *zmax*, *dz*] for makecpt. Full option list at :gmt-docs:`gmtinfo.html` @@ -73,7 +73,7 @@ def info(data, **kwargs): Returns ------- - output : np.ndarray or str + output : :class:`numpy.ndarray` or str Return type depends on whether any of the ``per_column``, ``spacing``, or ``nearest_multiple`` parameters are set. diff --git a/pygmt/src/legend.py b/pygmt/src/legend.py index f6e2d61f34f..b7676eeb66c 100644 --- a/pygmt/src/legend.py +++ b/pygmt/src/legend.py @@ -92,9 +92,11 @@ def legend( kind = data_kind(spec) if kind not in {"empty", "file", "stringio"}: - raise GMTInvalidInput(f"Unrecognized data type: {type(spec)}") + msg = f"Unrecognized data type: {type(spec)}" + raise GMTInvalidInput(msg) if kind == "file" and is_nonstr_iter(spec): - raise GMTInvalidInput("Only one legend specification file is allowed.") + msg = "Only one legend specification file is allowed." 
+ raise GMTInvalidInput(msg) with Session() as lib: with lib.virtualfile_in(data=spec, required_data=False) as vintbl: diff --git a/pygmt/src/makecpt.py b/pygmt/src/makecpt.py index d128e1f7aff..97a09269ecd 100644 --- a/pygmt/src/makecpt.py +++ b/pygmt/src/makecpt.py @@ -154,7 +154,8 @@ def makecpt(**kwargs): ``categorical=True``. """ if kwargs.get("W") is not None and kwargs.get("Ww") is not None: - raise GMTInvalidInput("Set only categorical or cyclic to True, not both.") + msg = "Set only categorical or cyclic to True, not both." + raise GMTInvalidInput(msg) if (output := kwargs.pop("H", None)) is not None: kwargs["H"] = True diff --git a/pygmt/src/meca.py b/pygmt/src/meca.py index 9822dae1f23..ba6de1f7868 100644 --- a/pygmt/src/meca.py +++ b/pygmt/src/meca.py @@ -91,13 +91,13 @@ def convention_code(convention, component="full"): return codes1[convention] if convention in codes2: if component not in codes2[convention]: - raise GMTInvalidInput( - f"Invalid component '{component}' for convention '{convention}'." - ) + msg = f"Invalid component '{component}' for convention '{convention}'." + raise GMTInvalidInput(msg) return codes2[convention][component] if convention in {"a", "c", "m", "d", "z", "p", "x", "y", "t"}: return convention - raise GMTInvalidInput(f"Invalid convention '{convention}'.") + msg = f"Invalid convention '{convention}'." + raise GMTInvalidInput(msg) def convention_name(code): @@ -227,168 +227,145 @@ def meca( # noqa: PLR0912, PLR0913, PLR0915 Parameters ---------- - spec : str, 1-D array, 2-D array, dict, or pd.DataFrame + spec : str, 1-D numpy array, 2-D numpy array, dict, or pandas.DataFrame Data that contain focal mechanism parameters. ``spec`` can be specified in either of the following types: - - *str*: a file name containing focal mechanism parameters as - columns. The meaning of each column is: + - *str*: a file name containing focal mechanism parameters as columns. 
The + meaning of each column is: - Columns 1 and 2: event longitude and latitude - - Column 3: event depth (in km) - - Columns 4 to 3+n: focal mechanism parameters. The number of columns - *n* depends on the choice of ``convention``, which will be - described below. - - Columns 4+n and 5+n: longitude, latitude at which to place - beachball. Using ``0 0`` will plot the beachball at the longitude, - latitude given in columns 1 and 2. [optional and requires - ``offset=True`` to take effect]. - - Text string to appear near the beachball [optional]. - - - *1-D array*: focal mechanism parameters of a single event. + - Column 3: event depth (in kilometers) + - Columns 4 to 3+n: focal mechanism parameters. The number of columns *n* + depends on the choice of ``convention``, which is described below. + - Columns 4+n and 5+n: longitude and latitude at which to place the + beachball. ``0 0`` plots the beachball at the longitude and latitude + given in the columns 1 and 2. [optional; requires ``offset=True``]. + - Last Column: text string to appear near the beachball [optional]. + + - *1-D np.array*: focal mechanism parameters of a single event. The meanings of columns are the same as above. - - *2-D array*: focal mechanism parameters of multiple events. + - *2-D np.array*: focal mechanism parameters of multiple events. The meanings of columns are the same as above. - - *dictionary or pd.DataFrame*: The dictionary keys or pd.DataFrame - column names determine the focal mechanism convention. 
For - different conventions, the following combination of keys are allowed: - - - ``"aki"``: *strike, dip, rake, magnitude* - - ``"gcmt"``: *strike1, dip1, rake1, strike2, dip2, rake2, mantissa,* - *exponent* - - ``"mt"``: *mrr, mtt, mff, mrt, mrf, mtf, exponent* - - ``"partial"``: *strike1, dip1, strike2, fault_type, magnitude* - - ``"principal_axis"``: *t_value, t_azimuth, t_plunge, n_value, - n_azimuth, n_plunge, p_value, p_azimuth, p_plunge, exponent* - - A dictionary may contain values for a single focal mechanism or - lists of values for multiple focal mechanisms. - - Both dictionary and pd.DataFrame may optionally contain - keys/column names: ``latitude``, ``longitude``, ``depth``, - ``plot_longitude``, ``plot_latitude``, and/or ``event_name``. - - If ``spec`` is either a str, a 1-D array or a 2-D array, the - ``convention`` parameter is required so we know how to interpret the - columns. If ``spec`` is a dictionary or a pd.DataFrame, - ``convention`` is not needed and is ignored if specified. + - *dict* or :class:`pandas.DataFrame`: The dict keys or + :class:`pandas.DataFrame` column names determine the focal mechanism + convention. For the different conventions, the following combination of + keys / column names are required: + + - ``"aki"``: *strike*, *dip*, *rake*, *magnitude* + - ``"gcmt"``: *strike1*, *dip1*, *rake1*, *strike2*, *dip2*, *rake2*, + *mantissa*, *exponent* + - ``"mt"``: *mrr*, *mtt*, *mff*, *mrt*, *mrf*, *mtf*, *exponent* + - ``"partial"``: *strike1*, *dip1*, *strike2*, *fault_type*, *magnitude* + - ``"principal_axis"``: *t_value*, *t_azimuth*, *t_plunge*, *n_value*, + *n_azimuth*, *n_plunge*, *p_value*, *p_azimuth*, *p_plunge*, *exponent* + + A dict may contain values for a single focal mechanism or lists of + values for multiple focal mechanisms. 
+ + Both dict and :class:`pandas.DataFrame` may optionally contain the keys / + column names: ``latitude``, ``longitude``, ``depth``, ``plot_longitude``, + ``plot_latitude``, and/or ``event_name``. + + If ``spec`` is either a str or a 1-D or 2-D numpy array, the ``convention`` + parameter is required to interpret the columns. If ``spec`` is a dict or + a :class:`pandas.DataFrame`, ``convention`` is not needed and ignored if + specified. scale : float or str *scale*\ [**+a**\ *angle*][**+f**\ *font*][**+j**\ *justify*]\ [**+l**][**+m**][**+o**\ *dx*\ [/\ *dy*]][**+s**\ *reference*]. - Adjust scaling of the radius of the beachball, which is - proportional to the magnitude. By default, *scale* defines the - size for magnitude = 5 (i.e., scalar seismic moment - M0 = 4.0E23 dynes-cm). If **+l** is used the radius will be - proportional to the seismic moment instead. Use **+s** and give - a *reference* to change the reference magnitude (or moment), and - use **+m** to plot all beachballs with the same size. A text - string can be specified to appear near the beachball - (corresponding to column or parameter ``event_name``). - Append **+a**\ *angle* to change the angle of the text string; - append **+f**\ *font* to change its font (size,fontname,color); - append **+j**\ *justify* to change the text location relative - to the beachball [Default is ``"TC"``, i.e., Top Center]; - append **+o** to offset the text string by *dx*\ /*dy*. + Adjust scaling of the radius of the beachball, which is proportional to the + magnitude. By default, *scale* defines the size for magnitude = 5 (i.e., scalar + seismic moment M0 = 4.0E23 dynes-cm). If **+l** is used the radius will be + proportional to the seismic moment instead. Use **+s** and give a *reference* + to change the reference magnitude (or moment), and use **+m** to plot all + beachballs with the same size. A text string can be specified to appear near + the beachball (corresponding to column or parameter ``event_name``). 
Append + **+a**\ *angle* to change the angle of the text string; append **+f**\ *font* + to change its font (size,fontname,color); append **+j**\ *justify* to change + the text location relative to the beachball [Default is ``"TC"``, i.e., Top + Center]; append **+o** to offset the text string by *dx*\ /*dy*. convention : str Focal mechanism convention. Choose from: - - ``"aki"`` (Aki & Richards) + - ``"aki"`` (Aki and Richards) - ``"gcmt"`` (global CMT) - ``"mt"`` (seismic moment tensor) - ``"partial"`` (partial focal mechanism) - ``"principal_axis"`` (principal axis) - Ignored if ``spec`` is a dictionary or pd.DataFrame. + Ignored if ``spec`` is a dict or :class:`pandas.DataFrame`. component : str The component of the seismic moment tensor to plot. - ``"full"``: the full seismic moment tensor - - ``"dc"``: the closest double couple defined from the moment tensor - (zero trace and zero determinant) + - ``"dc"``: the closest double couple defined from the moment tensor (zero + trace and zero determinant) - ``"deviatoric"``: deviatoric part of the moment tensor (zero trace) - longitude : float, list, or 1-D numpy array - Longitude(s) of event location(s). Must be the same length as the - number of events. Will override the ``longitude`` values - in ``spec`` if ``spec`` is a dictionary or pd.DataFrame. - latitude : float, list, or 1-D numpy array - Latitude(s) of event location(s). Must be the same length as the - number of events. Will override the ``latitude`` values - in ``spec`` if ``spec`` is a dictionary or pd.DataFrame. - depth : float, list, or 1-D numpy array - Depth(s) of event location(s) in kilometers. Must be the same length - as the number of events. Will override the ``depth`` values in ``spec`` - if ``spec`` is a dictionary or pd.DataFrame. - plot_longitude : float, str, list, or 1-D numpy array - Longitude(s) at which to place beachball(s). Must be the same length - as the number of events. 
Will override the ``plot_longitude`` values - in ``spec`` if ``spec`` is a dictionary or pd.DataFrame. - plot_latitude : float, str, list, or 1-D numpy array - Latitude(s) at which to place beachball(s). List must be the same - length as the number of events. Will override the ``plot_latitude`` - values in ``spec`` if ``spec`` is a dictionary or pd.DataFrame. + longitude/latitude/depth : float, list, or 1-D numpy array + Longitude(s) / latitude(s) / depth(s) of the event(s). Length must match the + number of events. Overrides the ``longitude`` / ``latitude`` / ``depth`` values + in ``spec`` if ``spec`` is a dict or :class:`pandas.DataFrame`. + plot_longitude/plot_latitude : float, str, list, or 1-D numpy array + Longitude(s) / Latitude(s) at which to place the beachball(s). Length must match + the number of events. Overrides the ``plot_longitude`` / ``plot_latitude`` + values in ``spec`` if ``spec`` is a dict or :class:`pandas.DataFrame`. event_name : str, list of str, or 1-D numpy array - Text string(s), e.g., event name(s) to appear near the beachball(s). - List must be the same length as the number of events. Will override - the ``event_name`` labels in ``spec`` if ``spec`` is a dictionary - or pd.DataFrame. + Text string(s), e.g., event name(s) to appear near the beachball(s). Length + must match the number of events. Overrides the ``event_name`` labels in ``spec`` + if ``spec`` is a dict or :class:`pandas.DataFrame`. labelbox : bool or str [*fill*]. - Draw a box behind the label if given. Use *fill* to give a fill color - [Default is ``"white"``]. + Draw a box behind the label if given via ``event_name``. Use *fill* to give a + fill color [Default is ``"white"``]. offset : bool or str [**+p**\ *pen*][**+s**\ *size*]. - Offset beachball(s) to longitude(s) and latitude(s) specified in the - the last two columns of the input file or array, or by - ``plot_longitude`` and ``plot_latitude`` if provided. 
A small circle - is plotted at the initial location and a line connects the beachball - to the circle. Use **+s**\ *size* to set the diameter of the circle - [Default is no circle]. Use **+p**\ *pen* to set the pen attributes - for this feature [Default is set via ``pen``]. The fill of the - circle is set via ``compressionfill`` or ``cmap``, i.e., - corresponds to the fill of the compressive quadrants. + Offset beachball(s) to the longitude(s) and latitude(s) specified in the last + two columns of the input file or array, or by ``plot_longitude`` and + ``plot_latitude`` if provided. A line from the beachball to the initial location + is drawn. Use **+s**\ *size* to plot a small circle at the initial location and + to set the diameter of this circle [Default is no circle]. Use **+p**\ *pen* to + set the pen attributes for this feature [Default is set via ``pen``]. The fill + of the circle is set via ``compressionfill`` or ``cmap``, i.e., corresponds to + the fill of the compressive quadrants. compressionfill : str - Set color or pattern for filling compressive quadrants - [Default is ``"black"``]. This setting also applies to the fill of - the circle defined via ``offset``. + Set color or pattern for filling compressive quadrants [Default is ``"black"``]. + This setting also applies to the fill of the circle defined via ``offset``. extensionfill : str - Set color or pattern for filling extensive quadrants - [Default is ``"white"``]. + Set color or pattern for filling extensive quadrants [Default is ``"white"``]. pen : str - Set pen attributes for all lines related to beachball [Default is - ``"0.25p,black,solid"``]. This setting applies to ``outline``, - ``nodal``, and ``offset``, unless overruled by arguments passed to - those parameters. Draws circumference of beachball. + Set (default) pen attributes for all lines related to the beachball [Default is + ``"0.25p,black,solid"``]. 
This setting applies to ``outline``, ``nodal``, and + ``offset``, unless overruled by arguments passed to those parameters. Draws the + circumference of the beachball. outline : bool or str [*pen*]. - Draw circumference and nodal planes of beachball. Use *pen* to set - the pen attributes for this feature [Default is set via ``pen``]. + Draw circumference and nodal planes of the beachball. Use *pen* to set the pen + attributes for this feature [Default is set via ``pen``]. nodal : bool, int, or str [*nplane*][/*pen*]. - Plot the nodal planes and outline the bubble which is transparent. - If *nplane* is + Plot the nodal planes and outline the bubble which is transparent. If *nplane* + is - ``0`` or ``True``: both nodal planes are plotted [Default]. - ``1``: only the first nodal plane is plotted. - ``2``: only the second nodal plane is plotted. - Use /*pen* to set the pen attributes for this feature [Default is - set via ``pen``]. - For double couple mechanisms, ``nodal`` renders the beachball - transparent by drawing only the nodal planes and the circumference. - For non-double couple mechanisms, ``nodal=0`` overlays best - double couple transparently. + Use /*pen* to set the pen attributes for this feature [Default is set via + ``pen``]. + For double couple mechanisms, ``nodal`` renders the beachball transparent by + drawing only the nodal planes and the circumference. For non-double couple + mechanisms, ``nodal=0`` overlays best double couple transparently. cmap : str File name of a CPT file or a series of comma-separated colors (e.g., - *color1,color2,color3*) to build a linear continuous CPT from those - colors automatically. The color of the compressive quadrants is - determined by the z-value (i.e., event depth or the third column for - an input file). This setting also applies to the fill of the circle - defined via ``offset``. + *color1,color2,color3*) to build a linear continuous CPT from those colors + automatically. 
The color of the compressive quadrants is determined by the + z-value (i.e., event depth or the third column for an input file). This setting + also applies to the fill of the circle defined via ``offset``. no_clip : bool - Do **not** skip symbols that fall outside the frame boundaries - [Default is ``False``, i.e., plot symbols inside the frame - boundaries only]. + Do **not** skip symbols that fall outside the frame boundaries [Default is + ``False``, i.e., plot symbols inside the frame boundaries only]. {projection} {region} {frame} @@ -423,7 +400,8 @@ def meca( # noqa: PLR0912, PLR0913, PLR0915 ) elif isinstance(spec, np.ndarray): # spec is a numpy array if convention is None: - raise GMTInvalidInput("'convention' must be specified for an array input.") + msg = "'convention' must be specified for an array input." + raise GMTInvalidInput(msg) # make sure convention is a name, not a code convention = convention_name(convention) @@ -441,9 +419,10 @@ def meca( # noqa: PLR0912, PLR0913, PLR0915 elif ncolsdiff == 3: colnames += ["plot_longitude", "plot_latitude", "event_name"] else: - raise GMTInvalidInput( + msg = ( f"Input array must have {len(colnames)} to {len(colnames) + 3} columns." ) + raise GMTInvalidInput(msg) spec.columns = colnames # Now spec is a pd.DataFrame or a file diff --git a/pygmt/src/nearneighbor.py b/pygmt/src/nearneighbor.py index f8bcac0d58e..94cb02bdf68 100644 --- a/pygmt/src/nearneighbor.py +++ b/pygmt/src/nearneighbor.py @@ -122,7 +122,7 @@ def nearneighbor( Return type depends on whether the ``outgrid`` parameter is set: - :class:`xarray.DataArray`: if ``outgrid`` is not set - - None if ``outgrid`` is set (grid output will be stored in file set by + - ``None`` if ``outgrid`` is set (grid output will be stored in the file set by ``outgrid``) Example ------- diff --git a/pygmt/src/plot.py b/pygmt/src/plot.py index 2767da138d2..23c5bde12fd 100644 --- a/pygmt/src/plot.py +++ b/pygmt/src/plot.py @@ -2,6 +2,8 @@ plot - Plot in two dimensions. 
""" +from typing import Literal + from pygmt.clib import Session from pygmt.exceptions import GMTInvalidInput from pygmt.helpers import ( @@ -49,7 +51,15 @@ ) @kwargs_to_strings(R="sequence", c="sequence_comma", i="sequence_comma", p="sequence") def plot( - self, data=None, x=None, y=None, size=None, symbol=None, direction=None, **kwargs + self, + data=None, + x=None, + y=None, + size=None, + symbol=None, + direction=None, + straight_line: bool | Literal["x", "y"] = False, # noqa: ARG001 + **kwargs, ): r""" Plot lines, polygons, and symbols in 2-D. @@ -98,18 +108,29 @@ def plot( depending on the style options chosen. {projection} {region} - straight_line : bool or str - [**m**\|\ **p**\|\ **x**\|\ **y**]. - By default, geographic line segments are drawn as great circle - arcs. To draw them as straight lines, use - ``straight_line=True``. - Alternatively, add **m** to draw the line by first following a - meridian, then a parallel. Or append **p** to start following a - parallel, then a meridian. (This can be practical to draw a line - along parallels, for example). For Cartesian data, points are - simply connected, unless you append **x** or **y** to draw - stair-case curves that whose first move is along *x* or *y*, - respectively. + straight_line + By default, line segments are drawn as straight lines in the Cartesian and polar + coordinate systems, and as great circle arcs (by resampling coarse input data + along such arcs) in the geographic coordinate system. The ``straight_line`` + parameter can control the drawing of line segments. Valid values are: + + - ``True``: Draw line segments as straight lines in geographic coordinate + systems. + - ``"x"``: Draw line segments by first along *x*, then along *y*. + - ``"y"``: Draw line segments by first along *y*, then along *x*. + + Here, *x* and *y* have different meanings depending on the coordinate system: + + - **Cartesian** coordinate system: *x* and *y* are the X- and Y-axes. 
+ - **Polar** coordinate system: *x* and *y* are theta and radius. + - **Geographic** coordinate system: *x* and *y* are parallels and meridians. + + .. attention:: + + There exists a bug in GMT<=6.5.0 that, in geographic coordinate systems, the + meaning of *x* and *y* is reversed, i.e., *x* means meridians and *y* means + parallels. The bug is fixed by upstream + `PR #8648 <https://github.com/GenericMappingTools/gmt/pull/8648>`__. {frame} {cmap} offset : str @@ -206,6 +227,8 @@ def plot( ``x``/``y``. {wrap} """ + # TODO(GMT>6.5.0): Remove the note for the upstream bug of the "straight_line" + # parameter. kwargs = self._preprocess(**kwargs) kind = data_kind(data) @@ -246,7 +269,8 @@ def plot( ("symbol", symbol), ]: if is_nonstr_iter(value): - raise GMTInvalidInput(f"'{name}' can't be 1-D array if 'data' is used.") + msg = f"'{name}' can't be a 1-D array if 'data' is used." + raise GMTInvalidInput(msg) # Set the default style if data has a geometry of Point or MultiPoint if kwargs.get("S") is None and _data_geometry_is_point(data, kind): diff --git a/pygmt/src/plot3d.py b/pygmt/src/plot3d.py index 3a070a53b52..e8e75382d74 100644 --- a/pygmt/src/plot3d.py +++ b/pygmt/src/plot3d.py @@ -2,6 +2,8 @@ plot3d - Plot in three dimensions. """ +from typing import Literal + from pygmt.clib import Session from pygmt.exceptions import GMTInvalidInput from pygmt.helpers import ( @@ -58,6 +60,7 @@ def plot3d( size=None, symbol=None, direction=None, + straight_line: bool | Literal["x", "y"] = False, # noqa: ARG001 **kwargs, ): r""" @@ -108,18 +111,31 @@ def plot3d( zscale/zsize : float or str Set z-axis scaling or z-axis size. {region} - straight_line : bool or str - [**m**\|\ **p**\|\ **x**\|\ **y**]. - By default, geographic line segments are drawn as great circle - arcs. To draw them as straight lines, use ``straight_line``. - Alternatively, add **m** to draw the line by first following a - meridian, then a parallel. Or append **p** to start following a - parallel, then a meridian. 
(This can be practical to draw a line - along parallels, for example). For Cartesian data, points are - simply connected, unless you append **x** or **y** to draw - stair-case curves that whose first move is along *x* or *y*, - respectively. **Note**: The ``straight_line`` parameter requires - constant *z*-coordinates. + straight_line + By default, line segments are drawn as straight lines in the Cartesian and polar + coordinate systems, and as great circle arcs (by resampling coarse input data + along such arcs) in the geographic coordinate system. The ``straight_line`` + parameter can control the drawing of line segments. Valid values are: + + - ``True``: Draw line segments as straight lines in geographic coordinate + systems. + - ``"x"``: Draw line segments by first along *x*, then along *y*. + - ``"y"``: Draw line segments by first along *y*, then along *x*. + + Here, *x* and *y* have different meanings depending on the coordinate system: + + - **Cartesian** coordinate system: *x* and *y* are the X- and Y-axes. + - **Polar** coordinate system: *x* and *y* are theta and radius. + - **Geographic** coordinate system: *x* and *y* are parallels and meridians. + + **NOTE**: The ``straight_line`` parameter requires constant *z*-coordinates. + + .. attention:: + + There exists a bug in GMT<=6.5.0 that, in geographic coordinate systems, the + meaning of *x* and *y* is reversed, i.e., *x* means meridians and *y* means + parallels. The bug is fixed by upstream + `PR #8648 <https://github.com/GenericMappingTools/gmt/pull/8648>`__. {frame} {cmap} offset : str @@ -189,6 +205,8 @@ def plot3d( ``x``/``y``/``z``. {wrap} """ + # TODO(GMT>6.5.0): Remove the note for the upstream bug of the "straight_line" + # parameter. kwargs = self._preprocess(**kwargs) kind = data_kind(data) @@ -230,7 +248,8 @@ def plot3d( ("symbol", symbol), ]: if is_nonstr_iter(value): - raise GMTInvalidInput(f"'{name}' can't be 1-D array if 'data' is used.") + msg = f"'{name}' can't be a 1-D array if 'data' is used." 
+ raise GMTInvalidInput(msg) # Set the default style if data has a geometry of Point or MultiPoint if kwargs.get("S") is None and _data_geometry_is_point(data, kind): diff --git a/pygmt/src/project.py b/pygmt/src/project.py index 811a7d48158..a49d5a1ad1f 100644 --- a/pygmt/src/project.py +++ b/pygmt/src/project.py @@ -136,7 +136,7 @@ def project( convention : str Specify the desired output using any combination of **xyzpqrs**, in any order [Default is **xypqrsz**]. Do not space between the letters. - Use lower case. The output will be columns of values corresponding to + Use lowercase. The output will be columns of values corresponding to your ``convention``. The **z** flag is special and refers to all numerical columns beyond the leading **x** and **y** in your input record. The **z** flag also includes any trailing text (which is @@ -222,15 +222,14 @@ def project( (depends on ``output_type``) """ if kwargs.get("C") is None: - raise GMTInvalidInput("The `center` parameter must be specified.") + msg = "The 'center' parameter must be specified." + raise GMTInvalidInput(msg) if kwargs.get("G") is None and data is None: - raise GMTInvalidInput( - "The `data` parameter must be specified unless `generate` is used." - ) + msg = "The 'data' parameter must be specified unless 'generate' is used." + raise GMTInvalidInput(msg) if kwargs.get("G") is not None and kwargs.get("F") is not None: - raise GMTInvalidInput( - "The `convention` parameter is not allowed with `generate`." - ) + msg = "The 'convention' parameter is not allowed with 'generate'." 
+ raise GMTInvalidInput(msg) output_type = validate_output_table_type(output_type, outfile=outfile) diff --git a/pygmt/src/psconvert.py b/pygmt/src/psconvert.py index 1a1c43f2e86..57c88406e8e 100644 --- a/pygmt/src/psconvert.py +++ b/pygmt/src/psconvert.py @@ -115,16 +115,14 @@ def psconvert(self, **kwargs): prefix = kwargs.get("F") if prefix in {"", None, False, True}: - raise GMTInvalidInput( - "The 'prefix' parameter must be specified with a valid value." - ) + msg = "The 'prefix' parameter must be specified with a valid value." + raise GMTInvalidInput(msg) # Check if the parent directory exists prefix_path = Path(prefix).parent if not prefix_path.exists(): - raise FileNotFoundError( - f"No such directory: '{prefix_path}', please create it first." - ) + msg = f"No such directory: '{prefix_path}', please create it first." + raise FileNotFoundError(msg) with Session() as lib: lib.call_module(module="psconvert", args=build_arg_list(kwargs)) diff --git a/pygmt/src/select.py b/pygmt/src/select.py index ecd6d12bfad..a7db421a210 100644 --- a/pygmt/src/select.py +++ b/pygmt/src/select.py @@ -1,3 +1,4 @@ +# noqa: A005 """ select - Select data table subsets based on multiple spatial criteria. """ diff --git a/pygmt/src/solar.py b/pygmt/src/solar.py index f181a0f3047..3a0f95a8180 100644 --- a/pygmt/src/solar.py +++ b/pygmt/src/solar.py @@ -102,10 +102,11 @@ def solar( valid_terminators = ["day_night", "civil", "nautical", "astronomical"] if terminator not in valid_terminators and terminator not in "dcna": - raise GMTInvalidInput( + msg = ( f"Unrecognized solar terminator type '{terminator}'. " f"Valid values are {valid_terminators}." ) + raise GMTInvalidInput(msg) kwargs["T"] = terminator[0] if terminator_datetime: try: @@ -113,7 +114,8 @@ def solar( "%Y-%m-%dT%H:%M:%S.%f" ) except ValueError as verr: - raise GMTInvalidInput("Unrecognized datetime format.") from verr + msg = "Unrecognized datetime format." 
+ raise GMTInvalidInput(msg) from verr kwargs["T"] += f"+d{datetime_string}" with Session() as lib: lib.call_module(module="solar", args=build_arg_list(kwargs)) diff --git a/pygmt/src/sphdistance.py b/pygmt/src/sphdistance.py index 0edbd7029c2..279db4e3590 100644 --- a/pygmt/src/sphdistance.py +++ b/pygmt/src/sphdistance.py @@ -65,10 +65,10 @@ def sphdistance( Specify the quantity that should be assigned to the grid nodes [Default is **d**]: - - **d** - compute distances to the nearest data point - - **n** - assign the ID numbers of the Voronoi polygons that each + - **d**: compute distances to the nearest data point + - **n**: assign the ID numbers of the Voronoi polygons that each grid node is inside - - **z** - assign all nodes inside the polygon the z-value of the center + - **z**: assign all nodes inside the polygon the z-value of the center node for a natural nearest-neighbor grid. Optionally, append the resampling interval along Voronoi arcs in @@ -110,7 +110,8 @@ def sphdistance( ... ) """ if kwargs.get("I") is None or kwargs.get("R") is None: - raise GMTInvalidInput("Both 'region' and 'spacing' must be specified.") + msg = "Both 'region' and 'spacing' must be specified." + raise GMTInvalidInput(msg) with Session() as lib: with ( lib.virtualfile_in(check_kind="vector", data=data, x=x, y=y) as vintbl, diff --git a/pygmt/src/subplot.py b/pygmt/src/subplot.py index d4d769f4c21..1e99d6ed07b 100644 --- a/pygmt/src/subplot.py +++ b/pygmt/src/subplot.py @@ -148,11 +148,11 @@ def subplot(self, nrows=1, ncols=1, **kwargs): kwargs = self._preprocess(**kwargs) if nrows < 1 or ncols < 1: - raise GMTInvalidInput("Please ensure that both 'nrows'>=1 and 'ncols'>=1.") + msg = "Please ensure that both 'nrows'>=1 and 'ncols'>=1." + raise GMTInvalidInput(msg) if kwargs.get("Ff") and kwargs.get("Fs"): - raise GMTInvalidInput( - "Please provide either one of 'figsize' or 'subsize' only." - ) + msg = "Please provide either one of 'figsize' or 'subsize' only." 
+ raise GMTInvalidInput(msg) # Need to use separate sessions for "subplot begin" and "subplot end". # Otherwise, "subplot end" will use the last session, which may cause diff --git a/pygmt/src/ternary.py b/pygmt/src/ternary.py index 5bc477fab99..633707cf427 100644 --- a/pygmt/src/ternary.py +++ b/pygmt/src/ternary.py @@ -87,7 +87,7 @@ def ternary( if any(v is not None for v in labels): kwargs["L"] = "/".join(str(v) if v is not None else "-" for v in labels) - # Patch for GMT < 6.5.0. + # TODO(GMT>=6.5.0): Remove the patch for upstream bug fixed in GMT 6.5.0. # See https://github.com/GenericMappingTools/pygmt/pull/2138 if Version(__gmt_version__) < Version("6.5.0") and isinstance(data, pd.DataFrame): data = data.to_numpy() diff --git a/pygmt/src/text.py b/pygmt/src/text.py index 2ed475c9ac2..b507510f620 100644 --- a/pygmt/src/text.py +++ b/pygmt/src/text.py @@ -5,7 +5,7 @@ from collections.abc import Sequence import numpy as np -from pygmt._typing import AnchorCode +from pygmt._typing import AnchorCode, StringArrayTypes from pygmt.clib import Session from pygmt.exceptions import GMTInvalidInput from pygmt.helpers import ( @@ -48,7 +48,7 @@ def text_( # noqa: PLR0912 x=None, y=None, position: AnchorCode | None = None, - text=None, + text: str | StringArrayTypes | None = None, angle=None, font=None, justify: bool | None | AnchorCode | Sequence[AnchorCode] = None, @@ -104,7 +104,7 @@ def text_( # noqa: PLR0912 For example, ``position="TL"`` plots the text at the Top Left corner of the map. - text : str or 1-D array + text The text string, or an array of strings to plot on the figure. angle: float, str, bool or list Set the angle measured in degrees counter-clockwise from @@ -138,11 +138,11 @@ def text_( # noqa: PLR0912 **i** for inches, or **p** for points; if not given we consult :gmt-term:`PROJ_LENGTH_UNIT`) or *%* for a percentage of the font size. Optionally, use modifier **+t** to set the shape of the text - box when using ``fill`` and/or ``pen``. 
Append lower case **o** - to get a straight rectangle [Default is **o**]. Append upper case + box when using ``fill`` and/or ``pen``. Append lowercase **o** + to get a straight rectangle [Default is **o**]. Append uppercase **O** to get a rounded rectangle. In paragraph mode (*paragraph*) - you can also append lower case **c** to get a concave rectangle or - append upper case **C** to get a convex rectangle. + you can also append lowercase **c** to get a concave rectangle or + append uppercase **C** to get a convex rectangle. fill : str Set color for filling text boxes [Default is no fill]. offset : str @@ -187,17 +187,21 @@ def text_( # noqa: PLR0912 + (position is not None) + (x is not None or y is not None) ) != 1: - raise GMTInvalidInput("Provide either textfiles, x/y/text, or position/text.") + msg = "Provide either 'textfiles', 'x'/'y'/'text', or 'position'/'text'." + raise GMTInvalidInput(msg) required_data = position is None kind = data_kind(textfiles, required=required_data) if position is not None and (text is None or is_nonstr_iter(text)): - raise GMTInvalidInput("'text' can't be None or array when 'position' is given.") + msg = "'text' can't be None or array when 'position' is given." + raise GMTInvalidInput(msg) if textfiles is not None and text is not None: - raise GMTInvalidInput("'text' can't be specified when 'textfiles' is given.") + msg = "'text' can't be specified when 'textfiles' is given." + raise GMTInvalidInput(msg) if kind == "empty" and text is None: - raise GMTInvalidInput("Must provide text with x/y pairs.") + msg = "Must provide text with x/y pairs." + raise GMTInvalidInput(msg) # Arguments that can accept arrays. array_args = [ @@ -238,15 +242,12 @@ def text_( # noqa: PLR0912 # Append text to the last column. Text must be passed in as str type. 
text = np.asarray(text, dtype=np.str_) - encoding = _check_encoding("".join(text.flatten())) - if encoding != "ascii": + if (encoding := _check_encoding("".join(text.flatten()))) != "ascii": text = np.vectorize(non_ascii_to_octal, excluded="encoding")( text, encoding=encoding ) + confdict["PS_CHAR_ENCODING"] = encoding extra_arrays.append(text) - - if encoding not in {"ascii", "ISOLatin1+"}: - confdict = {"PS_CHAR_ENCODING": encoding} else: if isinstance(position, str): kwargs["F"] += f"+c{position}+t{text}" diff --git a/pygmt/src/tilemap.py b/pygmt/src/tilemap.py index a13c8d9c740..e61cd82e868 100644 --- a/pygmt/src/tilemap.py +++ b/pygmt/src/tilemap.py @@ -9,13 +9,9 @@ from pygmt.helpers import build_arg_list, fmt_docstring, kwargs_to_strings, use_alias try: - import rioxarray # noqa: F401 from xyzservices import TileProvider - - _HAS_RIOXARRAY = True except ImportError: TileProvider = None - _HAS_RIOXARRAY = False @fmt_docstring @@ -54,7 +50,7 @@ def tilemap( **Note**: By default, standard web map tiles served in a Spherical Mercator (EPSG:3857) Cartesian format will be reprojected to a geographic coordinate - reference system (OGC:WGS84) and plotted with longitude/latitude bounds when + reference system (OGC:CRS84) and plotted with longitude/latitude bounds when ``lonlat=True``. If reprojection is not desired, please set ``lonlat=False`` and provide Spherical Mercator (EPSG:3857) coordinates to the ``region`` parameter. @@ -111,38 +107,21 @@ def tilemap( kwargs : dict Extra keyword arguments to pass to :meth:`pygmt.Figure.grdimage`. - - Raises - ------ - ImportError - If ``rioxarray`` is not installed. Follow - :doc:`install instructions for rioxarray `, (e.g. via - ``python -m pip install rioxarray``) before using this function. """ kwargs = self._preprocess(**kwargs) - if not _HAS_RIOXARRAY: - raise ImportError( - "Package `rioxarray` is required to be installed to use this function. 
" - "Please use `python -m pip install rioxarray` or " - "`mamba install -c conda-forge rioxarray` to install the package." - ) - raster = load_tile_map( region=region, zoom=zoom, source=source, lonlat=lonlat, + crs="OGC:CRS84" if lonlat is True else "EPSG:3857", wait=wait, max_retries=max_retries, zoom_adjust=zoom_adjust, ) - - # Reproject raster from Spherical Mercator (EPSG:3857) to lonlat (OGC:CRS84) if - # bounding box region was provided in lonlat - if lonlat and raster.rio.crs == "EPSG:3857": - raster = raster.rio.reproject(dst_crs="OGC:CRS84") - raster.gmt.gtype = 1 # set to geographic type + if lonlat: + raster.gmt.gtype = 1 # Set to geographic type # Only set region if no_clip is None or False, so that plot is clipped to exact # bounding box region diff --git a/pygmt/src/timestamp.py b/pygmt/src/timestamp.py index 3db9ff694d9..13f278b2816 100644 --- a/pygmt/src/timestamp.py +++ b/pygmt/src/timestamp.py @@ -84,6 +84,7 @@ def timestamp( kwdict["U"] += f"{label}" kwdict["U"] += f"+j{justify}" + # TODO(GMT>=6.5.0): Remove the patch for upstream bug fixed in GMT 6.5.0. if Version(__gmt_version__) < Version("6.5.0") and "/" not in str(offset): # Giving a single offset doesn't work in GMT < 6.5.0. # See https://github.com/GenericMappingTools/gmt/issues/7107. @@ -99,6 +100,7 @@ def timestamp( "The given text string will be truncated to 64 characters." ) warnings.warn(message=msg, category=RuntimeWarning, stacklevel=2) + # TODO(GMT>=6.5.0): Remove the workaround for the new '+t' modifier. if Version(__gmt_version__) < Version("6.5.0"): # Workaround for GMT<6.5.0 by overriding the 'timefmt' parameter timefmt = text[:64] diff --git a/pygmt/src/triangulate.py b/pygmt/src/triangulate.py index b5c701e59ed..1fa60d6a301 100644 --- a/pygmt/src/triangulate.py +++ b/pygmt/src/triangulate.py @@ -94,7 +94,7 @@ def regular_grid( Parameters ---------- - x/y/z : np.ndarray + x/y/z : :class:`numpy.ndarray` Arrays of x and y coordinates and values z of the data points. 
data : str, {table-like} Pass in (x, y[, z]) or (longitude, latitude[, elevation]) values by @@ -125,9 +125,8 @@ def regular_grid( ret Return type depends on whether the ``outgrid`` parameter is set: - - xarray.DataArray if ``outgrid`` is None (default) - - None if ``outgrid`` is a str (grid output is stored in - ``outgrid``) + - :class:`xarray.DataArray` if ``outgrid`` is ``None`` [Default] + - ``None`` if ``outgrid`` is a str (grid output is stored in ``outgrid``) Note ---- @@ -197,7 +196,7 @@ def delaunay_triples( Parameters ---------- - x/y/z : np.ndarray + x/y/z : :class:`numpy.ndarray` Arrays of x and y coordinates and values z of the data points. data : str, {table-like} Pass in (x, y, z) or (longitude, latitude, elevation) values by @@ -222,7 +221,7 @@ def delaunay_triples( ret Return type depends on ``outfile`` and ``output_type``: - - ``None`` if ``outfile`` is set (output will be stored in file set by + - ``None`` if ``outfile`` is set (output will be stored in the file set by ``outfile``) - :class:`pandas.DataFrame` or :class:`numpy.ndarray` if ``outfile`` is not set (depends on ``output_type``) diff --git a/pygmt/src/velo.py b/pygmt/src/velo.py index 4c536cb2ea0..06f7f4c4600 100644 --- a/pygmt/src/velo.py +++ b/pygmt/src/velo.py @@ -243,15 +243,15 @@ def velo(self, data=None, **kwargs): if kwargs.get("S") is None or ( kwargs.get("S") is not None and not isinstance(kwargs["S"], str) ): - raise GMTInvalidInput( - "The parameter `spec` is required and has to be a string." - ) + msg = "The parameter 'spec' is required and has to be a string." + raise GMTInvalidInput(msg) if isinstance(data, np.ndarray) and not pd.api.types.is_numeric_dtype(data): - raise GMTInvalidInput( + msg = ( "Text columns are not supported with numpy.ndarray type inputs. " "They are only supported with file or pandas.DataFrame inputs." 
) + raise GMTInvalidInput(msg) with Session() as lib: with lib.virtualfile_in(check_kind="vector", data=data) as vintbl: diff --git a/pygmt/src/vlines.py b/pygmt/src/vlines.py new file mode 100644 index 00000000000..7f1919baa9c --- /dev/null +++ b/pygmt/src/vlines.py @@ -0,0 +1,132 @@ +""" +vlines - Plot vertical lines. +""" + +from collections.abc import Sequence + +import numpy as np +from pygmt.exceptions import GMTInvalidInput + +__doctest_skip__ = ["vlines"] + + +def vlines( + self, + x: float | Sequence[float], + ymin: float | Sequence[float] | None = None, + ymax: float | Sequence[float] | None = None, + pen: str | None = None, + label: str | None = None, + no_clip: bool = False, + perspective: str | bool | None = None, +): + """ + Plot one or multiple vertical line(s). + + This method is a high-level wrapper around :meth:`pygmt.Figure.plot` that focuses on + plotting vertical lines at X-coordinates specified by the ``x`` parameter. The ``x`` + parameter can be a single value (for a single vertical line) or a sequence of values + (for multiple vertical lines). + + By default, the Y-coordinates of the start and end points of the lines are set to be + the Y-limits of the current plot, but this can be overridden by specifying the + ``ymin`` and ``ymax`` parameters. ``ymin`` and ``ymax`` can be either a single value + or a sequence of values. If a single value is provided, it is applied to all lines. + If a sequence is provided, the length of ``ymin`` and ``ymax`` must match the length + of ``x``. + + The term "vertical" lines can be interpreted differently in different coordinate + systems: + + - **Cartesian**: lines are plotted as straight lines. + - **Polar**: lines are plotted as straight lines along a constant azimuth. + - **Geographic**: lines are plotted as arcs along meridians (i.e., constant + longitude). + + Parameters + ---------- + x + X-coordinates to plot the lines. 
It can be a single value (for a single line) + or a sequence of values (for multiple lines). + ymin/ymax + Y-coordinates of the start/end point(s) of the line(s). If ``None``, defaults to + the Y-limits of the current plot. ``ymin`` and ``ymax`` can either be a single + value or a sequence of values. If a single value is provided, it is applied to + all lines. If a sequence is provided, the length of ``ymin`` and ``ymax`` must + match the length of ``x``. + pen + Pen attributes for the line(s), in the format of *width,color,style*. + label + Label for the line(s), to be displayed in the legend. + no_clip + If ``True``, do not clip lines outside the plot region. Only makes sense in the + Cartesian coordinate system. + perspective + Select perspective view and set the azimuth and elevation angle of the + viewpoint. Refer to :meth:`pygmt.Figure.plot` for details. + + Examples + -------- + >>> import pygmt + >>> fig = pygmt.Figure() + >>> fig.basemap(region=[0, 10, 0, 10], projection="X10c/10c", frame=True) + >>> fig.vlines(x=1, pen="1p,black", label="Line at x=1") + >>> fig.vlines(x=2, ymin=2, ymax=8, pen="1p,red,-", label="Line at x=2") + >>> fig.vlines(x=[3, 4], ymin=3, ymax=7, pen="1p,black,.", label="Lines at x=3,4") + >>> fig.vlines(x=[5, 6], ymin=4, ymax=9, pen="1p,red", label="Lines at x=5,6") + >>> fig.vlines( + ... x=[7, 8], ymin=[0, 1], ymax=[7, 8], pen="1p,blue", label="Lines at x=7,8" + ... ) + >>> fig.legend() + >>> fig.show() + """ + self._preprocess() + + # Determine the y limits from the current plot region if not specified. + if ymin is None or ymax is None: + ylimits = self.region[2:] + if ymin is None: + ymin = ylimits[0] + if ymax is None: + ymax = ylimits[1] + + # Ensure x/ymin/ymax are 1-D arrays. + _x = np.atleast_1d(x) + _ymin = np.atleast_1d(ymin) + _ymax = np.atleast_1d(ymax) + + nlines = len(_x) # Number of lines to plot. + + # Check if ymin/ymax are scalars or have the expected length. 
+ if _ymin.size not in {1, nlines} or _ymax.size not in {1, nlines}: + msg = ( + f"'ymin' and 'ymax' are expected to be scalars or have lengths '{nlines}', " + f"but lengths '{_ymin.size}' and '{_ymax.size}' are given." + ) + raise GMTInvalidInput(msg) + + # Repeat ymin/ymax to match the length of x if they are scalars. + if nlines != 1: + if _ymin.size == 1: + _ymin = np.repeat(_ymin, nlines) + if _ymax.size == 1: + _ymax = np.repeat(_ymax, nlines) + + # Call the Figure.plot method to plot the lines. + for i in range(nlines): + # Special handling for label. + # 1. Only specify a label when plotting the first line. + # 2. The -l option can accept comma-separated labels for labeling multiple lines + # with auto-coloring enabled. We don't need this feature here, so we need to + # replace comma with \054 if the label contains commas. + _label = label.replace(",", "\\054") if label and i == 0 else None + + self.plot( + x=[_x[i], _x[i]], + y=[_ymin[i], _ymax[i]], + pen=pen, + label=_label, + no_clip=no_clip, + perspective=perspective, + straight_line="y", + ) diff --git a/pygmt/src/which.py b/pygmt/src/which.py index 2871693c666..69b9ce9b700 100644 --- a/pygmt/src/which.py +++ b/pygmt/src/which.py @@ -68,7 +68,8 @@ def which(fname, **kwargs) -> str | list[str]: match paths.size: case 0: _fname = "', '".join(fname) if is_nonstr_iter(fname) else fname - raise FileNotFoundError(f"File(s) '{_fname}' not found.") + msg = f"File(s) '{_fname}' not found." + raise FileNotFoundError(msg) case 1: return paths[0] case _: diff --git a/pygmt/src/x2sys_cross.py b/pygmt/src/x2sys_cross.py index 382f560f6f7..4e209d33d18 100644 --- a/pygmt/src/x2sys_cross.py +++ b/pygmt/src/x2sys_cross.py @@ -23,13 +23,13 @@ @contextlib.contextmanager def tempfile_from_dftrack(track, suffix): """ - Saves pandas.DataFrame track table to a temporary tab-separated ASCII text file with - a unique name (to prevent clashes when running x2sys_cross), adding a suffix - extension to the end. 
+ Saves :class:`pandas.DataFrame` track table to a temporary tab-separated ASCII text + file with a unique name (to prevent clashes when running x2sys_cross), adding a + suffix extension to the end. Parameters ---------- - track : pandas.DataFrame + track : :class:`pandas.DataFrame` A table holding track data with coordinate (x, y) or (lon, lat) values, and (optionally) time (t). suffix : str @@ -39,7 +39,7 @@ def tempfile_from_dftrack(track, suffix): ------ tmpfilename : str A temporary tab-separated value file with a unique name holding the - track data. E.g. 'track-1a2b3c4.tsv'. + track data. E.g. "track-1a2b3c4.tsv". """ try: tmpfilename = f"track-{unique_name()[:7]}.{suffix}" @@ -90,9 +90,9 @@ def x2sys_cross( Parameters ---------- - tracks : pandas.DataFrame or str or list + tracks : :class:`pandas.DataFrame`, str, or list A table or a list of tables with (x, y) or (lon, lat) values in the - first two columns. Track(s) can be provided as pandas DataFrame tables + first two columns. Track(s) can be provided as :class:`pandas.DataFrame` tables or file names. Supported file formats are ASCII, native binary, or COARDS netCDF 1-D data. More columns may also be present. @@ -145,9 +145,9 @@ def x2sys_cross( Sets the interpolation mode for estimating values at the crossover. Choose among: - - **l** - Linear interpolation [Default]. - - **a** - Akima spline interpolation. - - **c** - Cubic spline interpolation. + - **l**: Linear interpolation [Default]. + - **a**: Akima spline interpolation. + - **c**: Cubic spline interpolation. coe : str Use **e** for external COEs only, and **i** for internal COEs only @@ -210,7 +210,8 @@ def x2sys_cross( # Save pandas.DataFrame track data to temporary file file_contexts.append(tempfile_from_dftrack(track=track, suffix=suffix)) case _: - raise GMTInvalidInput(f"Unrecognized data type: {type(track)}") + msg = f"Unrecognized data type: {type(track)}." 
+ raise GMTInvalidInput(msg) with Session() as lib: with lib.virtualfile_out(kind="dataset", fname=outfile) as vouttbl: diff --git a/pygmt/src/x2sys_init.py b/pygmt/src/x2sys_init.py index 99af7211424..6e36263fe0e 100644 --- a/pygmt/src/x2sys_init.py +++ b/pygmt/src/x2sys_init.py @@ -85,13 +85,13 @@ def x2sys_init(tag, **kwargs): programs. Append **d** for distance or **s** for speed, then give the desired *unit* as: - - **c** - Cartesian userdist or userdist/usertime - - **e** - meters or m/s - - **f** - feet or ft/s - - **k** - kilometers or km/hr - - **m** - miles or mi/hr - - **n** - nautical miles or knots - - **u** - survey feet or sft/s + - **c**: Cartesian userdist or userdist/usertime + - **e**: meters or m/s + - **f**: feet or ft/s + - **k**: kilometers or km/hr + - **m**: miles or mi/hr + - **n**: nautical miles or knots + - **u**: survey feet or sft/s [Default is ``units=["dk", "se"]`` (km and m/s) if ``discontinuity`` is set, and ``units=["dc", "sc"]`` otherwise (e.g., for Cartesian units)]. diff --git a/pygmt/src/xyz2grd.py b/pygmt/src/xyz2grd.py index f9e738a5f33..ca7c9e94c6b 100644 --- a/pygmt/src/xyz2grd.py +++ b/pygmt/src/xyz2grd.py @@ -143,7 +143,8 @@ def xyz2grd( ... ) """ if kwargs.get("I") is None or kwargs.get("R") is None: - raise GMTInvalidInput("Both 'region' and 'spacing' must be specified.") + msg = "Both 'region' and 'spacing' must be specified." 
+ raise GMTInvalidInput(msg) with Session() as lib: with ( diff --git a/pygmt/tests/baseline/test_hlines_clip.png.dvc b/pygmt/tests/baseline/test_hlines_clip.png.dvc new file mode 100644 index 00000000000..1c24bb1c16d --- /dev/null +++ b/pygmt/tests/baseline/test_hlines_clip.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: e87ea1b80ae5d32d49e9ad94a5c25f96 + size: 7199 + hash: md5 + path: test_hlines_clip.png diff --git a/pygmt/tests/baseline/test_hlines_geographic_global_d.png.dvc b/pygmt/tests/baseline/test_hlines_geographic_global_d.png.dvc new file mode 100644 index 00000000000..960f3a05fdc --- /dev/null +++ b/pygmt/tests/baseline/test_hlines_geographic_global_d.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: b7055f03ff5bc152c0f6b72f2d39f32c + size: 29336 + hash: md5 + path: test_hlines_geographic_global_d.png diff --git a/pygmt/tests/baseline/test_hlines_geographic_global_g.png.dvc b/pygmt/tests/baseline/test_hlines_geographic_global_g.png.dvc new file mode 100644 index 00000000000..29b83d3b44f --- /dev/null +++ b/pygmt/tests/baseline/test_hlines_geographic_global_g.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: ab2e7717cad6ac4132fd3e3af1fefa89 + size: 29798 + hash: md5 + path: test_hlines_geographic_global_g.png diff --git a/pygmt/tests/baseline/test_hlines_multiple_lines.png.dvc b/pygmt/tests/baseline/test_hlines_multiple_lines.png.dvc new file mode 100644 index 00000000000..e915a5a65f0 --- /dev/null +++ b/pygmt/tests/baseline/test_hlines_multiple_lines.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: 70c8decbffd37fc48b2eb9ff84442ec0 + size: 14139 + hash: md5 + path: test_hlines_multiple_lines.png diff --git a/pygmt/tests/baseline/test_hlines_one_line.png.dvc b/pygmt/tests/baseline/test_hlines_one_line.png.dvc new file mode 100644 index 00000000000..aa42ce3f492 --- /dev/null +++ b/pygmt/tests/baseline/test_hlines_one_line.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: 121970f75d34c552e632cacc692f09e9 + size: 13685 + hash: md5 + path: test_hlines_one_line.png diff --git 
a/pygmt/tests/baseline/test_hlines_polar_projection.png.dvc b/pygmt/tests/baseline/test_hlines_polar_projection.png.dvc new file mode 100644 index 00000000000..4e5bef96dc6 --- /dev/null +++ b/pygmt/tests/baseline/test_hlines_polar_projection.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: 0c0eeb160dd6beb06bb6d3dcc264127a + size: 57789 + hash: md5 + path: test_hlines_polar_projection.png diff --git a/pygmt/tests/baseline/test_plot_datetime.png.dvc b/pygmt/tests/baseline/test_plot_datetime.png.dvc index 714104995ba..1450b29ef82 100644 --- a/pygmt/tests/baseline/test_plot_datetime.png.dvc +++ b/pygmt/tests/baseline/test_plot_datetime.png.dvc @@ -1,5 +1,5 @@ outs: -- md5: 583947facaa873122f0bf18137809cd4 - size: 12695 +- md5: 0a2eae0da1e3d5b71d7392de1c081346 + size: 13124 path: test_plot_datetime.png hash: md5 diff --git a/pygmt/tests/baseline/test_tilemap_ogc_wgs84.png.dvc b/pygmt/tests/baseline/test_tilemap_ogc_crs84.png.dvc similarity index 67% rename from pygmt/tests/baseline/test_tilemap_ogc_wgs84.png.dvc rename to pygmt/tests/baseline/test_tilemap_ogc_crs84.png.dvc index 518f7492c29..4cfc88b1133 100644 --- a/pygmt/tests/baseline/test_tilemap_ogc_wgs84.png.dvc +++ b/pygmt/tests/baseline/test_tilemap_ogc_crs84.png.dvc @@ -1,5 +1,5 @@ outs: - md5: 5f225e4dd26f44e07bcbd8e713c67dbe size: 37343 - path: test_tilemap_ogc_wgs84.png + path: test_tilemap_ogc_crs84.png hash: md5 diff --git a/pygmt/tests/baseline/test_vlines_clip.png.dvc b/pygmt/tests/baseline/test_vlines_clip.png.dvc new file mode 100644 index 00000000000..f20f77ae249 --- /dev/null +++ b/pygmt/tests/baseline/test_vlines_clip.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: 4eb9c7fd7e3a803dcc3cde1409ad7fa7 + size: 7361 + hash: md5 + path: test_vlines_clip.png diff --git a/pygmt/tests/baseline/test_vlines_geographic_global.png.dvc b/pygmt/tests/baseline/test_vlines_geographic_global.png.dvc new file mode 100644 index 00000000000..d09fa8f8d82 --- /dev/null +++ b/pygmt/tests/baseline/test_vlines_geographic_global.png.dvc @@ -0,0 
+1,5 @@ +outs: +- md5: 3fb4a271c670e4cbe647838b6fee5a8c + size: 67128 + hash: md5 + path: test_vlines_geographic_global.png diff --git a/pygmt/tests/baseline/test_vlines_multiple_lines.png.dvc b/pygmt/tests/baseline/test_vlines_multiple_lines.png.dvc new file mode 100644 index 00000000000..da9a4bf8aed --- /dev/null +++ b/pygmt/tests/baseline/test_vlines_multiple_lines.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: 499b2d08832247673f208b1c0a282c4c + size: 13874 + hash: md5 + path: test_vlines_multiple_lines.png diff --git a/pygmt/tests/baseline/test_vlines_one_line.png.dvc b/pygmt/tests/baseline/test_vlines_one_line.png.dvc new file mode 100644 index 00000000000..efc2df680b3 --- /dev/null +++ b/pygmt/tests/baseline/test_vlines_one_line.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: 2cd30ad55fc660123c67e6a684a5ea21 + size: 13589 + hash: md5 + path: test_vlines_one_line.png diff --git a/pygmt/tests/baseline/test_vlines_polar_projection.png.dvc b/pygmt/tests/baseline/test_vlines_polar_projection.png.dvc new file mode 100644 index 00000000000..1252a2d0455 --- /dev/null +++ b/pygmt/tests/baseline/test_vlines_polar_projection.png.dvc @@ -0,0 +1,5 @@ +outs: +- md5: 1981df3bd9c57cd975b6e74946496175 + size: 44621 + hash: md5 + path: test_vlines_polar_projection.png diff --git a/pygmt/tests/test_accessor.py b/pygmt/tests/test_accessor.py index 2701d2a0b3a..07ece609b69 100644 --- a/pygmt/tests/test_accessor.py +++ b/pygmt/tests/test_accessor.py @@ -73,6 +73,7 @@ def test_accessor_set_non_boolean(): grid.gmt.gtype = 2 +# TODO(GMT>=6.5.0): Remove the xfail marker for GMT>=6.5.0. 
@pytest.mark.xfail( condition=sys.platform == "win32" and Version(__gmt_version__) < Version("6.5.0"), reason="Upstream bug fixed in https://github.com/GenericMappingTools/gmt/pull/7573", diff --git a/pygmt/tests/test_clib_loading.py b/pygmt/tests/test_clib_loading.py index 3c65c5caf46..8b96128c0da 100644 --- a/pygmt/tests/test_clib_loading.py +++ b/pygmt/tests/test_clib_loading.py @@ -9,6 +9,7 @@ import sys import types from pathlib import PurePath +from unittest import mock import pytest from pygmt.clib.loading import ( @@ -70,24 +71,15 @@ def test_load_libgmt(): check_libgmt(load_libgmt()) -def test_load_libgmt_fails(monkeypatch): +def test_load_libgmt_fails(): """ Test that GMTCLibNotFoundError is raised when GMT's shared library cannot be found. """ - with monkeypatch.context() as mpatch: - if sys.platform == "win32": - mpatch.setattr(ctypes.util, "find_library", lambda name: "fakegmt.dll") # noqa: ARG005 - mpatch.setattr( - sys, - "platform", - # Pretend to be on macOS if running on Linux, and vice versa - "darwin" if sys.platform == "linux" else "linux", - ) - mpatch.setattr( - subprocess, - "check_output", - lambda cmd, encoding: "libfakegmt.so", # noqa: ARG005 - ) + with ( + mock.patch("ctypes.util.find_library", return_value="fakegmt.dll"), + mock.patch("sys.platform", "darwin" if sys.platform == "linux" else "linux"), + mock.patch("subprocess.check_output", return_value="libfakegmt.so"), + ): with pytest.raises(GMTCLibNotFoundError): check_libgmt(load_libgmt()) @@ -133,7 +125,8 @@ def _mock_ctypes_cdll_return(self, libname): if isinstance(libname, str): # libname is an invalid library path in string type, # raise OSError like the original ctypes.CDLL - raise OSError(f"Unable to find '{libname}'") + msg = f"Unable to find '{libname}'." 
+ raise OSError(msg) # libname is a loaded GMT library return self.loaded_libgmt @@ -213,42 +206,25 @@ def test_brokenlib_brokenlib_workinglib(self): assert check_libgmt(load_libgmt(lib_fullnames=lib_fullnames)) is None -class TestLibgmtCount: +def test_libgmt_load_counter(): """ - Test that the GMT library is not repeatedly loaded in every session. + Make sure that the GMT library is not loaded in every session. """ - - loaded_libgmt = load_libgmt() # Load the GMT library and reuse it when necessary - counter = 0 # Global counter for how many times ctypes.CDLL is called - - def _mock_ctypes_cdll_return(self, libname): # noqa: ARG002 - """ - Mock ctypes.CDLL to count how many times the function is called. - - If ctypes.CDLL is called, the counter increases by one. - """ - self.counter += 1 # Increase the counter - return self.loaded_libgmt - - def test_libgmt_load_counter(self, monkeypatch): - """ - Make sure that the GMT library is not loaded in every session. - """ - # Monkeypatch the ctypes.CDLL function - monkeypatch.setattr(ctypes, "CDLL", self._mock_ctypes_cdll_return) - - # Create two sessions and check the global counter + loaded_libgmt = load_libgmt() # Load the GMT library and reuse it when necessary. + with mock.patch("ctypes.CDLL", return_value=loaded_libgmt) as mock_cdll: + # Create two sessions and check the call count with Session() as lib: _ = lib with Session() as lib: _ = lib - assert self.counter == 0 # ctypes.CDLL is not called after two sessions. + # ctypes.CDLL is not called after two sessions. + assert mock_cdll.call_count == 0 - # Explicitly calling load_libgmt to make sure the mock function is correct + # Explicitly calling load_libgmt to make sure the mock function is correct. 
load_libgmt() - assert self.counter == 1 + assert mock_cdll.call_count == 1 load_libgmt() - assert self.counter == 2 + assert mock_cdll.call_count == 2 ############################################################################### diff --git a/pygmt/tests/test_clib_put_vector.py b/pygmt/tests/test_clib_put_vector.py index 968e09bf87c..cad77ba59cf 100644 --- a/pygmt/tests/test_clib_put_vector.py +++ b/pygmt/tests/test_clib_put_vector.py @@ -126,7 +126,7 @@ def test_put_vector_string_dtype(): "2021-02-03T00:00:00", "2021-02-03T04:00:00", "2021-02-03T04:05:06", - f"{datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%d')}T04:50:06", + f"{datetime.datetime.now(tz=datetime.UTC).strftime('%Y-%m-%d')}T04:50:06", ], ] diff --git a/pygmt/tests/test_clib_to_numpy.py b/pygmt/tests/test_clib_to_numpy.py index a2f8ba4a2aa..40b45e466d8 100644 --- a/pygmt/tests/test_clib_to_numpy.py +++ b/pygmt/tests/test_clib_to_numpy.py @@ -2,8 +2,8 @@ Tests for the _to_numpy function in the clib.conversion module. """ +import datetime import sys -from datetime import date, datetime import numpy as np import numpy.testing as npt @@ -11,14 +11,32 @@ import pytest from packaging.version import Version from pygmt.clib.conversion import _to_numpy +from pygmt.helpers.testing import skip_if_no try: import pyarrow as pa _HAS_PYARROW = True except ImportError: + + class pa: # noqa: N801 + """ + A dummy class to mimic pyarrow. + """ + + __version__ = "0.0.0" + + @staticmethod + def timestamp(unit: str, tz: str | None = None): + """ + A dummy function to mimic pyarrow.timestamp. + """ + _HAS_PYARROW = False +# Mark tests that require pyarrow +pa_marks = {"marks": skip_if_no(package="pyarrow")} + def _check_result(result, expected_dtype): """ @@ -36,6 +54,7 @@ def _check_result(result, expected_dtype): @pytest.mark.parametrize( ("data", "expected_dtype"), [ + # TODO(NumPy>=2.0): Remove the if-else statement after NumPy>=2.0. 
pytest.param( [1, 2, 3], np.int32 @@ -61,6 +80,70 @@ def test_to_numpy_python_types(data, expected_dtype): npt.assert_array_equal(result, data) +@pytest.mark.parametrize( + "data", + [ + pytest.param( + ["2018", "2018-02", "2018-03-01", "2018-04-01T01:02:03"], id="iso8601" + ), + pytest.param( + [ + datetime.date(2018, 1, 1), + datetime.datetime(2018, 2, 1), + datetime.date(2018, 3, 1), + datetime.datetime(2018, 4, 1, 1, 2, 3), + ], + id="datetime", + ), + pytest.param( + [ + np.datetime64("2018"), + np.datetime64("2018-02"), + np.datetime64("2018-03-01"), + np.datetime64("2018-04-01T01:02:03"), + ], + id="np_datetime64", + ), + pytest.param( + [ + pd.Timestamp("2018-01-01"), + pd.Timestamp("2018-02-01"), + pd.Timestamp("2018-03-01"), + pd.Timestamp("2018-04-01T01:02:03"), + ], + id="pd_timestamp", + ), + pytest.param( + [ + "2018-01-01", + np.datetime64("2018-02-01"), + datetime.datetime(2018, 3, 1), + pd.Timestamp("2018-04-01T01:02:03"), + ], + id="mixed", + ), + ], +) +def test_to_numpy_python_datetime(data): + """ + Test the _to_numpy function with Python sequence of datetime types. + """ + result = _to_numpy(data) + assert result.dtype.type == np.datetime64 + npt.assert_array_equal( + result, + np.array( + [ + "2018-01-01T00:00:00", + "2018-02-01T00:00:00", + "2018-03-01T00:00:00", + "2018-04-01T01:02:03", + ], + dtype="datetime64[s]", + ), + ) + + ######################################################################################## # Test the _to_numpy function with NumPy arrays. # @@ -102,9 +185,9 @@ def test_to_numpy_python_types(data, expected_dtype): @pytest.mark.parametrize(("dtype", "expected_dtype"), np_dtype_params) -def test_to_numpy_ndarray_numpy_dtypes_numeric(dtype, expected_dtype): +def test_to_numpy_numpy_numeric(dtype, expected_dtype): """ - Test the _to_numpy function with NumPy arrays of NumPy numeric dtypes. + Test the _to_numpy function with NumPy arrays of numeric dtypes. Test both 1-D and 2-D arrays which are not C-contiguous. 
""" @@ -124,9 +207,9 @@ def test_to_numpy_ndarray_numpy_dtypes_numeric(dtype, expected_dtype): @pytest.mark.parametrize("dtype", [None, np.str_, "U10"]) -def test_to_numpy_ndarray_numpy_dtypes_string(dtype): +def test_to_numpy_numpy_string(dtype): """ - Test the _to_numpy function with NumPy arrays of string types. + Test the _to_numpy function with NumPy arrays of string dtypes. """ array = np.array(["abc", "defg", "12345"], dtype=dtype) result = _to_numpy(array) @@ -134,6 +217,36 @@ def test_to_numpy_ndarray_numpy_dtypes_string(dtype): npt.assert_array_equal(result, array) +@pytest.mark.parametrize( + "dtype", + [ + np.datetime64, # The expected dtype is "datetime64[D]" for this test. + "datetime64[Y]", + "datetime64[M]", + "datetime64[W]", + "datetime64[D]", + "datetime64[h]", + "datetime64[m]", + "datetime64[s]", + "datetime64[ms]", + "datetime64[us]", + "datetime64[ns]", + ], +) +def test_to_numpy_numpy_datetime(dtype): + """ + Test the _to_ndarray function with 1-D NumPy arrays of datetime. + + Time units "fs", "as", "ps" are not tested here because they can only represent a + small range of times in 1969-1970. + """ + array = np.array(["2024-01-01", "2024-01-02", "2024-01-03"], dtype=dtype) + result = _to_numpy(array) + _check_result(result, np.datetime64) + assert result.dtype == (dtype if isinstance(dtype, str) else "datetime64[D]") + npt.assert_array_equal(result, array) + + ######################################################################################## # Test the _to_numpy function with pandas.Series. # @@ -158,22 +271,142 @@ def test_to_numpy_ndarray_numpy_dtypes_string(dtype): # - BooleanDtype # - ArrowDtype: a special dtype used to store data in the PyArrow format. # +# In pandas, PyArrow types can be specified using the following formats: +# +# - Prefixed with the name of the dtype and "[pyarrow]" (e.g., "int8[pyarrow]") +# - Specified using ``ArrowDType`` (e.g., "pd.ArrowDtype(pa.int8())") +# # References: # 1. 
https://pandas.pydata.org/docs/reference/arrays.html # 2. https://pandas.pydata.org/docs/user_guide/basics.html#basics-dtypes # 3. https://pandas.pydata.org/docs/user_guide/pyarrow.html ######################################################################################## -@pytest.mark.parametrize(("dtype", "expected_dtype"), np_dtype_params) -def test_to_numpy_pandas_series_numpy_dtypes_numeric(dtype, expected_dtype): +@pytest.mark.parametrize( + ("dtype", "expected_dtype"), + [ + *np_dtype_params, + pytest.param(pd.Int8Dtype(), np.int8, id="Int8"), + pytest.param(pd.Int16Dtype(), np.int16, id="Int16"), + pytest.param(pd.Int32Dtype(), np.int32, id="Int32"), + pytest.param(pd.Int64Dtype(), np.int64, id="Int64"), + pytest.param(pd.UInt8Dtype(), np.uint8, id="UInt8"), + pytest.param(pd.UInt16Dtype(), np.uint16, id="UInt16"), + pytest.param(pd.UInt32Dtype(), np.uint32, id="UInt32"), + pytest.param(pd.UInt64Dtype(), np.uint64, id="UInt64"), + pytest.param(pd.Float32Dtype(), np.float32, id="Float32"), + pytest.param(pd.Float64Dtype(), np.float64, id="Float64"), + pytest.param("int8[pyarrow]", np.int8, id="int8[pyarrow]", **pa_marks), + pytest.param("int16[pyarrow]", np.int16, id="int16[pyarrow]", **pa_marks), + pytest.param("int32[pyarrow]", np.int32, id="int32[pyarrow]", **pa_marks), + pytest.param("int64[pyarrow]", np.int64, id="int64[pyarrow]", **pa_marks), + pytest.param("uint8[pyarrow]", np.uint8, id="uint8[pyarrow]", **pa_marks), + pytest.param("uint16[pyarrow]", np.uint16, id="uint16[pyarrow]", **pa_marks), + pytest.param("uint32[pyarrow]", np.uint32, id="uint32[pyarrow]", **pa_marks), + pytest.param("uint64[pyarrow]", np.uint64, id="uint64[pyarrow]", **pa_marks), + pytest.param("float16[pyarrow]", np.float16, id="float16[pyarrow]", **pa_marks), + pytest.param("float32[pyarrow]", np.float32, id="float32[pyarrow]", **pa_marks), + pytest.param("float64[pyarrow]", np.float64, id="float64[pyarrow]", **pa_marks), + ], +) +def test_to_numpy_pandas_numeric(dtype, 
expected_dtype): """ - Test the _to_numpy function with pandas.Series of NumPy numeric dtypes. + Test the _to_numpy function with pandas.Series of numeric dtypes. """ - series = pd.Series([1, 2, 3, 4, 5, 6], dtype=dtype)[::2] # Not C-contiguous + data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] + # TODO(pandas>=2.2): Remove the workaround for float16 dtype in pandas<2.2. + # float16 needs special handling for pandas < 2.2. + # Example from https://arrow.apache.org/docs/python/generated/pyarrow.float16.html + if dtype == "float16[pyarrow]" and Version(pd.__version__) < Version("2.2"): + data = np.array(data, dtype=np.float16) + series = pd.Series(data, dtype=dtype)[::2] # Not C-contiguous result = _to_numpy(series) _check_result(result, expected_dtype) npt.assert_array_equal(result, series) +@pytest.mark.parametrize( + ("dtype", "expected_dtype"), + [ + pytest.param(np.float16, np.float16, id="float16"), + pytest.param(np.float32, np.float32, id="float32"), + pytest.param(np.float64, np.float64, id="float64"), + pytest.param(np.longdouble, np.longdouble, id="longdouble"), + pytest.param(pd.Int8Dtype(), np.float64, id="Int8"), + pytest.param(pd.Int16Dtype(), np.float64, id="Int16"), + pytest.param(pd.Int32Dtype(), np.float64, id="Int32"), + pytest.param(pd.Int64Dtype(), np.float64, id="Int64"), + pytest.param(pd.UInt8Dtype(), np.float64, id="UInt8"), + pytest.param(pd.UInt16Dtype(), np.float64, id="UInt16"), + pytest.param(pd.UInt32Dtype(), np.float64, id="UInt32"), + pytest.param(pd.UInt64Dtype(), np.float64, id="UInt64"), + pytest.param(pd.Float32Dtype(), np.float32, id="Float32"), + pytest.param(pd.Float64Dtype(), np.float64, id="Float64"), + pytest.param("int8[pyarrow]", np.float64, id="int8[pyarrow]", **pa_marks), + pytest.param("int16[pyarrow]", np.float64, id="int16[pyarrow]", **pa_marks), + pytest.param("int32[pyarrow]", np.float64, id="int32[pyarrow]", **pa_marks), + pytest.param("int64[pyarrow]", np.float64, id="int64[pyarrow]", **pa_marks), + 
pytest.param("uint8[pyarrow]", np.float64, id="uint8[pyarrow]", **pa_marks), + pytest.param("uint16[pyarrow]", np.float64, id="uint16[pyarrow]", **pa_marks), + pytest.param("uint32[pyarrow]", np.float64, id="uint32[pyarrow]", **pa_marks), + pytest.param("uint64[pyarrow]", np.float64, id="uint64[pyarrow]", **pa_marks), + pytest.param("float16[pyarrow]", np.float16, id="float16[pyarrow]", **pa_marks), + pytest.param("float32[pyarrow]", np.float32, id="float32[pyarrow]", **pa_marks), + pytest.param("float64[pyarrow]", np.float64, id="float64[pyarrow]", **pa_marks), + ], +) +def test_to_numpy_pandas_numeric_with_na(dtype, expected_dtype): + """ + Test the _to_numpy function with pandas.Series of NumPy/pandas/PyArrow numeric + dtypes and missing values (NA). + """ + data = [1.0, 2.0, None, 4.0, 5.0, 6.0] + # TODO(pandas>=2.2): Remove the workaround for float16 dtype in pandas<2.2. + # float16 needs special handling for pandas < 2.2. + # Example from https://arrow.apache.org/docs/python/generated/pyarrow.float16.html + if dtype == "float16[pyarrow]" and Version(pd.__version__) < Version("2.2"): + data = np.array(data, dtype=np.float16) + series = pd.Series(data, dtype=dtype)[::2] # Not C-contiguous + assert series.isna().any() + result = _to_numpy(series) + _check_result(result, expected_dtype) + npt.assert_array_equal(result, np.array([1.0, np.nan, 5.0], dtype=expected_dtype)) + + +@pytest.mark.parametrize( + "dtype", + [ + None, + np.str_, + "U10", + "string[python]", + pytest.param("string[pyarrow]", marks=skip_if_no(package="pyarrow")), + pytest.param( + "string[pyarrow_numpy]", + marks=[ + skip_if_no(package="pyarrow"), + # TODO(pandas>=2.1): Remove the skipif marker for pandas<2.1. + pytest.mark.skipif( + Version(pd.__version__) < Version("2.1"), + reason="string[pyarrow_numpy] was added since pandas 2.1", + ), + ], + ), + ], +) +def test_to_numpy_pandas_string(dtype): + """ + Test the _to_numpy function with pandas.Series of string dtypes. 
+ + In pandas, string arrays can be specified in multiple ways. + + Reference: https://pandas.pydata.org/docs/reference/api/pandas.StringDtype.html + """ + array = pd.Series(["abc", "defg", "12345"], dtype=dtype) + result = _to_numpy(array) + _check_result(result, np.str_) + npt.assert_array_equal(result, array) + + @pytest.mark.skipif(not _HAS_PYARROW, reason="pyarrow is not installed") @pytest.mark.parametrize( ("dtype", "expected_dtype"), @@ -182,7 +415,7 @@ def test_to_numpy_pandas_series_numpy_dtypes_numeric(dtype, expected_dtype): pytest.param("date64[ms][pyarrow]", "datetime64[ms]", id="date64[ms]"), ], ) -def test_to_numpy_pandas_series_pyarrow_dtypes_date(dtype, expected_dtype): +def test_to_numpy_pandas_date(dtype, expected_dtype): """ Test the _to_numpy function with pandas.Series of PyArrow date32/date64 types. """ @@ -196,6 +429,113 @@ def test_to_numpy_pandas_series_pyarrow_dtypes_date(dtype, expected_dtype): ) +pandas_old_version = pytest.mark.xfail( + condition=Version(pd.__version__) < Version("2.1"), + reason="pandas 2.0 bug reported in https://github.com/pandas-dev/pandas/issues/52705", +) + + +@pytest.mark.parametrize( + ("dtype", "expected_dtype"), + [ + # NumPy datetime64 types. Only unit 's'/'ms'/'us'/'ns' are supported. + pytest.param("datetime64[s]", "datetime64[s]", id="datetime64[s]"), + pytest.param("datetime64[ms]", "datetime64[ms]", id="datetime64[ms]"), + pytest.param("datetime64[us]", "datetime64[us]", id="datetime64[us]"), + pytest.param("datetime64[ns]", "datetime64[ns]", id="datetime64[ns]"), + # pandas.DatetimeTZDtype can be given in two ways [tz is required]: + # 1. pandas.DatetimeTZDtype(unit, tz) + # 2. 
String aliases: "datetime64[unit, tz]" + pytest.param( + "datetime64[s, UTC]", + "datetime64[s]", + id="datetime64[s, tz=UTC]", + marks=pandas_old_version, + ), + pytest.param( + "datetime64[s, America/New_York]", + "datetime64[s]", + id="datetime64[s, tz=America/New_York]", + marks=pandas_old_version, + ), + pytest.param( + "datetime64[s, +07:30]", + "datetime64[s]", + id="datetime64[s, +07:30]", + marks=pandas_old_version, + ), + # PyArrow timestamp types can be given in two ways [tz is optional]: + # 1. pd.ArrowDtype(pyarrow.Timestamp(unit, tz=tz)) + # 2. String aliases: "timestamp[unit, tz][pyarrow]" + pytest.param( + "timestamp[s][pyarrow]", + "datetime64[s]", + id="timestamp[s][pyarrow]", + marks=skip_if_no(package="pyarrow"), + ), + pytest.param( + "timestamp[ms][pyarrow]", + "datetime64[ms]", + id="timestamp[ms][pyarrow]", + marks=[skip_if_no(package="pyarrow"), pandas_old_version], + ), + pytest.param( + "timestamp[us][pyarrow]", + "datetime64[us]", + id="timestamp[us][pyarrow]", + marks=[skip_if_no(package="pyarrow"), pandas_old_version], + ), + pytest.param( + "timestamp[ns][pyarrow]", + "datetime64[ns]", + id="timestamp[ns][pyarrow]", + marks=skip_if_no(package="pyarrow"), + ), + pytest.param( + "timestamp[s, UTC][pyarrow]", + "datetime64[s]", + id="timestamp[s, UTC][pyarrow]", + marks=skip_if_no(package="pyarrow"), + ), + pytest.param( + "timestamp[s, America/New_York][pyarrow]", + "datetime64[s]", + id="timestamp[s, America/New_York][pyarrow]", + marks=skip_if_no(package="pyarrow"), + ), + pytest.param( + "timestamp[s, +08:00][pyarrow]", + "datetime64[s]", + id="timestamp[s, +08:00][pyarrow]", + marks=skip_if_no(package="pyarrow"), + ), + ], +) +def test_to_numpy_pandas_datetime(dtype, expected_dtype): + """ + Test the _to_numpy function with pandas.Series of datetime types. 
+ """ + series = pd.Series( + [pd.Timestamp("2024-01-02T03:04:05"), pd.Timestamp("2024-01-02T03:04:06")], + dtype=dtype, + ) + result = _to_numpy(series) + _check_result(result, np.datetime64) + assert result.dtype == expected_dtype + + # Convert to UTC if the dtype is timezone-aware + if "," in str(dtype): # A hacky way to decide if the dtype is timezone-aware. + # TODO(pandas>=2.1): Simplify the if-else statement. + if Version(pd.__version__) < Version("2.1") and dtype.startswith("timestamp"): + # pandas 2.0 doesn't have the dt.tz_convert method for pyarrow.Timestamp. + series = pd.to_datetime(series, utc=True) + else: + series = series.dt.tz_convert("UTC") + # Remove time zone information and preserve local time. + expected_series = series.dt.tz_localize(tz=None) + npt.assert_array_equal(result, np.array(expected_series, dtype=expected_dtype)) + + ######################################################################################## # Test the _to_numpy function with PyArrow arrays. # @@ -205,9 +545,11 @@ def test_to_numpy_pandas_series_pyarrow_dtypes_date(dtype, expected_dtype): # - int8, int16, int32, int64 # - uint8, uint16, uint32, uint64 # - float16, float32, float64 +# - String types: string/utf8, large_string/large_utf8, string_view # - Date types: # - date32[day] # - date64[ms] +# - Timestamp types: timestamp[unit], timestamp[unit, tz] # # In PyArrow, array types can be specified in two ways: # @@ -233,9 +575,9 @@ def test_to_numpy_pandas_series_pyarrow_dtypes_date(dtype, expected_dtype): pytest.param("float64", np.float64, id="float64"), ], ) -def test_to_numpy_pyarrow_array_pyarrow_dtypes_numeric(dtype, expected_dtype): +def test_to_numpy_pyarrow_numeric(dtype, expected_dtype): """ - Test the _to_numpy function with PyArrow arrays of PyArrow numeric types. + Test the _to_numpy function with PyArrow arrays of numeric types. 
""" data = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] if dtype == "float16": # float16 needs special handling @@ -264,9 +606,9 @@ def test_to_numpy_pyarrow_array_pyarrow_dtypes_numeric(dtype, expected_dtype): pytest.param("float64", np.float64, id="float64"), ], ) -def test_to_numpy_pyarrow_array_pyarrow_dtypes_numeric_with_na(dtype, expected_dtype): +def test_to_numpy_pyarrow_numeric_with_na(dtype, expected_dtype): """ - Test the _to_numpy function with PyArrow arrays of PyArrow numeric types and NA. + Test the _to_numpy function with PyArrow arrays of numeric types and NA. """ data = [1.0, 2.0, None, 4.0, 5.0, 6.0] if dtype == "float16": # float16 needs special handling @@ -278,6 +620,35 @@ def test_to_numpy_pyarrow_array_pyarrow_dtypes_numeric_with_na(dtype, expected_d npt.assert_array_equal(result, array) +@pytest.mark.skipif(not _HAS_PYARROW, reason="pyarrow is not installed") +@pytest.mark.parametrize( + "dtype", + [ + None, + "string", + "utf8", # alias for string + "large_string", + "large_utf8", # alias for large_string + pytest.param( + "string_view", + # TODO(pyarrow>=16): Remove the skipif marker for pyarrow<16. + marks=pytest.mark.skipif( + Version(pa.__version__) < Version("16"), + reason="string_view type was added since pyarrow 16", + ), + ), + ], +) +def test_to_numpy_pyarrow_string(dtype): + """ + Test the _to_numpy function with PyArrow arrays of string types. 
+ """ + array = pa.array(["abc", "defg", "12345"], type=dtype) + result = _to_numpy(array) + _check_result(result, np.str_) + npt.assert_array_equal(result, array) + + @pytest.mark.skipif(not _HAS_PYARROW, reason="pyarrow is not installed") @pytest.mark.parametrize( ("dtype", "expected_dtype"), @@ -286,9 +657,9 @@ def test_to_numpy_pyarrow_array_pyarrow_dtypes_numeric_with_na(dtype, expected_d pytest.param("date64[ms]", "datetime64[ms]", id="date64[ms]"), ], ) -def test_to_numpy_pyarrow_array_pyarrow_dtypes_date(dtype, expected_dtype): +def test_to_numpy_pyarrow_date(dtype, expected_dtype): """ - Test the _to_numpy function with PyArrow arrays of PyArrow date types. + Test the _to_numpy function with PyArrow arrays of date32/date64 types. date32[day] and date64[ms] are stored as 32-bit and 64-bit integers, respectively, representing the number of days and milliseconds since the UNIX epoch (1970-01-01). @@ -296,9 +667,9 @@ def test_to_numpy_pyarrow_array_pyarrow_dtypes_date(dtype, expected_dtype): Here we explicitly check the dtype and date unit of the result. 
""" data = [ - date(2024, 1, 1), - datetime(2024, 1, 2), - datetime(2024, 1, 3), + datetime.date(2024, 1, 1), + datetime.datetime(2024, 1, 2), + datetime.datetime(2024, 1, 3), ] array = pa.array(data, type=dtype) result = _to_numpy(array) @@ -308,3 +679,47 @@ def test_to_numpy_pyarrow_array_pyarrow_dtypes_date(dtype, expected_dtype): result, np.array(["2024-01-01", "2024-01-02", "2024-01-03"], dtype=expected_dtype), ) + + +@pytest.mark.skipif(not _HAS_PYARROW, reason="pyarrow is not installed") +@pytest.mark.parametrize( + ("dtype", "expected_dtype"), + [ + pytest.param(None, "datetime64[us]", id="None"), + pytest.param("timestamp[s]", "datetime64[s]", id="timestamp[s]"), + pytest.param("timestamp[ms]", "datetime64[ms]", id="timestamp[ms]"), + pytest.param("timestamp[us]", "datetime64[us]", id="timestamp[us]"), + pytest.param("timestamp[ns]", "datetime64[ns]", id="timestamp[ns]"), + pytest.param( + pa.timestamp("s", tz="UTC"), "datetime64[s]", id="timestamp[s, tz=UTC]" + ), # pa.timestamp with tz has no string alias. + pytest.param( + pa.timestamp("s", tz="America/New_York"), + "datetime64[s]", + id="timestamp[s, tz=America/New_York]", + ), + pytest.param( + pa.timestamp("s", tz="+07:30"), + "datetime64[s]", + id="timestamp[s, tz=+07:30]", + ), + ], +) +def test_to_numpy_pyarrow_timestamp(dtype, expected_dtype): + """ + Test the _to_numpy function with PyArrow arrays of PyArrow timestamp types. + + pyarrow.timestamp(unit, tz=None) can accept units "s", "ms", "us", and "ns". 
+ + Reference: https://arrow.apache.org/docs/python/generated/pyarrow.timestamp.html + """ + data = [ + datetime.datetime(2024, 1, 2, 3, 4, 5), + datetime.datetime(2024, 1, 2, 3, 4, 6), + ] + array = pa.array(data, type=dtype) + result = _to_numpy(array) + _check_result(result, np.datetime64) + assert result.dtype == expected_dtype + assert result[0] == np.datetime64("2024-01-02T03:04:05") + assert result[1] == np.datetime64("2024-01-02T03:04:06") diff --git a/pygmt/tests/test_clib_vectors_to_arrays.py b/pygmt/tests/test_clib_vectors_to_arrays.py index e59690069ca..b85912d48d6 100644 --- a/pygmt/tests/test_clib_vectors_to_arrays.py +++ b/pygmt/tests/test_clib_vectors_to_arrays.py @@ -69,17 +69,6 @@ def test_vectors_to_arrays_not_c_contiguous(): _check_arrays(arrays) -def test_vectors_to_arrays_pandas_nan(): - """ - Test the vectors_to_arrays function with pandas Series containing NaNs. - """ - vectors = [pd.Series(data=[0, 4, pd.NA, 8, 6], dtype=pd.Int32Dtype())] - arrays = vectors_to_arrays(vectors) - npt.assert_equal(arrays[0], np.array([0, 4, np.nan, 8, 6], dtype=np.float64)) - assert arrays[0].dtype == np.float64 - _check_arrays(arrays) - - @pytest.mark.skipif(not _HAS_PYARROW, reason="pyarrow is not installed.") def test_vectors_to_arrays_pyarrow_datetime(): """ diff --git a/pygmt/tests/test_clib_virtualfile_from_stringio.py b/pygmt/tests/test_clib_virtualfile_from_stringio.py index ce6de238a88..62daaa688c8 100644 --- a/pygmt/tests/test_clib_virtualfile_from_stringio.py +++ b/pygmt/tests/test_clib_virtualfile_from_stringio.py @@ -43,14 +43,9 @@ def test_virtualfile_from_stringio(): Test the virtualfile_from_stringio method. 
""" data = io.StringIO( - "# Comment\n" - "H 24p Legend\n" - "N 2\n" - "S 0.1i c 0.15i p300/12 0.25p 0.3i My circle\n" - ) - expected = ( - ">\n" "H 24p Legend\n" "N 2\n" "S 0.1i c 0.15i p300/12 0.25p 0.3i My circle\n" + "# Comment\nH 24p Legend\nN 2\nS 0.1i c 0.15i p300/12 0.25p 0.3i My circle\n" ) + expected = ">\nH 24p Legend\nN 2\nS 0.1i c 0.15i p300/12 0.25p 0.3i My circle\n" assert _stringio_to_dataset(data) == expected @@ -66,13 +61,7 @@ def test_one_segment(): "6 7 8 9 FGHIJK LMN OPQ\n" "RSTUVWXYZ\n" ) - expected = ( - "> Segment 1\n" - "1 2 3 ABC\n" - "4 5 DE\n" - "6 7 8 9 FGHIJK LMN OPQ\n" - "RSTUVWXYZ\n" - ) + expected = "> Segment 1\n1 2 3 ABC\n4 5 DE\n6 7 8 9 FGHIJK LMN OPQ\nRSTUVWXYZ\n" assert _stringio_to_dataset(data) == expected diff --git a/pygmt/tests/test_clib_virtualfile_from_vectors.py b/pygmt/tests/test_clib_virtualfile_from_vectors.py index 041bc7a803c..234ba01d7cc 100644 --- a/pygmt/tests/test_clib_virtualfile_from_vectors.py +++ b/pygmt/tests/test_clib_virtualfile_from_vectors.py @@ -11,6 +11,14 @@ from pygmt.clib.session import DTYPES_NUMERIC from pygmt.exceptions import GMTInvalidInput from pygmt.helpers import GMTTempFile +from pygmt.helpers.testing import skip_if_no + +try: + import pyarrow as pa + + pa_array = pa.array +except ImportError: + pa_array = None @pytest.fixture(scope="module", name="dtypes") @@ -53,17 +61,30 @@ def test_virtualfile_from_vectors(dtypes): @pytest.mark.benchmark -@pytest.mark.parametrize("dtype", [str, object]) -def test_virtualfile_from_vectors_one_string_or_object_column(dtype): - """ - Test passing in one column with string or object dtype into virtual file dataset. 
+@pytest.mark.parametrize( + ("array_func", "dtype"), + [ + pytest.param(np.array, {"dtype": np.str_}, id="str"), + pytest.param(np.array, {"dtype": np.object_}, id="object"), + pytest.param( + pa_array, + {}, # {"type": pa.string()} + marks=skip_if_no(package="pyarrow"), + id="pyarrow", + ), + ], +) +def test_virtualfile_from_vectors_one_string_or_object_column(array_func, dtype): + """ + Test passing in one column with string (numpy/pyarrow) or object (numpy) + dtype into virtual file dataset. """ size = 5 x = np.arange(size, dtype=np.int32) y = np.arange(size, size * 2, 1, dtype=np.int32) - strings = np.array(["a", "bc", "defg", "hijklmn", "opqrst"], dtype=dtype) + strings = array_func(["a", "bc", "defg", "hijklmn", "opqrst"], **dtype) with clib.Session() as lib: - with lib.virtualfile_from_vectors((x, y, strings)) as vfile: + with lib.virtualfile_from_vectors(vectors=(x, y, strings)) as vfile: with GMTTempFile() as outfile: lib.call_module("convert", [vfile, f"->{outfile.name}"]) output = outfile.read(keep_tabs=True) @@ -171,6 +192,8 @@ def test_virtualfile_from_vectors_arraylike(): assert output == expected +# TODO(PyGMT>=0.16.0): Remove this test in PyGMT v0.16.0 in which the "*args" parameter +# will be removed. def test_virtualfile_from_vectors_args(): """ Test the backward compatibility of the deprecated syntax for passing multiple diff --git a/pygmt/tests/test_clib_virtualfile_in.py b/pygmt/tests/test_clib_virtualfile_in.py index aac8e4af772..8a43c1dc273 100644 --- a/pygmt/tests/test_clib_virtualfile_in.py +++ b/pygmt/tests/test_clib_virtualfile_in.py @@ -105,6 +105,7 @@ def test_virtualfile_in_fail_non_valid_data(data): ) +# TODO(GMT>6.5.0): Remove the xfail marker for GMT<=6.5.0. 
@pytest.mark.xfail( condition=Version(__gmt_version__) <= Version("6.5.0"), reason="Upstream bug fixed in https://github.com/GenericMappingTools/gmt/pull/8600", @@ -127,33 +128,3 @@ def test_virtualfile_in_matrix_string_dtype(): assert output == "347.5 348.5 -30.5 -30\n" # Should check that lib.virtualfile_from_vectors is called once, # not lib.virtualfile_from_matrix, but it's technically complicated. - - -def test_virtualfile_from_data(): - """ - Test the backwards compatibility of the virtualfile_from_data method. - - This test is the same as test_virtualfile_in_required_z_matrix, but using the - deprecated method. - """ - shape = (5, 3) - dataframe = pd.DataFrame( - data=np.arange(shape[0] * shape[1]).reshape(shape), columns=["x", "y", "z"] - ) - data = np.array(dataframe) - with clib.Session() as lib: - with pytest.warns(FutureWarning, match="virtualfile_from_data"): - with lib.virtualfile_from_data( - data=data, required_z=True, check_kind="vector" - ) as vfile: - with GMTTempFile() as outfile: - lib.call_module("info", [vfile, f"->{outfile.name}"]) - output = outfile.read(keep_tabs=True) - bounds = "\t".join( - [ - f"<{i.min():.0f}/{i.max():.0f}>" - for i in (dataframe.x, dataframe.y, dataframe.z) - ] - ) - expected = f": N = {shape[0]}\t{bounds}\n" - assert output == expected diff --git a/pygmt/tests/test_clib_virtualfiles.py b/pygmt/tests/test_clib_virtualfiles.py index a45a662de71..ba48200deae 100644 --- a/pygmt/tests/test_clib_virtualfiles.py +++ b/pygmt/tests/test_clib_virtualfiles.py @@ -107,33 +107,3 @@ def test_open_virtualfile_bad_direction(): with pytest.raises(GMTInvalidInput): with lib.open_virtualfile(*vfargs): pass - - -def test_open_virtual_file(): - """ - Test the deprecated Session.open_virtual_file method. - - This test is the same as test_open_virtualfile, but using the deprecated method. 
- """ - shape = (5, 3) - with clib.Session() as lib: - family = "GMT_IS_DATASET|GMT_VIA_MATRIX" - geometry = "GMT_IS_POINT" - dataset = lib.create_data( - family=family, - geometry=geometry, - mode="GMT_CONTAINER_ONLY", - dim=[shape[1], shape[0], 1, 0], # columns, rows, layers, dtype - ) - data = np.arange(shape[0] * shape[1]).reshape(shape) - lib.put_matrix(dataset, matrix=data) - # Add the dataset to a virtual file and pass it along to gmt info - with pytest.warns(FutureWarning, match="open_virtual_file"): - vfargs = (family, geometry, "GMT_IN|GMT_IS_REFERENCE", dataset) - with lib.open_virtual_file(*vfargs) as vfile: - with GMTTempFile() as outfile: - lib.call_module("info", [vfile, f"->{outfile.name}"]) - output = outfile.read(keep_tabs=True) - bounds = "\t".join([f"<{col.min():.0f}/{col.max():.0f}>" for col in data.T]) - expected = f": N = {shape[0]}\t{bounds}\n" - assert output == expected diff --git a/pygmt/tests/test_contour.py b/pygmt/tests/test_contour.py index c359c1d124c..2cd2cc2c5ee 100644 --- a/pygmt/tests/test_contour.py +++ b/pygmt/tests/test_contour.py @@ -2,6 +2,7 @@ Test Figure.contour. """ +import platform from pathlib import Path import numpy as np @@ -30,6 +31,7 @@ def fixture_region(): @pytest.mark.mpl_image_compare +@pytest.mark.xfail(platform.machine() == "aarch64", reason="Fails on Linux ARM64") def test_contour_vec(region): """ Plot an x-centered gaussian kernel with different y scale. 
@@ -130,6 +132,7 @@ def test_contour_multiple_levels(region): @pytest.mark.mpl_image_compare(filename="test_contour_vec.png") +@pytest.mark.xfail(platform.machine() == "aarch64", reason="Fails on Linux ARM64") def test_contour_incols_transposed_data(region): """ Make sure that transposing the data matrix still produces a correct result with diff --git a/pygmt/tests/test_datasets_earth_deflection.py b/pygmt/tests/test_datasets_earth_deflection.py new file mode 100644 index 00000000000..9118779a379 --- /dev/null +++ b/pygmt/tests/test_datasets_earth_deflection.py @@ -0,0 +1,106 @@ +""" +Test basic functionality for loading IGPP Earth east-west and south-north deflection +datasets. +""" + +import numpy as np +import numpy.testing as npt +from pygmt.datasets import load_earth_deflection + + +def test_earth_edefl_01d(): + """ + Test some properties of the Earth east-west deflection 01d data. + """ + data = load_earth_deflection(resolution="01d") + assert data.name == "z" + assert data.attrs["long_name"] == "edefl (microradians)" + assert data.attrs["description"] == "IGPP Earth east-west deflection" + assert data.attrs["units"] == "micro-radians" + assert data.attrs["horizontal_datum"] == "WGS84" + assert data.shape == (181, 361) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-90, 91, 1)) + npt.assert_allclose(data.lon, np.arange(-180, 181, 1)) + npt.assert_allclose(data.min(), -142.64, atol=0.04) + npt.assert_allclose(data.max(), 178.32, atol=0.04) + + +def test_earth_edefl_01d_with_region(): + """ + Test loading low-resolution Earth east-west deflection with "region". 
+ """ + data = load_earth_deflection(resolution="01d", region=[-10, 10, -5, 5]) + assert data.shape == (11, 21) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-5, 6, 1)) + npt.assert_allclose(data.lon, np.arange(-10, 11, 1)) + npt.assert_allclose(data.min(), -28.92, atol=0.04) + npt.assert_allclose(data.max(), 24.72, atol=0.04) + + +def test_earth_edefl_01m_default_registration(): + """ + Test that the grid returned by default for the 1 arc-minute resolution has a "pixel" + registration. + """ + data = load_earth_deflection(resolution="01m", region=[-10, -9, 3, 5]) + assert data.shape == (120, 60) + assert data.gmt.registration == 1 + npt.assert_allclose(data.coords["lat"].data.min(), 3.008333333) + npt.assert_allclose(data.coords["lat"].data.max(), 4.991666666) + npt.assert_allclose(data.coords["lon"].data.min(), -9.99166666) + npt.assert_allclose(data.coords["lon"].data.max(), -9.00833333) + npt.assert_allclose(data.min(), -62.24, atol=0.04) + npt.assert_allclose(data.max(), 15.52, atol=0.04) + + +def test_earth_ndefl_01d(): + """ + Test some properties of the Earth north-south deflection 01d data. + """ + data = load_earth_deflection(resolution="01d", component="north") + assert data.name == "z" + assert data.attrs["long_name"] == "ndefl (microradians)" + assert data.attrs["description"] == "IGPP Earth north-south deflection" + assert data.attrs["units"] == "micro-radians" + assert data.attrs["horizontal_datum"] == "WGS84" + assert data.shape == (181, 361) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-90, 91, 1)) + npt.assert_allclose(data.lon, np.arange(-180, 181, 1)) + npt.assert_allclose(data.min(), -214.8, atol=0.04) + npt.assert_allclose(data.max(), 163.04, atol=0.04) + + +def test_earth_ndefl_01d_with_region(): + """ + Test loading low-resolution Earth north-south deflection with "region". 
+ """ + data = load_earth_deflection( + resolution="01d", region=[-10, 10, -5, 5], component="north" + ) + assert data.shape == (11, 21) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-5, 6, 1)) + npt.assert_allclose(data.lon, np.arange(-10, 11, 1)) + npt.assert_allclose(data.min(), -48.08, atol=0.04) + npt.assert_allclose(data.max(), 18.92, atol=0.04) + + +def test_earth_ndefl_01m_default_registration(): + """ + Test that the grid returned by default for the 1 arc-minute resolution has a "pixel" + registration. + """ + data = load_earth_deflection( + resolution="01m", region=[-10, -9, 3, 5], component="north" + ) + assert data.shape == (120, 60) + assert data.gmt.registration == 1 + npt.assert_allclose(data.coords["lat"].data.min(), 3.008333333) + npt.assert_allclose(data.coords["lat"].data.max(), 4.991666666) + npt.assert_allclose(data.coords["lon"].data.min(), -9.99166666) + npt.assert_allclose(data.coords["lon"].data.max(), -9.00833333) + npt.assert_allclose(data.min(), -107.04, atol=0.04) + npt.assert_allclose(data.max(), 20.28, atol=0.04) diff --git a/pygmt/tests/test_datasets_earth_dist.py b/pygmt/tests/test_datasets_earth_dist.py new file mode 100644 index 00000000000..a5f61a0b5f2 --- /dev/null +++ b/pygmt/tests/test_datasets_earth_dist.py @@ -0,0 +1,53 @@ +""" +Test basic functionality for loading Earth distance to shoreline datasets. +""" + +import numpy as np +import numpy.testing as npt +from pygmt.datasets import load_earth_dist + + +def test_earth_dist_01d(): + """ + Test some properties of the Earth distance to shoreline 01d data. 
+ """ + data = load_earth_dist(resolution="01d") + assert data.name == "z" + assert data.attrs["description"] == "GSHHG Earth distance to shoreline" + assert data.attrs["units"] == "kilometers" + assert data.attrs["horizontal_datum"] == "WGS84" + assert data.shape == (181, 361) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-90, 91, 1)) + npt.assert_allclose(data.lon, np.arange(-180, 181, 1)) + npt.assert_allclose(data.min(), -2655.7, atol=0.01) + npt.assert_allclose(data.max(), 2463.42, atol=0.01) + + +def test_earth_dist_01d_with_region(): + """ + Test loading low-resolution Earth distance to shoreline with "region". + """ + data = load_earth_dist(resolution="01d", region=[-10, 10, -5, 5]) + assert data.shape == (11, 21) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-5, 6, 1)) + npt.assert_allclose(data.lon, np.arange(-10, 11, 1)) + npt.assert_allclose(data.min(), -1081.94, atol=0.01) + npt.assert_allclose(data.max(), 105.18, atol=0.01) + + +def test_earth_dist_01m_default_registration(): + """ + Test that the grid returned by default for the 1 arc-minute resolution has a + "gridline" registration. 
+ """ + data = load_earth_dist(resolution="01m", region=[-10, -9, 3, 5]) + assert data.shape == (121, 61) + assert data.gmt.registration == 0 + assert data.coords["lat"].data.min() == 3.0 + assert data.coords["lat"].data.max() == 5.0 + assert data.coords["lon"].data.min() == -10.0 + assert data.coords["lon"].data.max() == -9.0 + npt.assert_allclose(data.min(), -243.62, atol=0.01) + npt.assert_allclose(data.max(), 2.94, atol=0.01) diff --git a/pygmt/tests/test_datasets_earth_free_air_anomaly.py b/pygmt/tests/test_datasets_earth_free_air_anomaly.py index 517a1bb9b89..7ce5fc2662c 100644 --- a/pygmt/tests/test_datasets_earth_free_air_anomaly.py +++ b/pygmt/tests/test_datasets_earth_free_air_anomaly.py @@ -52,3 +52,54 @@ def test_earth_faa_01m_default_registration(): npt.assert_allclose(data.coords["lon"].data.max(), -9.00833333) npt.assert_allclose(data.min(), -49.225, atol=0.025) npt.assert_allclose(data.max(), 115.0, atol=0.025) + + +def test_earth_faaerror_01d(): + """ + Test some properties of the free air anomaly error 01d data. + """ + data = load_earth_free_air_anomaly(resolution="01d", uncertainty=True) + assert data.name == "z" + assert data.attrs["long_name"] == "faaerror (mGal)" + assert data.attrs["description"] == "IGPP Earth free-air anomaly errors" + assert data.attrs["units"] == "mGal" + assert data.attrs["horizontal_datum"] == "WGS84" + assert data.shape == (181, 361) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-90, 91, 1)) + npt.assert_allclose(data.lon, np.arange(-180, 181, 1)) + npt.assert_allclose(data.min(), 0.0, atol=0.04) + npt.assert_allclose(data.max(), 49.16, atol=0.04) + + +def test_earth_faaerror_01d_with_region(): + """ + Test loading low-resolution earth free air anomaly error with 'region'. 
+ """ + data = load_earth_free_air_anomaly( + resolution="01d", region=[-10, 10, -5, 5], uncertainty=True + ) + assert data.shape == (11, 21) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-5, 6, 1)) + npt.assert_allclose(data.lon, np.arange(-10, 11, 1)) + npt.assert_allclose(data.min(), 0.72, atol=0.04) + npt.assert_allclose(data.max(), 21.04, atol=0.04) + + +def test_earth_faaerror_01m_default_registration(): + """ + Test that the grid returned by default for the 1 arc-minute resolution has a "pixel" + registration. + """ + data = load_earth_free_air_anomaly( + resolution="01m", region=[-10, -9, 3, 5], uncertainty=True + ) + assert data.shape == (120, 60) + assert data.gmt.registration == 1 + npt.assert_allclose(data.coords["lat"].data.min(), 3.008333333) + npt.assert_allclose(data.coords["lat"].data.max(), 4.991666666) + npt.assert_allclose(data.coords["lon"].data.min(), -9.99166666) + npt.assert_allclose(data.coords["lon"].data.max(), -9.00833333) + npt.assert_allclose(data.min(), 0.40, atol=0.04) + npt.assert_allclose(data.max(), 13.36, atol=0.04) diff --git a/pygmt/tests/test_datasets_earth_geoid.py b/pygmt/tests/test_datasets_earth_geoid.py index 84bfc5d7bf4..af72969b032 100644 --- a/pygmt/tests/test_datasets_earth_geoid.py +++ b/pygmt/tests/test_datasets_earth_geoid.py @@ -15,7 +15,7 @@ def test_earth_geoid_01d(): assert data.name == "z" assert data.attrs["long_name"] == "geoid (m)" assert data.attrs["description"] == "EGM2008 Earth geoid" - assert data.attrs["units"] == "m" + assert data.attrs["units"] == "meters" assert data.attrs["horizontal_datum"] == "WGS84" assert data.shape == (181, 361) assert data.gmt.registration == 0 diff --git a/pygmt/tests/test_datasets_earth_mean_sea_surface.py b/pygmt/tests/test_datasets_earth_mean_sea_surface.py new file mode 100644 index 00000000000..84b2b7123cc --- /dev/null +++ b/pygmt/tests/test_datasets_earth_mean_sea_surface.py @@ -0,0 +1,53 @@ +""" +Test basic functionality for loading 
Earth mean sea surface datasets. +""" + +import numpy as np +import numpy.testing as npt +from pygmt.datasets import load_earth_mean_sea_surface + + +def test_earth_mss_01d(): + """ + Test some properties of the Earth mean sea surface 01d data. + """ + data = load_earth_mean_sea_surface(resolution="01d") + assert data.name == "z" + assert data.attrs["description"] == "CNES Earth mean sea surface" + assert data.attrs["units"] == "meters" + assert data.attrs["horizontal_datum"] == "WGS84" + assert data.shape == (181, 361) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-90, 91, 1)) + npt.assert_allclose(data.lon, np.arange(-180, 181, 1)) + npt.assert_allclose(data.min(), -104.71, atol=0.01) + npt.assert_allclose(data.max(), 82.38, atol=0.01) + + +def test_earth_mss_01d_with_region(): + """ + Test loading low-resolution Earth mean sea surface with "region". + """ + data = load_earth_mean_sea_surface(resolution="01d", region=[-10, 10, -5, 5]) + assert data.shape == (11, 21) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-5, 6, 1)) + npt.assert_allclose(data.lon, np.arange(-10, 11, 1)) + npt.assert_allclose(data.min(), 6.53, atol=0.01) + npt.assert_allclose(data.max(), 29.31, atol=0.01) + + +def test_earth_mss_01m_default_registration(): + """ + Test that the grid returned by default for the 1 arc-minute resolution has a + "gridline" registration. 
+ """ + data = load_earth_mean_sea_surface(resolution="01m", region=[-10, -9, 3, 5]) + assert data.shape == (121, 61) + assert data.gmt.registration == 0 + assert data.coords["lat"].data.min() == 3.0 + assert data.coords["lat"].data.max() == 5.0 + assert data.coords["lon"].data.min() == -10.0 + assert data.coords["lon"].data.max() == -9.0 + npt.assert_allclose(data.min(), 21.27, atol=0.01) + npt.assert_allclose(data.max(), 31.11, atol=0.01) diff --git a/pygmt/tests/test_datasets_earth_relief.py b/pygmt/tests/test_datasets_earth_relief.py index 44af0e4ff98..b0851430907 100644 --- a/pygmt/tests/test_datasets_earth_relief.py +++ b/pygmt/tests/test_datasets_earth_relief.py @@ -192,6 +192,7 @@ def test_earth_relief_15s_default_registration(): npt.assert_allclose(data.max(), -76.5, atol=0.5) +# TODO(GMT X.Y.Z): Upstream bug which is not fixed yet. @pytest.mark.xfail( condition=Version(__gmt_version__) >= Version("6.5.0"), reason="Upstream bug tracked in https://github.com/GenericMappingTools/pygmt/issues/2511", diff --git a/pygmt/tests/test_datasets_earth_vertical_gravity_gradient.py b/pygmt/tests/test_datasets_earth_vertical_gravity_gradient.py index 84fda47f8f7..67070e46541 100644 --- a/pygmt/tests/test_datasets_earth_vertical_gravity_gradient.py +++ b/pygmt/tests/test_datasets_earth_vertical_gravity_gradient.py @@ -23,7 +23,7 @@ def test_earth_vertical_gravity_gradient_01d(): npt.assert_allclose(data.lon, np.arange(-180, 181, 1)) npt.assert_allclose(data.min(), -40.1875, atol=1 / 32) npt.assert_allclose(data.max(), 45.96875, atol=1 / 32) - assert data[1, 1].isnull() # noqa: PD003 # ruff's bug + assert data[1, 1].isnull() def test_earth_vertical_gravity_gradient_01d_with_region(): diff --git a/pygmt/tests/test_datasets_mean_dynamic_topography.py b/pygmt/tests/test_datasets_mean_dynamic_topography.py new file mode 100644 index 00000000000..deae6e90a60 --- /dev/null +++ b/pygmt/tests/test_datasets_mean_dynamic_topography.py @@ -0,0 +1,53 @@ +""" +Test basic functionality 
for loading Earth mean dynamic topography datasets. +""" + +import numpy as np +import numpy.testing as npt +from pygmt.datasets import load_earth_mean_dynamic_topography + + +def test_earth_mdt_01d(): + """ + Test some properties of the Earth mean dynamic topography 01d data. + """ + data = load_earth_mean_dynamic_topography(resolution="01d") + assert data.name == "z" + assert data.attrs["description"] == "CNES Earth mean dynamic topography" + assert data.attrs["units"] == "meters" + assert data.attrs["horizontal_datum"] == "WGS84" + assert data.shape == (181, 361) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-90, 91, 1)) + npt.assert_allclose(data.lon, np.arange(-180, 181, 1)) + npt.assert_allclose(data.min(), -1.4668, atol=0.0001) + npt.assert_allclose(data.max(), 1.7151, atol=0.0001) + + +def test_earth_mdt_01d_with_region(): + """ + Test loading low-resolution Earth mean dynamic topography with "region". + """ + data = load_earth_mean_dynamic_topography(resolution="01d", region=[-10, 10, -5, 5]) + assert data.shape == (11, 21) + assert data.gmt.registration == 0 + npt.assert_allclose(data.lat, np.arange(-5, 6, 1)) + npt.assert_allclose(data.lon, np.arange(-10, 11, 1)) + npt.assert_allclose(data.min(), 0.346, atol=0.0001) + npt.assert_allclose(data.max(), 0.4839, atol=0.0001) + + +def test_earth_mdt_07m_default_registration(): + """ + Test that the grid returned by default for the 7 arc-minutes resolution has a + "gridline" registration. 
+ """ + data = load_earth_mean_dynamic_topography(resolution="07m", region=[-10, -9, 3, 5]) + assert data.shape == (17, 9) + assert data.gmt.registration == 0 + assert data.coords["lat"].data.min() == 3.0 + assert data.coords["lat"].data.max() == 5.0 + assert data.coords["lon"].data.min() == -10.0 + assert data.coords["lon"].data.max() == -9.0 + npt.assert_allclose(data.min(), 0.4138, atol=0.0001) + npt.assert_allclose(data.max(), 0.4302, atol=0.0001) diff --git a/pygmt/tests/test_grdimage.py b/pygmt/tests/test_grdimage.py index 943b3f12ddc..907886eb0b3 100644 --- a/pygmt/tests/test_grdimage.py +++ b/pygmt/tests/test_grdimage.py @@ -256,6 +256,7 @@ def test_grdimage_imgout_fails(grid): fig.grdimage(grid, A="out.png") +# TODO(GMT>6.5.0): Remove the xfail marker for GMT<=6.5.0. @pytest.mark.xfail( condition=Version(__gmt_version__) <= Version("6.5.0"), reason="Upstream bug fixed in https://github.com/GenericMappingTools/gmt/pull/8554", diff --git a/pygmt/tests/test_grdview.py b/pygmt/tests/test_grdview.py index f73b1150e54..3be4ed7aa42 100644 --- a/pygmt/tests/test_grdview.py +++ b/pygmt/tests/test_grdview.py @@ -161,7 +161,7 @@ def test_grdview_with_perspective_and_zaxis_frame(xrgrid, region): a Transverse Mercator (T) projection. 
""" fig = Figure() - projection = f"T{(region[0]+region[1])/2}/{abs((region[2]+region[3])/2)}" + projection = f"T{(region[0] + region[1]) / 2}/{abs((region[2] + region[3]) / 2)}" fig.grdview( grid=xrgrid, projection=projection, diff --git a/pygmt/tests/test_helpers.py b/pygmt/tests/test_helpers.py index 717708f21e9..92013d3ffb9 100644 --- a/pygmt/tests/test_helpers.py +++ b/pygmt/tests/test_helpers.py @@ -194,13 +194,16 @@ def test_launch_external_viewer_win32(): mock_startfile.assert_called_once_with("preview.png") -def test_launch_external_viewer_unknown_os(): +@pytest.mark.parametrize("fname", ["preview.png", "/full/path/to/preview.png"]) +def test_launch_external_viewer_unknown_os(fname): """ Test that launch_external_viewer uses the webbrowser module as a fallback. """ with ( - patch("webbrowser.open_new_tab") as mock_open, patch("sys.platform", "unknown"), + patch("webbrowser.open_new_tab") as mock_open, ): - launch_external_viewer("preview.png") - mock_open.assert_called_once_with("file://preview.png") + launch_external_viewer(fname) + fullpath = Path(fname).resolve() + assert fullpath.is_absolute() + mock_open.assert_called_once_with(f"file://{fullpath}") diff --git a/pygmt/tests/test_hlines.py b/pygmt/tests/test_hlines.py new file mode 100644 index 00000000000..aaaddad4f08 --- /dev/null +++ b/pygmt/tests/test_hlines.py @@ -0,0 +1,121 @@ +""" +Tests for Figure.hlines. +""" + +import pytest +from pygmt import Figure +from pygmt.exceptions import GMTInvalidInput + + +@pytest.mark.mpl_image_compare +def test_hlines_one_line(): + """ + Plot one horizontal line. 
+ """ + fig = Figure() + fig.basemap(region=[0, 10, 0, 10], projection="X10c/10c", frame=True) + fig.hlines(1) + fig.hlines(2, xmin=1) + fig.hlines(3, xmax=9) + fig.hlines(4, xmin=3, xmax=8) + fig.hlines(5, xmin=4, xmax=8, pen="1p,blue", label="Line at y=5") + fig.hlines(6, xmin=5, xmax=7, pen="1p,red", label="Line at y=6") + fig.legend() + return fig + + +@pytest.mark.mpl_image_compare +def test_hlines_multiple_lines(): + """ + Plot multiple horizontal lines. + """ + fig = Figure() + fig.basemap(region=[0, 10, 0, 16], projection="X10c/10c", frame=True) + fig.hlines([1, 2]) + fig.hlines([3, 4, 5], xmin=[1, 2, 3]) + fig.hlines([6, 7, 8], xmax=[7, 8, 9]) + fig.hlines([9, 10], xmin=[1, 2], xmax=[9, 10]) + fig.hlines([11, 12], xmin=1, xmax=9, pen="1p,blue", label="Lines at y=11,12") + fig.hlines( + [13, 14], xmin=[3, 4], xmax=[8, 9], pen="1p,red", label="Lines at y=13,14" + ) + fig.legend() + return fig + + +@pytest.mark.mpl_image_compare +def test_hlines_clip(): + """ + Plot horizontal lines with clipping or not. + """ + fig = Figure() + fig.basemap(region=[0, 10, 0, 4], projection="X10c/4c", frame=True) + fig.hlines(1, xmin=-2, xmax=12) + fig.hlines(2, xmin=-2, xmax=12, no_clip=True) + return fig + + +@pytest.mark.mpl_image_compare +@pytest.mark.parametrize("region", ["g", "d"]) +def test_hlines_geographic_global(region): + """ + Plot horizontal lines in geographic coordinates. + """ + fig = Figure() + fig.basemap(region=region, projection="R15c", frame=True) + # Plot lines with longitude range of 0 to 360. + fig.hlines(10, pen="1p") + fig.hlines(20, xmin=0, xmax=360, pen="1p") + fig.hlines(30, xmin=0, xmax=180, pen="1p") + fig.hlines(40, xmin=180, xmax=360, pen="1p") + fig.hlines(50, xmin=0, xmax=90, pen="1p") + fig.hlines(60, xmin=90, xmax=180, pen="1p") + fig.hlines(70, xmin=180, xmax=270, pen="1p") + fig.hlines(80, xmin=270, xmax=360, pen="1p") + + # Plot lines with longitude range of -180 to 180. 
+ fig.hlines(-10, pen="1p,red") + fig.hlines(-20, xmin=-180, xmax=180, pen="1p,red") + fig.hlines(-30, xmin=-180, xmax=0, pen="1p,red") + fig.hlines(-40, xmin=0, xmax=180, pen="1p,red") + fig.hlines(-50, xmin=-180, xmax=-90, pen="1p,red") + fig.hlines(-60, xmin=-90, xmax=0, pen="1p,red") + fig.hlines(-70, xmin=0, xmax=90, pen="1p,red") + fig.hlines(-80, xmin=90, xmax=180, pen="1p,red") + return fig + + +@pytest.mark.mpl_image_compare +def test_hlines_polar_projection(): + """ + Plot horizontal lines in polar projection. + """ + fig = Figure() + fig.basemap(region=[0, 360, 0, 1], projection="P15c", frame=True) + fig.hlines(0.1, pen="1p") + fig.hlines(0.2, xmin=0, xmax=360, pen="1p") + fig.hlines(0.3, xmin=0, xmax=180, pen="1p") + fig.hlines(0.4, xmin=180, xmax=360, pen="1p") + fig.hlines(0.5, xmin=0, xmax=90, pen="1p") + fig.hlines(0.6, xmin=90, xmax=180, pen="1p") + fig.hlines(0.7, xmin=180, xmax=270, pen="1p") + fig.hlines(0.8, xmin=270, xmax=360, pen="1p") + return fig + + +def test_hlines_invalid_input(): + """ + Test invalid input for hlines. 
+ """ + fig = Figure() + fig.basemap(region=[0, 10, 0, 6], projection="X10c/6c", frame=True) + with pytest.raises(GMTInvalidInput): + fig.hlines(1, xmin=2, xmax=[3, 4]) + with pytest.raises(GMTInvalidInput): + fig.hlines(1, xmin=[2, 3], xmax=4) + with pytest.raises(GMTInvalidInput): + fig.hlines(1, xmin=[2, 3], xmax=[4, 5]) + with pytest.raises(GMTInvalidInput): + fig.hlines([1, 2], xmin=[2, 3, 4], xmax=3) + with pytest.raises(GMTInvalidInput): + fig.hlines([1, 2], xmin=[2, 3], xmax=[4, 5, 6]) diff --git a/pygmt/tests/test_info.py b/pygmt/tests/test_info.py index 3ac9f27c4e1..d055abb61ec 100644 --- a/pygmt/tests/test_info.py +++ b/pygmt/tests/test_info.py @@ -23,10 +23,7 @@ def test_info(): """ output = info(data=POINTS_DATA) expected_output = ( - f"{POINTS_DATA}: N = 20 " - "<11.5309/61.7074> " - "<-2.9289/7.8648> " - "<0.1412/0.9338>\n" + f"{POINTS_DATA}: N = 20 <11.5309/61.7074> <-2.9289/7.8648> <0.1412/0.9338>\n" ) assert output == expected_output @@ -57,10 +54,7 @@ def test_info_path(table): """ output = info(data=table) expected_output = ( - f"{POINTS_DATA}: N = 20 " - "<11.5309/61.7074> " - "<-2.9289/7.8648> " - "<0.1412/0.9338>\n" + f"{POINTS_DATA}: N = 20 <11.5309/61.7074> <-2.9289/7.8648> <0.1412/0.9338>\n" ) assert output == expected_output diff --git a/pygmt/tests/test_meca.py b/pygmt/tests/test_meca.py index 2e71cc9665d..424486ac408 100644 --- a/pygmt/tests/test_meca.py +++ b/pygmt/tests/test_meca.py @@ -143,6 +143,7 @@ def test_meca_spec_multiple_focalmecha(inputtype): return fig +# TODO(GMT>=6.5.0): Remove the skipif condition for GMT>=6.5.0. @pytest.mark.mpl_image_compare(filename="test_meca_offset.png") @pytest.mark.parametrize( "inputtype", @@ -201,8 +202,9 @@ def test_meca_offset(inputtype): return fig -# Passing event names via pandas doesn't work for GMT<=6.4, thus marked as -# xfail. See https://github.com/GenericMappingTools/pygmt/issues/2524. +# TODO(GMT>=6.5.0): Remove the skipif marker for GMT>=6.5.0. 
+# Passing event names via pandas doesn't work for GMT<=6.4. +# See https://github.com/GenericMappingTools/pygmt/issues/2524. @pytest.mark.mpl_image_compare(filename="test_meca_eventname.png") @pytest.mark.parametrize( "inputtype", diff --git a/pygmt/tests/test_plot.py b/pygmt/tests/test_plot.py index 721b7841307..c2f2b846724 100644 --- a/pygmt/tests/test_plot.py +++ b/pygmt/tests/test_plot.py @@ -467,9 +467,14 @@ def test_plot_datetime(): fig.plot(x=x, y=y, style="a0.2c", pen="1p") # the Python built-in datetime and date - x = [datetime.date(2018, 1, 1), datetime.datetime(2019, 1, 1)] + x = [datetime.date(2018, 1, 1), datetime.datetime(2019, 1, 1, 0, 0, 0)] y = [8.5, 9.5] fig.plot(x=x, y=y, style="i0.2c", pen="1p") + + # Python sequence of pd.Timestamp + x = [pd.Timestamp("2018-01-01"), pd.Timestamp("2019-01-01")] + y = [5.5, 6.5] + fig.plot(x=x, y=y, style="d0.2c", pen="1p") return fig diff --git a/pygmt/tests/test_text.py b/pygmt/tests/test_text.py index 64781c514bc..22b446302b8 100644 --- a/pygmt/tests/test_text.py +++ b/pygmt/tests/test_text.py @@ -6,9 +6,17 @@ import numpy as np import pytest -from pygmt import Figure +from pygmt import Figure, config from pygmt.exceptions import GMTCLibError, GMTInvalidInput from pygmt.helpers import GMTTempFile +from pygmt.helpers.testing import skip_if_no + +try: + import pyarrow as pa + + pa_array = pa.array +except ImportError: + pa_array = None TEST_DATA_DIR = Path(__file__).parent / "data" POINTS_DATA = TEST_DATA_DIR / "points.txt" @@ -48,8 +56,16 @@ def test_text_single_line_of_text(region, projection): @pytest.mark.benchmark -@pytest.mark.mpl_image_compare -def test_text_multiple_lines_of_text(region, projection): +@pytest.mark.mpl_image_compare(filename="test_text_multiple_lines_of_text.png") +@pytest.mark.parametrize( + "array_func", + [ + list, + pytest.param(np.array, id="numpy"), + pytest.param(pa_array, marks=skip_if_no(package="pyarrow"), id="pyarrow"), + ], +) +def test_text_multiple_lines_of_text(region, 
projection, array_func): """ Place multiple lines of text at their respective x, y locations. """ @@ -59,7 +75,7 @@ def test_text_multiple_lines_of_text(region, projection): projection=projection, x=[1.2, 1.6], y=[0.6, 0.3], - text=["This is a line of text", "This is another line of text"], + text=array_func(["This is a line of text", "This is another line of text"]), ) return fig @@ -410,12 +426,17 @@ def test_text_nonstr_text(): return fig -@pytest.mark.mpl_image_compare -def test_text_nonascii(): +@pytest.mark.mpl_image_compare(filename="test_text_nonascii.png") +@pytest.mark.parametrize("encoding", ["ISOLatin1+", "Standard+"]) +def test_text_nonascii(encoding): """ Test passing text strings with non-ascii characters. + + Default PS_CHAR_ENCODING setting should not affect the result. """ fig = Figure() + if encoding == "Standard+": # Temporarily set the PS_CHAR_ENCODING to "Standard+". + config(PS_CHAR_ENCODING="Standard+") fig.basemap(region=[0, 10, 0, 10], projection="X10c", frame=True) fig.text(position="TL", text="position-text:°α") # noqa: RUF001 fig.text(x=1, y=1, text="xytext:°α") # noqa: RUF001 diff --git a/pygmt/tests/test_tilemap.py b/pygmt/tests/test_tilemap.py index 704a96093af..f03c39e3b9b 100644 --- a/pygmt/tests/test_tilemap.py +++ b/pygmt/tests/test_tilemap.py @@ -2,14 +2,24 @@ Test Figure.tilemap. """ +import importlib +from unittest.mock import patch + import pytest from pygmt import Figure -contextily = pytest.importorskip("contextily") -rioxarray = pytest.importorskip("rioxarray") +try: + import contextily + + _HAS_CONTEXTILY = True +except ImportError: + _HAS_CONTEXTILY = False + +_HAS_RIOXARRAY = bool(importlib.util.find_spec("rioxarray")) @pytest.mark.mpl_image_compare +@pytest.mark.skipif(not _HAS_CONTEXTILY, reason="contextily is not installed") def test_tilemap_web_mercator(): """ Create a tilemap plot in Spherical Mercator projection (EPSG:3857). 
@@ -27,9 +37,13 @@ def test_tilemap_web_mercator(): @pytest.mark.benchmark @pytest.mark.mpl_image_compare -def test_tilemap_ogc_wgs84(): +@pytest.mark.skipif( + not (_HAS_CONTEXTILY and _HAS_RIOXARRAY), + reason="contextily and rioxarray are not installed", +) +def test_tilemap_ogc_crs84(): """ - Create a tilemap plot using longitude/latitude coordinates (OGC:WGS84), centred on + Create a tilemap plot using longitude/latitude coordinates (OGC:CRS84), centred on the international date line. """ fig = Figure() @@ -45,6 +59,10 @@ def test_tilemap_ogc_wgs84(): @pytest.mark.mpl_image_compare @pytest.mark.parametrize("no_clip", [False, True]) +@pytest.mark.skipif( + not (_HAS_CONTEXTILY and _HAS_RIOXARRAY), + reason="contextily and rioxarray are not installed", +) def test_tilemap_no_clip(no_clip): """ Create a tilemap plot clipped to the Southern Hemisphere when no_clip is False, but @@ -60,3 +78,34 @@ def test_tilemap_no_clip(no_clip): no_clip=no_clip, ) return fig + + +@pytest.mark.skipif(_HAS_CONTEXTILY, reason="contextily is installed.") +def test_tilemap_no_contextily(): + """ + Raise an ImportError when contextily is not installed. + """ + fig = Figure() + with pytest.raises(ImportError, match="Package `contextily` is required"): + fig.tilemap( + region=[-20000000.0, 20000000.0, -20000000.0, 20000000.0], + zoom=0, + lonlat=False, + frame="afg", + ) + + +@pytest.mark.skipif(_HAS_RIOXARRAY, reason="rioxarray is installed.") +def test_tilemap_no_rioxarray(): + """ + Raise an ImportError when rioxarray is not installed and contextily is installed. + """ + fig = Figure() + # In our CI, contextily and rioxarray are installed together, so we will see the + # error about contextily, not rioxarray. Here we mock contextily as installed, to + # make sure that we see the rioxarray error message when rioxarray is not installed. 
+ with patch("pygmt.datasets.tile_map._HAS_CONTEXTILY", True): + with pytest.raises(ImportError, match="Package `rioxarray` is required"): + fig.tilemap( + region=[-180.0, 180.0, -90, 90], zoom=0, lonlat=True, frame="afg" + ) diff --git a/pygmt/tests/test_vlines.py b/pygmt/tests/test_vlines.py new file mode 100644 index 00000000000..21aff1c06d5 --- /dev/null +++ b/pygmt/tests/test_vlines.py @@ -0,0 +1,102 @@ +""" +Tests for Figure.vlines. +""" + +import pytest +from pygmt import Figure +from pygmt.exceptions import GMTInvalidInput + + +@pytest.mark.mpl_image_compare +def test_vlines_one_line(): + """ + Plot one vertical line. + """ + fig = Figure() + fig.basemap(region=[0, 10, 0, 10], projection="X10c/10c", frame=True) + fig.vlines(1) + fig.vlines(2, ymin=1) + fig.vlines(3, ymax=9) + fig.vlines(4, ymin=3, ymax=8) + fig.vlines(5, ymin=4, ymax=8, pen="1p,blue", label="Line at x=5") + fig.vlines(6, ymin=5, ymax=7, pen="1p,red", label="Line at x=6") + fig.legend() + return fig + + +@pytest.mark.mpl_image_compare +def test_vlines_multiple_lines(): + """ + Plot multiple vertical lines. + """ + fig = Figure() + fig.basemap(region=[0, 16, 0, 10], projection="X10c/10c", frame=True) + fig.vlines([1, 2]) + fig.vlines([3, 4, 5], ymin=[1, 2, 3]) + fig.vlines([6, 7, 8], ymax=[7, 8, 9]) + fig.vlines([9, 10], ymin=[1, 2], ymax=[9, 10]) + fig.vlines([11, 12], ymin=1, ymax=8, pen="1p,blue", label="Lines at x=11,12") + fig.vlines( + [13, 14], ymin=[3, 4], ymax=[7, 8], pen="1p,red", label="Lines at x=13,14" + ) + fig.legend() + return fig + + +@pytest.mark.mpl_image_compare +def test_vlines_clip(): + """ + Plot vertical lines with clipping or not. + """ + fig = Figure() + fig.basemap(region=[0, 10, 0, 4], projection="X10c/4c", frame=True) + fig.vlines(1, ymin=-1, ymax=5) + fig.vlines(2, ymin=-1, ymax=5, no_clip=True) + return fig + + +@pytest.mark.mpl_image_compare +def test_vlines_geographic_global(): + """ + Plot vertical lines in geographic coordinates. 
+ """ + fig = Figure() + fig.basemap(region=[-180, 180, -90, 90], projection="R15c", frame="a30g30") + fig.vlines(30, pen="1p") + fig.vlines(90, ymin=-60, pen="1p,blue") + fig.vlines(-90, ymax=60, pen="1p,blue") + fig.vlines(120, ymin=-60, ymax=60, pen="1p,blue") + return fig + + +@pytest.mark.mpl_image_compare +def test_vlines_polar_projection(): + """ + Plot vertical lines in polar projection. + """ + fig = Figure() + fig.basemap(region=[0, 360, 0, 1], projection="P15c", frame=True) + fig.vlines(0, pen="1p") + fig.vlines(30, ymin=0, ymax=1, pen="1p") + fig.vlines(60, ymin=0.5, pen="1p") + fig.vlines(90, ymax=0.5, pen="1p") + fig.vlines(120, ymin=0.25, ymax=0.75, pen="1p") + return fig + + +def test_vlines_invalid_input(): + """ + Test invalid input for vlines. + """ + fig = Figure() + fig.basemap(region=[0, 10, 0, 6], projection="X10c/6c", frame=True) + with pytest.raises(GMTInvalidInput): + fig.vlines(1, ymin=2, ymax=[3, 4]) + with pytest.raises(GMTInvalidInput): + fig.vlines(1, ymin=[2, 3], ymax=4) + with pytest.raises(GMTInvalidInput): + fig.vlines(1, ymin=[2, 3], ymax=[4, 5]) + with pytest.raises(GMTInvalidInput): + fig.vlines([1, 2], ymin=[2, 3, 4], ymax=3) + with pytest.raises(GMTInvalidInput): + fig.vlines([1, 2], ymin=[2, 3], ymax=[4, 5, 6]) diff --git a/pygmt/tests/test_x2sys_cross.py b/pygmt/tests/test_x2sys_cross.py index c72cca04420..50b9d5f3f86 100644 --- a/pygmt/tests/test_x2sys_cross.py +++ b/pygmt/tests/test_x2sys_cross.py @@ -3,6 +3,7 @@ """ import copy +import platform import sys from pathlib import Path from tempfile import TemporaryDirectory @@ -37,6 +38,7 @@ def fixture_tracks(): return [dataframe.query(expr="z > -20")] # reduce size of dataset +# TODO(GMT>=6.5.0): Remove the xfail marker for the upstream bug fixed in GMT 6.5.0. 
@pytest.mark.usefixtures("mock_x2sys_home") @pytest.mark.xfail( condition=Version(__gmt_version__) < Version("6.5.0"), @@ -66,6 +68,7 @@ def test_x2sys_cross_input_file_output_file(): npt.assert_allclose(result["i_1"].max(), 82945.9370, rtol=1.0e-4) +# TODO(GMT>=6.5.0): Remove the xfail marker for the upstream bug fixed in GMT 6.5.0. @pytest.mark.usefixtures("mock_x2sys_home") @pytest.mark.xfail( condition=Version(__gmt_version__) < Version("6.5.0"), @@ -244,6 +247,7 @@ def test_x2sys_cross_invalid_tracks_input_type(tracks): x2sys_cross(tracks=[invalid_tracks]) +# TODO(GMT>=6.5.0): Remove the xfail marker for the upstream bug fixed in GMT 6.5.0. @pytest.mark.usefixtures("mock_x2sys_home") @pytest.mark.xfail( condition=Version(__gmt_version__) < Version("6.5.0"), @@ -267,7 +271,7 @@ def test_x2sys_cross_region_interpolation_numpoints(): ) assert isinstance(output, pd.DataFrame) - if sys.platform == "darwin": + if platform.machine() in {"aarch64", "arm64"}: assert output.shape == (3894, 12) # Check crossover errors (z_X) and mean value of observables (z_M) npt.assert_allclose(output.z_X.mean(), -138.23215, rtol=1e-4) @@ -279,6 +283,7 @@ def test_x2sys_cross_region_interpolation_numpoints(): npt.assert_allclose(output.z_M.mean(), -2896.875915, rtol=1e-4) +# TODO(GMT>=6.5.0): Remove the xfail marker for the upstream bug fixed in GMT 6.5.0. 
@pytest.mark.usefixtures("mock_x2sys_home") @pytest.mark.xfail( condition=Version(__gmt_version__) < Version("6.5.0"), @@ -294,7 +299,7 @@ def test_x2sys_cross_trackvalues(): output = x2sys_cross(tracks=["@tut_ship.xyz"], tag=tag, trackvalues=True) assert isinstance(output, pd.DataFrame) - if sys.platform == "darwin": + if platform.machine() in {"aarch64", "arm64"}: assert output.shape == (14374, 12) # Check mean of track 1 values (z_1) and track 2 values (z_2) npt.assert_allclose(output.z_1.mean(), -2422.973372, rtol=1e-4) diff --git a/pyproject.toml b/pyproject.toml index 8400da80625..61c6a541fef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta" name = "pygmt" description = "A Python interface for the Generic Mapping Tools" readme = "README.md" -requires-python = ">=3.10" +requires-python = ">=3.11" authors = [{name = "The PyGMT Developers", email = "pygmt.team@gmail.com"}] keywords = [ "cartography", @@ -24,13 +24,13 @@ classifiers = [ "Intended Audience :: Education", "Topic :: Scientific/Engineering", "Topic :: Software Development :: Libraries", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "License :: OSI Approved :: BSD License", ] dependencies = [ - "numpy>=1.24", + "numpy>=1.25", "pandas>=2.0", "xarray>=2023.04", "netCDF4", @@ -48,10 +48,11 @@ all = [ ] [project.urls] -homepage = "https://www.pygmt.org" -documentation = "https://www.pygmt.org" -repository = "https://github.com/GenericMappingTools/pygmt" -changelog = "https://www.pygmt.org/latest/changes.html" +"Homepage" = "https://www.pygmt.org" +"Documentation" = "https://www.pygmt.org" +"Source Code" = "https://github.com/GenericMappingTools/pygmt" +"Changelog" = "https://www.pygmt.org/latest/changes.html" +"Issue Tracker" = "https://github.com/GenericMappingTools/pygmt/issues" [tool.setuptools] platforms = ["Any"] @@ -66,7 
+67,7 @@ local_scheme = "node-and-date" fallback_version = "999.999.999+unknown" [tool.codespell] -ignore-words-list = "astroid,oints,reenable,tripel,trough" +ignore-words-list = "astroid,oints,reenable,tripel,trough,ND" [tool.coverage.run] omit = ["*/tests/*", "*pygmt/__init__.py"] @@ -94,6 +95,7 @@ select = [ "COM", # flake8-commas "D", # pydocstyle "E", # pycodestyle + "EM", # flake8-errmsg "EXE", # flake8-executable "F", # pyflakes "FA", # flake8-future-annotations @@ -117,8 +119,10 @@ select = [ "S", # flake8-bandit "SIM", # flake8-simplify "T20", # flake8-print - "TCH", # flake8-type-checking + "TC", # flake8-type-checking + "TD", # flake8-todos "TID", # flake8-tidy-imports + "TRY", # tryceratops "UP", # pyupgrade "W", # pycodestyle warnings "YTT", # flake8-2020 @@ -145,6 +149,7 @@ ignore = [ "RET504", # Allow variable assignment and return immediately for readability "S603", # Allow method calls that initiate a subprocess without a shell "SIM117", # Allow nested `with` statements + "TD003", # Allow TODO comments without associated issue link ] preview = true explicit-preview-rules = true @@ -155,7 +160,10 @@ known-third-party = ["pygmt"] [tool.ruff.lint.per-file-ignores] "__init__.py" = ["F401"] # Ignore `F401` (unused-import) in all `__init__.py` files "*/tests/test_*.py" = ["S101"] # Ignore `S101` (use of assert) in all tests files -"examples/**/*.py" = ["T201"] # Allow `print` in examples +"examples/**/*.py" = [ # Ignore rules in examples + "B018", # Allow useless expressions in Jupyter Notebooks + "T201", # Allow `print` statements +] [tool.ruff.lint.pycodestyle] max-doc-length = 88 diff --git a/requirements.txt b/requirements.txt index c74fbb0c511..61f6282dc7d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ # Required packages -numpy>=1.24 +numpy>=1.25 pandas>=2.0 xarray>=2023.04 netCDF4