From 836fa7e7508c63ac53b70d274adf742cff7a21c5 Mon Sep 17 00:00:00 2001 From: delucchi-cmu Date: Thu, 29 Jun 2023 05:04:15 -0400 Subject: [PATCH] Mechanical application of black formatting --- docs/notebooks/catalog_size_inspection.ipynb | 12 +++---- docs/notebooks/cone_search.ipynb | 2 +- pyproject.toml | 10 +++++- src/.pylintrc | 2 +- .../association_catalog.py | 26 +++++---------- src/hipscat/catalog/catalog.py | 4 +-- .../catalog/dataset/base_catalog_info.py | 4 +-- .../catalog/dataset/catalog_info_factory.py | 12 ++----- src/hipscat/catalog/dataset/dataset.py | 4 +-- src/hipscat/catalog/partition_info.py | 10 ++---- src/hipscat/inspection/almanac.py | 24 ++++---------- src/hipscat/inspection/almanac_info.py | 20 +++-------- src/hipscat/inspection/visualize_catalog.py | 9 ++--- src/hipscat/io/__init__.py | 28 +++++++++++----- src/hipscat/io/file_io/__init__.py | 31 ++++++++++++----- src/hipscat/io/file_io/file_io.py | 16 +++------ src/hipscat/io/paths.py | 21 ++++-------- src/hipscat/io/write_metadata.py | 9 ++--- src/hipscat/pixel_math/__init__.py | 12 ++----- src/hipscat/pixel_math/healpix_pixel.py | 4 +-- .../pixel_math/healpix_pixel_convertor.py | 8 ++--- src/hipscat/pixel_math/hipscat_id.py | 12 ++----- src/hipscat/pixel_math/margin_bounding.py | 28 ++++------------ src/hipscat/pixel_math/partition_stats.py | 20 +++-------- src/hipscat/pixel_tree/pixel_node.py | 12 ++----- src/hipscat/pixel_tree/pixel_tree.py | 4 +-- src/hipscat/pixel_tree/pixel_tree_builder.py | 22 ++++--------- tests/.pylintrc | 2 +- tests/conftest.py | 4 +-- .../test_association_catalog.py | 25 ++++---------- .../test_association_catalog_info.py | 13 +++----- .../test_partition_join_info.py | 12 ++----- .../dataset/test_catalog_info_factory.py | 21 +++--------- tests/hipscat/catalog/dataset/test_dataset.py | 4 +-- .../test_margin_cache_catalog_info.py | 11 ++----- .../test_source_catalog_info.py | 3 +- tests/hipscat/catalog/test_catalog_info.py | 4 +-- tests/hipscat/inspection/test_almanac.py | 25 +++----------- tests/hipscat/inspection/test_almanac_info.py | 8 ++--- tests/hipscat/io/conftest.py | 3 +- tests/hipscat/io/test_paths.py | 4 +-- tests/hipscat/io/test_write_metadata.py | 31 +++++++---------- tests/hipscat/pixel_math/test_hipscat_id.py | 8 ++--- .../pixel_math/test_partition_stats.py | 33 +++++-------------- .../hipscat/pixel_math/test_pixel_margins.py | 10 ++---- tests/hipscat/pixel_tree/test_pixel_node.py | 28 ++++------------ .../pixel_tree/test_pixel_tree_builder.py | 12 ++----- .../util/test_healpix_pixel_convertor.py | 4 +-- 48 files changed, 207 insertions(+), 424 deletions(-) diff --git a/docs/notebooks/catalog_size_inspection.ipynb b/docs/notebooks/catalog_size_inspection.ipynb index ca2c554d..98008543 100644 --- a/docs/notebooks/catalog_size_inspection.ipynb +++ b/docs/notebooks/catalog_size_inspection.ipynb @@ -38,7 +38,7 @@ "import os\n", "\n", "### Change this path!!!\n", - "catalog_dir = '../../tests/data/small_sky_order1'\n", + "catalog_dir = \"../../tests/data/small_sky_order1\"\n", "\n", "### ----------------\n", "### You probably won't have to change anything from here.\n", @@ -48,11 +48,11 @@ "info_frame = catalog.get_pixels().copy()\n", "\n", "for index, partition in info_frame.iterrows():\n", - " file_name = result = paths.pixel_catalog_file(catalog_dir, partition['Norder'], partition['Npix'])\n", + " file_name = result = paths.pixel_catalog_file(catalog_dir, partition[\"Norder\"], partition[\"Npix\"])\n", " info_frame.loc[index, \"size_on_disk\"] = os.path.getsize(file_name)\n", 
"\n", "info_frame = info_frame.astype(int)\n", - "info_frame[\"gbs\"] = info_frame[\"size_on_disk\"]/(1024 * 1024 * 1024)" + "info_frame[\"gbs\"] = info_frame[\"size_on_disk\"] / (1024 * 1024 * 1024)" ] }, { @@ -89,11 +89,11 @@ "source": [ "print(f'healpix orders: {info_frame[\"Norder\"].unique()}')\n", "print(f'num partitions: {len(info_frame[\"Npix\"])}')\n", - "print('------')\n", + "print(\"------\")\n", "print(f'min rows: {info_frame[\"num_rows\"].min()}')\n", "print(f'max rows: {info_frame[\"num_rows\"].max()}')\n", "print(f'row ratio: {info_frame[\"num_rows\"].max()/info_frame[\"num_rows\"].min():.2f}')\n", - "print('------')\n", + "print(\"------\")\n", "print(f'min size_on_disk: {info_frame[\"gbs\"].min():.2f}')\n", "print(f'max size_on_disk: {info_frame[\"gbs\"].max():.2f}')\n", "print(f'size_on_disk ratio: {info_frame[\"gbs\"].max()/info_frame[\"gbs\"].min():.2f}')\n", @@ -127,7 +127,7 @@ "\n", "plt.hist(info_frame[\"gbs\"])\n", "\n", - "bins = [0,.5,1,2,100]\n", + "bins = [0, 0.5, 1, 2, 100]\n", "labels = [\"small-ish\", \"sweet-spot\", \"big-ish\", \"too-big\"]\n", "hist = np.histogram(info_frame[\"gbs\"], bins=bins)[0]\n", "pcts = hist / len(info_frame)\n", diff --git a/docs/notebooks/cone_search.ipynb b/docs/notebooks/cone_search.ipynb index 05c5f117..18717939 100644 --- a/docs/notebooks/cone_search.ipynb +++ b/docs/notebooks/cone_search.ipynb @@ -21,7 +21,7 @@ "catalog_path = \"\"\n", "ra = 24.7035278\n", "dec = -9.3653083\n", - "radius = 2 # arcsec" + "radius = 2 # arcsec" ] }, { diff --git a/pyproject.toml b/pyproject.toml index 49f29ab8..dbe0bd1b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,4 +61,12 @@ build-backend = "setuptools.build_meta" write_to = "src/hipscat/_version.py" [tool.coverage.run] -omit=["src/hipscat/_version.py"] \ No newline at end of file +omit=["src/hipscat/_version.py"] + +[tool.black] +line-length = 110 +target-version = ["py38"] + +[tool.isort] +profile = "black" +line_length = 110 \ No newline at end of file diff --git a/src/.pylintrc b/src/.pylintrc index 41dc55d6..387e4914 100644 --- a/src/.pylintrc +++ b/src/.pylintrc @@ -329,7 +329,7 @@ indent-after-paren=4 indent-string=' ' # Maximum number of characters on a single line. -max-line-length=100 +max-line-length=110 # Maximum number of lines in a module. 
max-module-lines=1000 diff --git a/src/hipscat/catalog/association_catalog/association_catalog.py b/src/hipscat/catalog/association_catalog/association_catalog.py index d408d21d..2219ea41 100644 --- a/src/hipscat/catalog/association_catalog/association_catalog.py +++ b/src/hipscat/catalog/association_catalog/association_catalog.py @@ -3,10 +3,8 @@ import pandas as pd from hipscat.catalog import CatalogType -from hipscat.catalog.association_catalog.association_catalog_info import \ - AssociationCatalogInfo -from hipscat.catalog.association_catalog.partition_join_info import \ - PartitionJoinInfo +from hipscat.catalog.association_catalog.association_catalog_info import AssociationCatalogInfo +from hipscat.catalog.association_catalog.partition_join_info import PartitionJoinInfo from hipscat.catalog.dataset.dataset import Dataset from hipscat.io import FilePointer, paths @@ -25,15 +23,13 @@ class AssociationCatalog(Dataset): JoinPixelInputTypes = Union[list, pd.DataFrame, PartitionJoinInfo] def __init__( - self, - catalog_info: CatalogInfoClass, - join_pixels: JoinPixelInputTypes, - catalog_path=None, + self, + catalog_info: CatalogInfoClass, + join_pixels: JoinPixelInputTypes, + catalog_path=None, ) -> None: if not catalog_info.catalog_type == CatalogType.ASSOCIATION: - raise ValueError( - "Catalog info `catalog_type` must be 'association'" - ) + raise ValueError("Catalog info `catalog_type` must be 'association'") super().__init__(catalog_info, catalog_path) self.join_info = self._get_partition_join_info_from_pixels(join_pixels) @@ -47,9 +43,7 @@ def get_join_pixels(self) -> pd.DataFrame: return self.join_info.data_frame @staticmethod - def _get_partition_join_info_from_pixels( - join_pixels: JoinPixelInputTypes - ) -> PartitionJoinInfo: + def _get_partition_join_info_from_pixels(join_pixels: JoinPixelInputTypes) -> PartitionJoinInfo: if isinstance(join_pixels, PartitionJoinInfo): return join_pixels if isinstance(join_pixels, pd.DataFrame): @@ -57,9 +51,7 @@ def _get_partition_join_info_from_pixels( raise TypeError("join_pixels must be of type PartitionJoinInfo or DataFrame") @classmethod - def _read_args( - cls, catalog_base_dir: FilePointer - ) -> Tuple[CatalogInfoClass, JoinPixelInputTypes]: + def _read_args(cls, catalog_base_dir: FilePointer) -> Tuple[CatalogInfoClass, JoinPixelInputTypes]: args = super()._read_args(catalog_base_dir) partition_join_info_file = paths.get_partition_join_info_pointer(catalog_base_dir) partition_join_info = PartitionJoinInfo.read_from_file(partition_join_info_file) diff --git a/src/hipscat/catalog/catalog.py b/src/hipscat/catalog/catalog.py index 35d832f3..ba6f5188 100644 --- a/src/hipscat/catalog/catalog.py +++ b/src/hipscat/catalog/catalog.py @@ -93,6 +93,4 @@ def _check_files_exist(cls, catalog_base_dir: FilePointer): super()._check_files_exist(catalog_base_dir) partition_info_file = paths.get_partition_info_pointer(catalog_base_dir) if not file_io.does_file_or_directory_exist(partition_info_file): - raise FileNotFoundError( - f"No partition info found where expected: {str(partition_info_file)}" - ) + raise FileNotFoundError(f"No partition info found where expected: {str(partition_info_file)}") diff --git a/src/hipscat/catalog/dataset/base_catalog_info.py b/src/hipscat/catalog/dataset/base_catalog_info.py index b2c48335..1cfe0864 100644 --- a/src/hipscat/catalog/dataset/base_catalog_info.py +++ b/src/hipscat/catalog/dataset/base_catalog_info.py @@ -62,6 +62,4 @@ def _check_required_fields(self): fields_dict = dataclasses.asdict(self) for field_name in 
self.required_fields: if field_name not in fields_dict or fields_dict[field_name] is None: - raise ValueError( - f"{field_name} is required in the Catalog Info and a value must be provided" - ) + raise ValueError(f"{field_name} is required in the Catalog Info and a value must be provided") diff --git a/src/hipscat/catalog/dataset/catalog_info_factory.py b/src/hipscat/catalog/dataset/catalog_info_factory.py index aba56e82..01652225 100644 --- a/src/hipscat/catalog/dataset/catalog_info_factory.py +++ b/src/hipscat/catalog/dataset/catalog_info_factory.py @@ -1,16 +1,12 @@ import dataclasses from typing import Optional -from hipscat.catalog.association_catalog.association_catalog_info import ( - AssociationCatalogInfo, -) +from hipscat.catalog.association_catalog.association_catalog_info import AssociationCatalogInfo from hipscat.catalog.catalog_info import CatalogInfo from hipscat.catalog.catalog_type import CatalogType from hipscat.catalog.dataset.base_catalog_info import BaseCatalogInfo from hipscat.catalog.index.index_catalog_info import IndexCatalogInfo -from hipscat.catalog.margin_cache.margin_cache_catalog_info import ( - MarginCacheCatalogInfo, -) +from hipscat.catalog.margin_cache.margin_cache_catalog_info import MarginCacheCatalogInfo from hipscat.catalog.source_catalog.source_catalog_info import SourceCatalogInfo from hipscat.io import FilePointer, file_io, paths @@ -24,9 +20,7 @@ """Map of catalog types to their expected subclass of BaseCatalogInfo.""" -def create_catalog_info( - keywords: dict, catalog_type: Optional[CatalogType] = None -) -> BaseCatalogInfo: +def create_catalog_info(keywords: dict, catalog_type: Optional[CatalogType] = None) -> BaseCatalogInfo: """Generate a typed catalog info object from the type specified explicitly or using ``catalog_type`` keyword. diff --git a/src/hipscat/catalog/dataset/dataset.py b/src/hipscat/catalog/dataset/dataset.py index 9502a2c0..47940c33 100644 --- a/src/hipscat/catalog/dataset/dataset.py +++ b/src/hipscat/catalog/dataset/dataset.py @@ -71,6 +71,4 @@ def _check_files_exist(cls, catalog_base_dir: FilePointer): raise FileNotFoundError(f"No directory exists at {str(catalog_base_dir)}") catalog_info_file = paths.get_catalog_info_pointer(catalog_base_dir) if not file_io.does_file_or_directory_exist(catalog_info_file): - raise FileNotFoundError( - f"No catalog info found where expected: {str(catalog_info_file)}" - ) + raise FileNotFoundError(f"No catalog info found where expected: {str(catalog_info_file)}") diff --git a/src/hipscat/catalog/partition_info.py b/src/hipscat/catalog/partition_info.py index a2354240..55bfe6e2 100644 --- a/src/hipscat/catalog/partition_info.py +++ b/src/hipscat/catalog/partition_info.py @@ -35,13 +35,11 @@ def get_healpix_pixels(self) -> List[HealpixPixel]: def get_highest_order(self) -> int: """Get the highest healpix order for the dataset. - + Returns: int representing highest order. 
""" - highest_order = np.max( - self.data_frame[self.METADATA_ORDER_COLUMN_NAME].values - ) + highest_order = np.max(self.data_frame[self.METADATA_ORDER_COLUMN_NAME].values) return highest_order @@ -56,9 +54,7 @@ def read_from_file(cls, partition_info_file: FilePointer): A `PartitionInfo` object with the data from the file """ if not file_io.does_file_or_directory_exist(partition_info_file): - raise FileNotFoundError( - f"No partition info found where expected: {str(partition_info_file)}" - ) + raise FileNotFoundError(f"No partition info found where expected: {str(partition_info_file)}") data_frame = file_io.load_csv_to_pandas(partition_info_file) return cls(data_frame) diff --git a/src/hipscat/inspection/almanac.py b/src/hipscat/inspection/almanac.py index adeadfc2..ee890e11 100644 --- a/src/hipscat/inspection/almanac.py +++ b/src/hipscat/inspection/almanac.py @@ -99,9 +99,7 @@ def _init_catalog_objects(self): else: full_name = catalog_info.catalog_name if full_name in self.entries: - warnings.warn( - f"Duplicate catalog name ({full_name}). Try using namespaces." - ) + warnings.warn(f"Duplicate catalog name ({full_name}). Try using namespaces.") else: self.entries[full_name] = catalog_info self.dir_to_catalog_name[catalog_info.catalog_path] = full_name @@ -121,9 +119,7 @@ def _init_catalog_links(self): elif catalog_entry.catalog_type == CatalogType.SOURCE: ## Source catalogs MAY indicate their primary object catalog. if catalog_entry.primary: - object_catalog = self._get_linked_catalog( - catalog_entry.primary, catalog_entry.namespace - ) + object_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace) if not object_catalog: warnings.warn( f"source catalog {catalog_entry.catalog_name} missing " @@ -135,9 +131,7 @@ def _init_catalog_links(self): object_catalog.sources.append(catalog_entry) elif catalog_entry.catalog_type == CatalogType.ASSOCIATION: ## Association table MUST have a primary and join catalog - primary_catalog = self._get_linked_catalog( - catalog_entry.primary, catalog_entry.namespace - ) + primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace) if not primary_catalog: warnings.warn( f"association table {catalog_entry.catalog_name} missing " @@ -161,9 +155,7 @@ def _init_catalog_links(self): join_catalog.associations_right.append(catalog_entry) elif catalog_entry.catalog_type == CatalogType.MARGIN: ## Margin catalogs MUST have a primary catalog - primary_catalog = self._get_linked_catalog( - catalog_entry.primary, catalog_entry.namespace - ) + primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace) if not primary_catalog: warnings.warn( f"margin table {catalog_entry.catalog_name} missing " @@ -174,9 +166,7 @@ def _init_catalog_links(self): primary_catalog.margins.append(catalog_entry) elif catalog_entry.catalog_type == CatalogType.INDEX: ## Index tables MUST have a primary catalog - primary_catalog = self._get_linked_catalog( - catalog_entry.primary, catalog_entry.namespace - ) + primary_catalog = self._get_linked_catalog(catalog_entry.primary, catalog_entry.namespace) if not primary_catalog: warnings.warn( f"index table {catalog_entry.catalog_name} missing " @@ -256,6 +246,4 @@ def get_catalog(self, catalog_name: str) -> Dataset: This will load the ``catalog_info.join`` and other relevant metadata files from disk.""" - return Dataset.read_from_hipscat( - self.get_almanac_info(catalog_name=catalog_name).catalog_path - ) + return 
Dataset.read_from_hipscat(self.get_almanac_info(catalog_name=catalog_name).catalog_path) diff --git a/src/hipscat/inspection/almanac_info.py b/src/hipscat/inspection/almanac_info.py index acefc382..54e34728 100644 --- a/src/hipscat/inspection/almanac_info.py +++ b/src/hipscat/inspection/almanac_info.py @@ -42,20 +42,10 @@ class AlmanacInfo: def __post_init__(self): if len(self.catalog_info): - self.catalog_info_object = catalog_info_factory.create_catalog_info( - self.catalog_info - ) - if ( - self.catalog_info - and "primary_catalog" in self.catalog_info - and not self.primary - ): + self.catalog_info_object = catalog_info_factory.create_catalog_info(self.catalog_info) + if self.catalog_info and "primary_catalog" in self.catalog_info and not self.primary: self.primary = self.catalog_info["primary_catalog"] - if ( - self.catalog_info - and "join_catalog" in self.catalog_info - and not self.join - ): + if self.catalog_info and "join_catalog" in self.catalog_info and not self.join: self.join = self.catalog_info["join_catalog"] ## Allows use of $HIPSCAT_DEFAULT_DIR in paths @@ -84,9 +74,7 @@ def get_default_dir() -> str: @classmethod def from_catalog_dir(cls, catalog_base_dir: str) -> Self: """Create almanac information from the catalog information found at the target directory""" - catalog_info = catalog_info_factory.from_catalog_dir( - catalog_base_dir=catalog_base_dir - ) + catalog_info = catalog_info_factory.from_catalog_dir(catalog_base_dir=catalog_base_dir) args = { "catalog_path": catalog_base_dir, "catalog_name": catalog_info.catalog_name, diff --git a/src/hipscat/inspection/visualize_catalog.py b/src/hipscat/inspection/visualize_catalog.py index 1da6e15c..0b1fa3de 100644 --- a/src/hipscat/inspection/visualize_catalog.py +++ b/src/hipscat/inspection/visualize_catalog.py @@ -17,7 +17,7 @@ def _read_point_map(catalog_base_dir): Args: catalog_base_dir: path to a catalog Returns: - one-dimensional numpy array of long integers where the value at each index + one-dimensional numpy array of long integers where the value at each index corresponds to the number of objects found at the healpix pixel. 
""" map_file_pointer = paths.get_point_map_file_pointer(catalog_base_dir) @@ -64,14 +64,11 @@ def plot_pixels(catalog: Catalog, projection="moll", draw_map=True): order_map = np.full(hp.order2npix(max_order), hp.pixelfunc.UNSEEN) for _, pixel in pixels.iterrows(): - explosion_factor = 4 ** ( - max_order - pixel[PartitionInfo.METADATA_ORDER_COLUMN_NAME] - ) + explosion_factor = 4 ** (max_order - pixel[PartitionInfo.METADATA_ORDER_COLUMN_NAME]) exploded_pixels = [ *range( pixel[PartitionInfo.METADATA_PIXEL_COLUMN_NAME] * explosion_factor, - (pixel[PartitionInfo.METADATA_PIXEL_COLUMN_NAME] + 1) - * explosion_factor, + (pixel[PartitionInfo.METADATA_PIXEL_COLUMN_NAME] + 1) * explosion_factor, ) ] order_map[exploded_pixels] = pixel[PartitionInfo.METADATA_ORDER_COLUMN_NAME] diff --git a/src/hipscat/io/__init__.py b/src/hipscat/io/__init__.py index 55276cf4..dc3c0124 100644 --- a/src/hipscat/io/__init__.py +++ b/src/hipscat/io/__init__.py @@ -1,11 +1,23 @@ """Utilities for reading and writing catalog files""" from .file_io import FilePointer, get_file_pointer_from_path -from .paths import (create_hive_directory_name, create_hive_parquet_file_name, - get_catalog_info_pointer, get_common_metadata_pointer, - get_parquet_metadata_pointer, get_partition_info_pointer, - get_point_map_file_pointer, get_provenance_pointer, - pixel_association_directory, pixel_association_file, - pixel_catalog_file, pixel_directory) -from .write_metadata import (write_catalog_info, write_parquet_metadata, - write_partition_info, write_provenance_info) +from .paths import ( + create_hive_directory_name, + create_hive_parquet_file_name, + get_catalog_info_pointer, + get_common_metadata_pointer, + get_parquet_metadata_pointer, + get_partition_info_pointer, + get_point_map_file_pointer, + get_provenance_pointer, + pixel_association_directory, + pixel_association_file, + pixel_catalog_file, + pixel_directory, +) +from .write_metadata import ( + write_catalog_info, + write_parquet_metadata, + write_partition_info, + write_provenance_info, +) diff --git a/src/hipscat/io/file_io/__init__.py b/src/hipscat/io/file_io/__init__.py index 6f8a8b9f..d5c3eb21 100644 --- a/src/hipscat/io/file_io/__init__.py +++ b/src/hipscat/io/file_io/__init__.py @@ -1,9 +1,22 @@ -from .file_io import (load_csv_to_pandas, load_json_file, make_directory, - read_fits_image, read_parquet_metadata, remove_directory, - write_dataframe_to_csv, write_fits_image, - write_parquet_metadata, write_string_to_file) -from .file_pointer import (FilePointer, append_paths_to_pointer, - directory_has_contents, - does_file_or_directory_exist, - find_files_matching_path, get_directory_contents, - get_file_pointer_from_path, is_regular_file) +from .file_io import ( + load_csv_to_pandas, + load_json_file, + make_directory, + read_fits_image, + read_parquet_metadata, + remove_directory, + write_dataframe_to_csv, + write_fits_image, + write_parquet_metadata, + write_string_to_file, +) +from .file_pointer import ( + FilePointer, + append_paths_to_pointer, + directory_has_contents, + does_file_or_directory_exist, + find_files_matching_path, + get_directory_contents, + get_file_pointer_from_path, + is_regular_file, +) diff --git a/src/hipscat/io/file_io/file_io.py b/src/hipscat/io/file_io/file_io.py index 3962ec80..f6a5a0a3 100644 --- a/src/hipscat/io/file_io/file_io.py +++ b/src/hipscat/io/file_io/file_io.py @@ -38,9 +38,7 @@ def remove_directory(file_pointer: FilePointer, ignore_errors=False): shutil.rmtree(file_pointer, ignore_errors=ignore_errors) -def write_string_to_file( - 
file_pointer: FilePointer, string: str, encoding: str = "utf-8" -): +def write_string_to_file(file_pointer: FilePointer, string: str, encoding: str = "utf-8"): """Write a string to a text file Args: @@ -79,9 +77,7 @@ def load_csv_to_pandas(file_pointer: FilePointer, **kwargs) -> pd.DataFrame: return pd.read_csv(file_pointer, **kwargs) -def write_dataframe_to_csv( - dataframe: pd.DataFrame, file_pointer: FilePointer, **kwargs -): +def write_dataframe_to_csv(dataframe: pd.DataFrame, file_pointer: FilePointer, **kwargs): """Write a pandas DataFrame to a CSV file Args: @@ -102,9 +98,7 @@ def read_parquet_metadata(file_pointer: FilePointer, **kwargs) -> pq.FileMetaDat return pq.read_metadata(file_pointer, **kwargs) -def write_parquet_metadata( - schema: Any, file_pointer: FilePointer, metadata_collector: list = None, **kwargs -): +def write_parquet_metadata(schema: Any, file_pointer: FilePointer, metadata_collector: list = None, **kwargs): """Write a metadata only parquet file from a schema Args: @@ -113,9 +107,7 @@ def write_parquet_metadata( metadata_collector: where to collect metadata information **kwargs: additional arguments to be passed to pyarrow.parquet.write_metadata """ - pq.write_metadata( - schema, file_pointer, metadata_collector=metadata_collector, **kwargs - ) + pq.write_metadata(schema, file_pointer, metadata_collector=metadata_collector, **kwargs) def read_fits_image(map_file_pointer: FilePointer): diff --git a/src/hipscat/io/paths.py b/src/hipscat/io/paths.py index 019f9b69..17375ec8 100644 --- a/src/hipscat/io/paths.py +++ b/src/hipscat/io/paths.py @@ -1,8 +1,7 @@ """Methods for creating partitioned data paths""" from __future__ import annotations -from hipscat.io.file_io.file_pointer import (FilePointer, - append_paths_to_pointer) +from hipscat.io.file_io.file_pointer import FilePointer, append_paths_to_pointer ORDER_DIRECTORY_PREFIX = "Norder" DIR_DIRECTORY_PREFIX = "Dir" @@ -47,9 +46,7 @@ def pixel_directory( """ norder = int(pixel_order) if pixel_number is None and directory_number is None: - raise ValueError( - "One of pixel_number or directory_number is required to create pixel directory" - ) + raise ValueError("One of pixel_number or directory_number is required to create pixel directory") if directory_number is not None: ndir = directory_number else: @@ -62,9 +59,7 @@ def pixel_directory( ) -def pixel_catalog_file( - catalog_base_dir: FilePointer, pixel_order: int, pixel_number: int -) -> FilePointer: +def pixel_catalog_file(catalog_base_dir: FilePointer, pixel_order: int, pixel_number: int) -> FilePointer: """Create path *pointer* for a pixel catalog file. This will not create the directory or file. The catalog file name will take the HiPS standard form of: @@ -201,15 +196,12 @@ def create_hive_directory_name(base_dir, partition_token_names, partition_token_ correspond to the token name parts. """ partition_tokens = [ - f"{name}={value}" - for name, value in zip(partition_token_names, partition_token_values) + f"{name}={value}" for name, value in zip(partition_token_names, partition_token_values) ] return append_paths_to_pointer(base_dir, *partition_tokens) -def create_hive_parquet_file_name( - base_dir, partition_token_names, partition_token_values -): +def create_hive_parquet_file_name(base_dir, partition_token_names, partition_token_values): """Create path *pointer* for a single parquet with hive partitioning naming. The file name will have the form of: @@ -223,8 +215,7 @@ def create_hive_parquet_file_name( correspond to the token name parts. 
""" partition_tokens = [ - f"{name}={value}" - for name, value in zip(partition_token_names, partition_token_values) + f"{name}={value}" for name, value in zip(partition_token_names, partition_token_values) ] return f"{append_paths_to_pointer(base_dir, *partition_tokens)}.parquet" diff --git a/src/hipscat/io/write_metadata.py b/src/hipscat/io/write_metadata.py index cc792491..3c5101f8 100644 --- a/src/hipscat/io/write_metadata.py +++ b/src/hipscat/io/write_metadata.py @@ -8,6 +8,7 @@ import numpy as np import pandas as pd import pyarrow.dataset as pds + from hipscat.io import file_io, paths @@ -35,9 +36,7 @@ def write_catalog_info(catalog_base_dir, dataset_info): write_json_file(metadata, catalog_info_pointer) -def write_provenance_info( - catalog_base_dir:file_io.FilePointer, dataset_info, tool_args: dict -): +def write_provenance_info(catalog_base_dir: file_io.FilePointer, dataset_info, tool_args: dict): """Write a provenance_info.json file with all assorted catalog creation metadata Args: @@ -77,9 +76,7 @@ def write_partition_info( "Norder", "Npix", ] - data_frame["num_rows"] = [ - pixel_info[0] for pixel_info in destination_healpix_pixel_map.values() - ] + data_frame["num_rows"] = [pixel_info[0] for pixel_info in destination_healpix_pixel_map.values()] data_frame["Dir"] = [int(x / 10_000) * 10_000 for x in data_frame["Npix"]] # Reorder the columns to match full path, and force to integer types. diff --git a/src/hipscat/pixel_math/__init__.py b/src/hipscat/pixel_math/__init__.py index 8df72a11..cc2e44e0 100644 --- a/src/hipscat/pixel_math/__init__.py +++ b/src/hipscat/pixel_math/__init__.py @@ -1,6 +1,7 @@ """Utilities for performing fun math on healpix pixels""" from .healpix_pixel import HealpixPixel +from .healpix_pixel_convertor import HealpixInputTypes, get_healpix_pixel from .hipscat_id import compute_hipscat_id, hipscat_id_to_healpix from .margin_bounding import ( check_margin_bounds, @@ -16,13 +17,4 @@ generate_destination_pixel_map, generate_histogram, ) -from .pixel_margins import ( - get_edge, - get_margin, - get_truncated_margin_pixels, - pixel_is_polar, -) -from .healpix_pixel_convertor import ( - get_healpix_pixel, - HealpixInputTypes, -) +from .pixel_margins import get_edge, get_margin, get_truncated_margin_pixels, pixel_is_polar diff --git a/src/hipscat/pixel_math/healpix_pixel.py b/src/hipscat/pixel_math/healpix_pixel.py index e2499043..7df6204a 100644 --- a/src/hipscat/pixel_math/healpix_pixel.py +++ b/src/hipscat/pixel_math/healpix_pixel.py @@ -22,9 +22,7 @@ def __post_init__(self) -> None: pixel: HEALPix pixel number in NESTED ordering scheme """ if self.order > HIPSCAT_ID_HEALPIX_ORDER: - raise ValueError( - f"HEALPix order cannot be greater than {HIPSCAT_ID_HEALPIX_ORDER}" - ) + raise ValueError(f"HEALPix order cannot be greater than {HIPSCAT_ID_HEALPIX_ORDER}") def __str__(self) -> str: return f"Order: {self.order}, Pixel: {self.pixel}" diff --git a/src/hipscat/pixel_math/healpix_pixel_convertor.py b/src/hipscat/pixel_math/healpix_pixel_convertor.py index 34e868f3..aee4b482 100644 --- a/src/hipscat/pixel_math/healpix_pixel_convertor.py +++ b/src/hipscat/pixel_math/healpix_pixel_convertor.py @@ -17,12 +17,8 @@ def get_healpix_pixel(pixel: HealpixInputTypes) -> HealpixPixel: if isinstance(pixel, tuple): if len(pixel) != 2: - raise ValueError( - "Tuple must contain two values: HEALPix order and HEALPix pixel number" - ) + raise ValueError("Tuple must contain two values: HEALPix order and HEALPix pixel number") return HealpixPixel(order=pixel[0], pixel=pixel[1]) if 
isinstance(pixel, HealpixPixel): return pixel - raise TypeError( - "pixel must either be of type `HealpixPixel` or tuple (order, pixel)" - ) + raise TypeError("pixel must either be of type `HealpixPixel` or tuple (order, pixel)") diff --git a/src/hipscat/pixel_math/hipscat_id.py b/src/hipscat/pixel_math/hipscat_id.py index 9149145f..8e70438f 100644 --- a/src/hipscat/pixel_math/hipscat_id.py +++ b/src/hipscat/pixel_math/hipscat_id.py @@ -38,9 +38,7 @@ def compute_hipscat_id(ra_values, dec_values): ## Construct the bit-shifted healpix segment value_count = len(ra_values) - mapped_pixels = hp.ang2pix( - 2**HIPSCAT_ID_HEALPIX_ORDER, ra_values, dec_values, nest=True, lonlat=True - ) + mapped_pixels = hp.ang2pix(2**HIPSCAT_ID_HEALPIX_ORDER, ra_values, dec_values, nest=True, lonlat=True) ## We sort to put pixels next to each other that will need to be counted. ## This simplifies the counter logic, as we can subtract the index where @@ -49,17 +47,13 @@ def compute_hipscat_id(ra_values, dec_values): mapped_pixels = mapped_pixels[sort_index] ## Construct the counter. - _, unique_indices, unique_inverse = np.unique( - mapped_pixels, return_inverse=True, return_index=True - ) + _, unique_indices, unique_inverse = np.unique(mapped_pixels, return_inverse=True, return_index=True) unique_indices = unique_indices.astype(np.uint64) boring_number_index = np.arange(value_count, dtype=np.uint64) offset_counter = boring_number_index - unique_indices[unique_inverse] ## Add counter to shifted pixel, and map back to the original, unsorted, values - shifted_pixels = mapped_pixels.astype(np.uint64) << ( - 64 - (4 + 2 * HIPSCAT_ID_HEALPIX_ORDER) - ) + shifted_pixels = mapped_pixels.astype(np.uint64) << (64 - (4 + 2 * HIPSCAT_ID_HEALPIX_ORDER)) shifted_pixels = shifted_pixels + offset_counter unsort_index = np.argsort(sort_index, kind="stable") diff --git a/src/hipscat/pixel_math/margin_bounding.py b/src/hipscat/pixel_math/margin_bounding.py index 5c292c3c..2a2cd00a 100644 --- a/src/hipscat/pixel_math/margin_bounding.py +++ b/src/hipscat/pixel_math/margin_bounding.py @@ -46,18 +46,14 @@ def get_margin_bounds_and_wcs(pixel_order, pix, scale, step=10): """ # pylint: disable=too-many-locals # pylint: disable=too-many-statements - corners = hp.vec2dir( - hp.boundaries(2**pixel_order, pix, step=1, nest=True), lonlat=True - ) + corners = hp.vec2dir(hp.boundaries(2**pixel_order, pix, step=1, nest=True), lonlat=True) min_ra = corners[0][1] # western corner max_ra = corners[0][3] # eastern corner min_dec = corners[1][2] # southern corner max_dec = corners[1][0] # northern corner - pixel_boundaries = hp.vec2dir( - hp.boundaries(2**pixel_order, pix, step=step, nest=True), lonlat=True - ) + pixel_boundaries = hp.vec2dir(hp.boundaries(2**pixel_order, pix, step=step, nest=True), lonlat=True) # if the eastern corner is less than the western corner, then we've hit the # ra rollover and need to normalize to 0 -> 360. 
@@ -75,9 +71,7 @@ def get_margin_bounds_and_wcs(pixel_order, pix, scale, step=10): translate_lon = centroid_lon - (centroid_lon * scale) translate_lat = centroid_lat - (centroid_lat * scale) - affine_matrix = np.array( - [[scale, 0, translate_lon], [0, scale, translate_lat], [0, 0, 1]] - ) + affine_matrix = np.array([[scale, 0, translate_lon], [0, scale, translate_lat], [0, 0, 1]]) # convert the orignal boundary coordinates into # a homogenous coordinate space (3-dim) @@ -147,9 +141,7 @@ def get_margin_bounds_and_wcs(pixel_order, pix, scale, step=10): wcs_margin.wcs.cdelt = [pix_size, pix_size] wcs_margin.array_shape = [ra_naxis, dec_naxis] - vertices = SkyCoord( - transformed_bounding_box[0], transformed_bounding_box[1], unit="deg" - ) + vertices = SkyCoord(transformed_bounding_box[0], transformed_bounding_box[1], unit="deg") sky_region = PolygonSkyRegion(vertices=vertices) polygon_region = sky_region.to_pixel(wcs_margin) polygons = [(polygon_region, wcs_margin)] @@ -180,9 +172,7 @@ def check_margin_bounds(r_asc, dec, poly_and_wcs): sky_coords = SkyCoord(r_asc, dec, unit="deg") bound_vals = [] for poly, wcs in poly_and_wcs: - x_coords, y_coords = world_coordinate_system.utils.skycoord_to_pixel( - sky_coords, wcs - ) + x_coords, y_coords = world_coordinate_system.utils.skycoord_to_pixel(sky_coords, wcs) pix_coords = PixCoord(x_coords, y_coords) vals = poly.contains(pix_coords) bound_vals.append(vals) @@ -190,9 +180,7 @@ def check_margin_bounds(r_asc, dec, poly_and_wcs): # pylint: disable=too-many-locals -def check_polar_margin_bounds( - r_asc, dec, order, pix, margin_order, margin_threshold, step=1000 -): +def check_polar_margin_bounds(r_asc, dec, order, pix, margin_order, margin_threshold, step=1000): """Given a set of ra and dec values that are around one of the poles, determine if they are within `margin_threshold` of a provided partition pixel. This method helps us solve the edge cases that @@ -227,9 +215,7 @@ def check_polar_margin_bounds( # get the approximate number of boundary samples to cover a highest_k pixel # on the boundary of the main pixel boundary_range = int((marg_pix_res / part_pix_res) * step) - pixel_boundaries = hp.vec2dir( - hp.boundaries(2**order, pix, step=step, nest=True), lonlat=True - ) + pixel_boundaries = hp.vec2dir(hp.boundaries(2**order, pix, step=step, nest=True), lonlat=True) # to optimize our code, we only want to take boundary samples from the part # of the pixel that directly abuts the polar margin pixels. 
diff --git a/src/hipscat/pixel_math/partition_stats.py b/src/hipscat/pixel_math/partition_stats.py index a0c666a0..6745d67a 100644 --- a/src/hipscat/pixel_math/partition_stats.py +++ b/src/hipscat/pixel_math/partition_stats.py @@ -56,9 +56,7 @@ def generate_histogram( return histogram_result -def generate_alignment( - histogram, highest_order=10, lowest_order=0, threshold=1_000_000 -): +def generate_alignment(histogram, highest_order=10, lowest_order=0, threshold=1_000_000): """Generate alignment from high order pixels to those of equal or lower order We may initially find healpix pixels at order 10, but after aggregating up to the pixel @@ -218,20 +216,14 @@ def compute_pixel_map(histogram, highest_order=10, lowest_order=0, threshold=1_0 parent_alignment = np.repeat(orders_at_threshold, 4, axis=0) orders_at_threshold = [ parent_order if parent_order is not None else new_order - for parent_order, new_order in zip( - parent_alignment, new_orders_at_threshold - ) + for parent_order, new_order in zip(parent_alignment, new_orders_at_threshold) ] ## Zip up the orders and the pixel numbers. healpix_pixels = np.array( [ - HealpixPixel(order, pixel >> 2 * (highest_order - order)) - if order is not None - else None - for order, pixel in zip( - orders_at_threshold, np.arange(0, len(orders_at_threshold)) - ) + HealpixPixel(order, pixel >> 2 * (highest_order - order)) if order is not None else None + for order, pixel in zip(orders_at_threshold, np.arange(0, len(orders_at_threshold))) ] ) @@ -282,9 +274,7 @@ def generate_constant_pixel_map(histogram, constant_healpix_order): raise ValueError("histogram is not the right size") non_zero_indexes = np.nonzero(histogram)[0] - healpix_pixels = [ - HealpixPixel(constant_healpix_order, pixel) for pixel in non_zero_indexes - ] + healpix_pixels = [HealpixPixel(constant_healpix_order, pixel) for pixel in non_zero_indexes] value_list = [(histogram[pixel], [pixel]) for pixel in non_zero_indexes] diff --git a/src/hipscat/pixel_tree/pixel_node.py b/src/hipscat/pixel_tree/pixel_node.py index 15cc3860..3361fb96 100644 --- a/src/hipscat/pixel_tree/pixel_node.py +++ b/src/hipscat/pixel_tree/pixel_node.py @@ -48,9 +48,7 @@ def __init__( if parent is None: raise ValueError("Inner and leaf nodes must have a parent") if pixel.order < 0 or pixel.pixel < 0: - raise ValueError( - "Inner and leaf nodes must have an order and pixel number >= 0" - ) + raise ValueError("Inner and leaf nodes must have an order and pixel number >= 0") if parent is not None and parent.hp_order != pixel.order - 1: raise ValueError("Parent node must be at order one less than current node") @@ -90,16 +88,12 @@ def add_child_node(self, child: PixelNode): OverflowError: The node already has the maximum amount of children """ if not child.parent == self: - raise ValueError( - "Child node to add must have the node it is adding to as its parent" - ) + raise ValueError("Child node to add must have the node it is adding to as its parent") if len(self.children) >= self._NODE_TYPE_MAX_CHILDREN[self.node_type]: raise OverflowError("Node already has the maximum amount of children") - insert_index = bisect( - list(map(lambda node: node.hp_pixel, self.children)), child.hp_pixel - ) + insert_index = bisect(list(map(lambda node: node.hp_pixel, self.children)), child.hp_pixel) self.children.insert(insert_index, child) def get_all_leaf_descendants(self) -> List[PixelNode]: diff --git a/src/hipscat/pixel_tree/pixel_tree.py b/src/hipscat/pixel_tree/pixel_tree.py index 2341b6c3..53d88b5a 100644 --- 
a/src/hipscat/pixel_tree/pixel_tree.py +++ b/src/hipscat/pixel_tree/pixel_tree.py @@ -20,9 +20,7 @@ class PixelTree: 12 base HEALPix pixels """ - def __init__( - self, root_pixel: PixelNode, pixels: dict[int, dict[int, PixelNode]] - ) -> None: + def __init__(self, root_pixel: PixelNode, pixels: dict[int, dict[int, PixelNode]]) -> None: """Initialises a tree object from the nodes in the tree Args: diff --git a/src/hipscat/pixel_tree/pixel_tree_builder.py b/src/hipscat/pixel_tree/pixel_tree_builder.py index 49d90f33..756f6b40 100644 --- a/src/hipscat/pixel_tree/pixel_tree_builder.py +++ b/src/hipscat/pixel_tree/pixel_tree_builder.py @@ -5,11 +5,10 @@ import pandas as pd from hipscat.catalog.partition_info import PartitionInfo +from hipscat.pixel_math.healpix_pixel_convertor import HealpixInputTypes, get_healpix_pixel from hipscat.pixel_tree.pixel_node import PixelNode from hipscat.pixel_tree.pixel_node_type import PixelNodeType from hipscat.pixel_tree.pixel_tree import PixelTree -from hipscat.pixel_math.healpix_pixel_convertor import (HealpixInputTypes, - get_healpix_pixel) class PixelTreeBuilder: @@ -102,9 +101,7 @@ def _create_tree_from_partition_info_df(self, partition_info_df: pd.DataFrame): PixelNodeType.LEAF, ) - def create_node_and_parent_if_not_exist( - self, pixel: HealpixInputTypes, node_type: PixelNodeType - ): + def create_node_and_parent_if_not_exist(self, pixel: HealpixInputTypes, node_type: PixelNodeType): """Creates a node and adds to `self.pixels` in the tree, and recursively creates parent node if parent does not exist @@ -115,9 +112,7 @@ def create_node_and_parent_if_not_exist( """ pixel = get_healpix_pixel(pixel) if self.contains(pixel): - raise ValueError( - "Incorrectly configured catalog: catalog contains duplicate pixels" - ) + raise ValueError("Incorrectly configured catalog: catalog contains duplicate pixels") if pixel.order == 0: self.create_node(pixel, node_type, self.root_pixel) @@ -126,23 +121,18 @@ def create_node_and_parent_if_not_exist( parent_order = pixel.order - 1 parent_pixel = pixel.pixel >> 2 if not self.contains((parent_order, parent_pixel)): - self.create_node_and_parent_if_not_exist( - (parent_order, parent_pixel), PixelNodeType.INNER - ) + self.create_node_and_parent_if_not_exist((parent_order, parent_pixel), PixelNodeType.INNER) parent = self.pixels[parent_order][parent_pixel] if parent.node_type != PixelNodeType.INNER: raise ValueError( - "Incorrectly configured catalog: catalog contains pixels defined at " - "multiple orders" + "Incorrectly configured catalog: catalog contains pixels defined at multiple orders" ) self.create_node(pixel, node_type, parent) - def create_node( - self, pixel: HealpixInputTypes, node_type: PixelNodeType, parent: PixelNode - ): + def create_node(self, pixel: HealpixInputTypes, node_type: PixelNodeType, parent: PixelNode): """Create a node and add to `self.pixels` in the tree Args: diff --git a/tests/.pylintrc b/tests/.pylintrc index 902510bc..b382eb0b 100644 --- a/tests/.pylintrc +++ b/tests/.pylintrc @@ -329,7 +329,7 @@ indent-after-paren=4 indent-string=' ' # Maximum number of characters on a single line. -max-line-length=100 +max-line-length=110 # Maximum number of lines in a module. 
max-module-lines=1000 diff --git a/tests/conftest.py b/tests/conftest.py index e1c31d43..3954d303 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,9 +6,7 @@ import pytest from hipscat.catalog import PartitionInfo -from hipscat.catalog.association_catalog.association_catalog_info import ( - AssociationCatalogInfo, -) +from hipscat.catalog.association_catalog.association_catalog_info import AssociationCatalogInfo from hipscat.catalog.association_catalog.partition_join_info import PartitionJoinInfo from hipscat.catalog.catalog_info import CatalogInfo from hipscat.catalog.dataset.base_catalog_info import BaseCatalogInfo diff --git a/tests/hipscat/catalog/association_catalog/test_association_catalog.py b/tests/hipscat/catalog/association_catalog/test_association_catalog.py index 1aedfd5c..564d8291 100644 --- a/tests/hipscat/catalog/association_catalog/test_association_catalog.py +++ b/tests/hipscat/catalog/association_catalog/test_association_catalog.py @@ -3,19 +3,16 @@ import pandas as pd import pytest + from hipscat.catalog import CatalogType from hipscat.catalog.association_catalog.association_catalog import AssociationCatalog from hipscat.catalog.association_catalog.partition_join_info import PartitionJoinInfo def test_init_catalog(association_catalog_info, association_catalog_join_pixels): - catalog = AssociationCatalog( - association_catalog_info, association_catalog_join_pixels - ) + catalog = AssociationCatalog(association_catalog_info, association_catalog_join_pixels) assert catalog.catalog_name == association_catalog_info.catalog_name - pd.testing.assert_frame_equal( - catalog.get_join_pixels(), association_catalog_join_pixels - ) + pd.testing.assert_frame_equal(catalog.get_join_pixels(), association_catalog_join_pixels) assert catalog.catalog_info == association_catalog_info @@ -36,14 +33,10 @@ def test_wrong_join_pixels_type(association_catalog_info): AssociationCatalog(association_catalog_info, "test") -def test_different_join_pixels_type( - association_catalog_info, association_catalog_join_pixels -): +def test_different_join_pixels_type(association_catalog_info, association_catalog_join_pixels): partition_join_info = PartitionJoinInfo(association_catalog_join_pixels) catalog = AssociationCatalog(association_catalog_info, partition_join_info) - pd.testing.assert_frame_equal( - catalog.get_join_pixels(), association_catalog_join_pixels - ) + pd.testing.assert_frame_equal(catalog.get_join_pixels(), association_catalog_join_pixels) def test_read_from_file(association_catalog_path, association_catalog_join_pixels): @@ -51,9 +44,7 @@ def test_read_from_file(association_catalog_path, association_catalog_join_pixel assert catalog.on_disk assert catalog.catalog_path == association_catalog_path assert len(catalog.get_join_pixels()) == 4 - pd.testing.assert_frame_equal( - catalog.get_join_pixels(), association_catalog_join_pixels - ) + pd.testing.assert_frame_equal(catalog.get_join_pixels(), association_catalog_join_pixels) info = catalog.catalog_info assert info.primary_catalog == "small_sky" @@ -62,9 +53,7 @@ def test_read_from_file(association_catalog_path, association_catalog_join_pixel assert info.join_column == "id" -def test_empty_directory( - tmp_path, association_catalog_info_data, association_catalog_join_pixels -): +def test_empty_directory(tmp_path, association_catalog_info_data, association_catalog_join_pixels): """Test loading empty or incomplete data""" ## Path doesn't exist with pytest.raises(FileNotFoundError): diff --git 
a/tests/hipscat/catalog/association_catalog/test_association_catalog_info.py b/tests/hipscat/catalog/association_catalog/test_association_catalog_info.py index 05fe2520..9d604f87 100644 --- a/tests/hipscat/catalog/association_catalog/test_association_catalog_info.py +++ b/tests/hipscat/catalog/association_catalog/test_association_catalog_info.py @@ -3,15 +3,12 @@ import pytest -from hipscat.catalog.association_catalog.association_catalog_info import \ - AssociationCatalogInfo +from hipscat.catalog.association_catalog.association_catalog_info import AssociationCatalogInfo from hipscat.catalog.catalog_type import CatalogType from hipscat.io import file_io -def test_association_catalog_info( - association_catalog_info_data, assert_catalog_info_matches_dict -): +def test_association_catalog_info(association_catalog_info_data, assert_catalog_info_matches_dict): info = AssociationCatalogInfo(**association_catalog_info_data) assert_catalog_info_matches_dict(info, association_catalog_info_data) @@ -24,9 +21,7 @@ def test_str(association_catalog_info_data): assert str(cat_info) == correct_string -def test_read_from_file( - association_catalog_info_file, assert_catalog_info_matches_dict -): +def test_read_from_file(association_catalog_info_file, assert_catalog_info_matches_dict): cat_info_fp = file_io.get_file_pointer_from_path(association_catalog_info_file) catalog_info = AssociationCatalogInfo.read_from_metadata_file(cat_info_fp) for column in [ @@ -70,4 +65,4 @@ def test_wrong_type(association_catalog_info_data, catalog_info_data): with pytest.raises(ValueError, match="type association"): init_data = association_catalog_info_data.copy() init_data["catalog_type"] = CatalogType.OBJECT - AssociationCatalogInfo(**init_data) \ No newline at end of file + AssociationCatalogInfo(**init_data) diff --git a/tests/hipscat/catalog/association_catalog/test_partition_join_info.py b/tests/hipscat/catalog/association_catalog/test_partition_join_info.py index 277b203e..6b03abe2 100644 --- a/tests/hipscat/catalog/association_catalog/test_partition_join_info.py +++ b/tests/hipscat/catalog/association_catalog/test_partition_join_info.py @@ -7,9 +7,7 @@ def test_init(association_catalog_join_pixels): partition_join_info = PartitionJoinInfo(association_catalog_join_pixels) - pd.testing.assert_frame_equal( - partition_join_info.data_frame, association_catalog_join_pixels - ) + pd.testing.assert_frame_equal(partition_join_info.data_frame, association_catalog_join_pixels) def test_wrong_columns(association_catalog_join_pixels): @@ -20,11 +18,7 @@ def test_wrong_columns(association_catalog_join_pixels): PartitionJoinInfo(join_pixels) -def test_read_from_file( - association_catalog_partition_join_file, association_catalog_join_pixels -): - file_pointer = file_io.get_file_pointer_from_path( - association_catalog_partition_join_file - ) +def test_read_from_file(association_catalog_partition_join_file, association_catalog_join_pixels): + file_pointer = file_io.get_file_pointer_from_path(association_catalog_partition_join_file) info = PartitionJoinInfo.read_from_file(file_pointer) pd.testing.assert_frame_equal(info.data_frame, association_catalog_join_pixels) diff --git a/tests/hipscat/catalog/dataset/test_catalog_info_factory.py b/tests/hipscat/catalog/dataset/test_catalog_info_factory.py index f3bffa78..5c4bfc6c 100644 --- a/tests/hipscat/catalog/dataset/test_catalog_info_factory.py +++ b/tests/hipscat/catalog/dataset/test_catalog_info_factory.py @@ -1,18 +1,11 @@ import pytest -from 
hipscat.catalog.association_catalog.association_catalog_info import ( - AssociationCatalogInfo, -) +from hipscat.catalog.association_catalog.association_catalog_info import AssociationCatalogInfo from hipscat.catalog.catalog_info import CatalogInfo from hipscat.catalog.dataset.base_catalog_info import BaseCatalogInfo -from hipscat.catalog.dataset.catalog_info_factory import ( - create_catalog_info, - from_catalog_dir, -) +from hipscat.catalog.dataset.catalog_info_factory import create_catalog_info, from_catalog_dir from hipscat.catalog.index.index_catalog_info import IndexCatalogInfo -from hipscat.catalog.margin_cache.margin_cache_catalog_info import ( - MarginCacheCatalogInfo, -) +from hipscat.catalog.margin_cache.margin_cache_catalog_info import MarginCacheCatalogInfo from hipscat.catalog.source_catalog.source_catalog_info import SourceCatalogInfo @@ -45,9 +38,7 @@ def test_create_catalog_info_association(association_catalog_info_data): assert isinstance(catalog_info, AssociationCatalogInfo) -def test_create_catalog_info_source( - source_catalog_info, source_catalog_info_with_extra -): +def test_create_catalog_info_source(source_catalog_info, source_catalog_info_with_extra): catalog_info = create_catalog_info(source_catalog_info) assert catalog_info.catalog_name == "test_source" assert isinstance(catalog_info, BaseCatalogInfo) @@ -106,9 +97,7 @@ def test_from_catalog_dir_index(index_catalog_info_file): assert isinstance(catalog_info, IndexCatalogInfo) -def test_from_catalog_dir_association( - association_catalog_path, association_catalog_info_file -): +def test_from_catalog_dir_association(association_catalog_path, association_catalog_info_file): catalog_info = from_catalog_dir(association_catalog_path) assert catalog_info.catalog_name == "small_sky_to_small_sky_order1" assert isinstance(catalog_info, BaseCatalogInfo) diff --git a/tests/hipscat/catalog/dataset/test_dataset.py b/tests/hipscat/catalog/dataset/test_dataset.py index 2a593598..10baf96d 100644 --- a/tests/hipscat/catalog/dataset/test_dataset.py +++ b/tests/hipscat/catalog/dataset/test_dataset.py @@ -20,9 +20,7 @@ def test_dataset_wrong_catalog_info(base_catalog_info_data): Dataset(base_catalog_info_data) -def test_read_from_hipscat( - dataset_path, base_catalog_info_file, assert_catalog_info_matches_dict -): +def test_read_from_hipscat(dataset_path, base_catalog_info_file, assert_catalog_info_matches_dict): dataset = Dataset.read_from_hipscat(dataset_path) assert dataset.on_disk assert dataset.catalog_path == dataset_path diff --git a/tests/hipscat/catalog/margin_cache/test_margin_cache_catalog_info.py b/tests/hipscat/catalog/margin_cache/test_margin_cache_catalog_info.py index 0664036b..d95cd401 100644 --- a/tests/hipscat/catalog/margin_cache/test_margin_cache_catalog_info.py +++ b/tests/hipscat/catalog/margin_cache/test_margin_cache_catalog_info.py @@ -4,14 +4,11 @@ import pytest from hipscat.catalog.catalog_type import CatalogType -from hipscat.catalog.margin_cache.margin_cache_catalog_info import \ - MarginCacheCatalogInfo +from hipscat.catalog.margin_cache.margin_cache_catalog_info import MarginCacheCatalogInfo from hipscat.io import file_io -def test_margin_cache_catalog_info( - margin_cache_catalog_info, assert_catalog_info_matches_dict -): +def test_margin_cache_catalog_info(margin_cache_catalog_info, assert_catalog_info_matches_dict): info = MarginCacheCatalogInfo(**margin_cache_catalog_info) assert_catalog_info_matches_dict(info, margin_cache_catalog_info) @@ -24,9 +21,7 @@ def test_str(margin_cache_catalog_info): 
assert str(cat_info) == correct_string -def test_read_from_file( - margin_cache_catalog_info_file, assert_catalog_info_matches_dict -): +def test_read_from_file(margin_cache_catalog_info_file, assert_catalog_info_matches_dict): cat_info_fp = file_io.get_file_pointer_from_path(margin_cache_catalog_info_file) catalog_info = MarginCacheCatalogInfo.read_from_metadata_file(cat_info_fp) for column in [ diff --git a/tests/hipscat/catalog/source_catalog/test_source_catalog_info.py b/tests/hipscat/catalog/source_catalog/test_source_catalog_info.py index 529e3622..c96c2344 100644 --- a/tests/hipscat/catalog/source_catalog/test_source_catalog_info.py +++ b/tests/hipscat/catalog/source_catalog/test_source_catalog_info.py @@ -4,8 +4,7 @@ import pytest from hipscat.catalog.catalog_type import CatalogType -from hipscat.catalog.source_catalog.source_catalog_info import \ - SourceCatalogInfo +from hipscat.catalog.source_catalog.source_catalog_info import SourceCatalogInfo from hipscat.io import file_io diff --git a/tests/hipscat/catalog/test_catalog_info.py b/tests/hipscat/catalog/test_catalog_info.py index 123d53e9..ec9572f2 100644 --- a/tests/hipscat/catalog/test_catalog_info.py +++ b/tests/hipscat/catalog/test_catalog_info.py @@ -10,9 +10,7 @@ def test_catalog_info(catalog_info_data, assert_catalog_info_matches_dict): assert_catalog_info_matches_dict(info, catalog_info_data) -def test_catalog_info_defaults( - base_catalog_info_data, assert_catalog_info_matches_dict -): +def test_catalog_info_defaults(base_catalog_info_data, assert_catalog_info_matches_dict): info = CatalogInfo(**base_catalog_info_data) actual_catalog_info = base_catalog_info_data.copy() actual_catalog_info["epoch"] = "J2000" diff --git a/tests/hipscat/inspection/test_almanac.py b/tests/hipscat/inspection/test_almanac.py index 3263c274..5e6261ad 100644 --- a/tests/hipscat/inspection/test_almanac.py +++ b/tests/hipscat/inspection/test_almanac.py @@ -74,14 +74,7 @@ def test_catalogs_filters(default_almanac): assert len(default_almanac.catalogs(include_deprecated=True)) == 9 ## all object and source (skip association/index/etc) - assert ( - len( - default_almanac.catalogs( - include_deprecated=True, types=["object", "source"] - ) - ) - == 6 - ) + assert len(default_almanac.catalogs(include_deprecated=True, types=["object", "source"])) == 6 ## all active object and source assert len(default_almanac.catalogs(types=["object", "source"])) == 5 @@ -98,9 +91,7 @@ def test_linked_catalogs_object(default_almanac): source_almanac = object_almanac.sources[0] assert source_almanac.catalog_name == "small_sky_source_catalog" - source_almanac = default_almanac.get_almanac_info( - object_almanac.sources[0].catalog_name - ) + source_almanac = default_almanac.get_almanac_info(object_almanac.sources[0].catalog_name) assert source_almanac.catalog_name == "small_sky_source_catalog" ## TODO - this could use some more direct API. 
@@ -121,9 +112,7 @@ def test_linked_catalogs_source(default_almanac, test_data_dir):
 
     ## This source catalog has no object catalog, *and that's ok*
     new_almanac = Almanac(
-        dirs=os.path.join(
-            test_data_dir, "almanac_exception", "standalone_source_catalog.yml"
-        )
+        dirs=os.path.join(test_data_dir, "almanac_exception", "standalone_source_catalog.yml")
     )
     source_almanac = new_almanac.get_almanac_info("just_the_small_sky_source_catalog")
     assert len(source_almanac.objects) == 0
@@ -132,9 +121,7 @@ def test_linked_catalogs_source(default_almanac, test_data_dir):
 
 def test_linked_catalogs_association(default_almanac):
     """Check that read almanac entries are fully linked to one another."""
-    association_almanac = default_almanac.get_almanac_info(
-        "small_sky_to_small_sky_order1"
-    )
+    association_almanac = default_almanac.get_almanac_info("small_sky_to_small_sky_order1")
     assert association_almanac.catalog_name == "small_sky_to_small_sky_order1"
 
     primary_almanac = association_almanac.primary_link
@@ -185,9 +172,7 @@ def test_get_catalog(default_almanac):
 
 def test_get_catalog_exceptions(test_data_dir):
     """Test that we can create almanac entries, where catalogs might not exist."""
-    bad_catalog_path_file = os.path.join(
-        test_data_dir, "almanac_exception", "bad_catalog_path.yml"
-    )
+    bad_catalog_path_file = os.path.join(test_data_dir, "almanac_exception", "bad_catalog_path.yml")
     bad_links = Almanac(include_default_dir=False, dirs=bad_catalog_path_file)
     assert len(bad_links.catalogs()) == 1
 
diff --git a/tests/hipscat/inspection/test_almanac_info.py b/tests/hipscat/inspection/test_almanac_info.py
index 81379ec5..32559e06 100644
--- a/tests/hipscat/inspection/test_almanac_info.py
+++ b/tests/hipscat/inspection/test_almanac_info.py
@@ -29,9 +29,7 @@ def test_write_to_file(tmp_path, association_catalog_path):
 
     almanac_info.write_to_file(tmp_path, default_dir=False)
 
-    new_info = AlmanacInfo.from_file(
-        os.path.join(tmp_path, "small_sky_to_small_sky_order1.yml")
-    )
+    new_info = AlmanacInfo.from_file(os.path.join(tmp_path, "small_sky_to_small_sky_order1.yml"))
 
     assert new_info.catalog_name == almanac_info.catalog_name
 
@@ -68,9 +66,7 @@ def test_write_to_bad_file(tmp_path, small_sky_dir):
         almanac_info.write_to_file(default_dir=True)
 
 
-def test_association_fields(
-    association_catalog_path, index_catalog_info_file, small_sky_dir
-):
+def test_association_fields(association_catalog_path, index_catalog_info_file, small_sky_dir):
     """Test additional text fields tables with primary/join relationships."""
     almanac_info = AlmanacInfo.from_catalog_dir(association_catalog_path)
     assert almanac_info.catalog_name == "small_sky_to_small_sky_order1"
diff --git a/tests/hipscat/io/conftest.py b/tests/hipscat/io/conftest.py
index 50ddbfa3..e328ab54 100644
--- a/tests/hipscat/io/conftest.py
+++ b/tests/hipscat/io/conftest.py
@@ -40,8 +40,7 @@ def assert_text_file_matches(expected_lines, file_name):
         ), f"files not the same length ({len(contents)} vs {len(expected_lines)})"
         for i, expected in enumerate(expected_lines):
             assert re.match(expected, contents[i]), (
-                f"files do not match at line {i+1} "
-                f"(actual: [{contents[i]}] vs expected: [{expected}])"
+                f"files do not match at line {i+1} " f"(actual: [{contents[i]}] vs expected: [{expected}])"
             )
 
         metadata_file.close()
diff --git a/tests/hipscat/io/test_paths.py b/tests/hipscat/io/test_paths.py
index 75356e52..4001690d 100644
--- a/tests/hipscat/io/test_paths.py
+++ b/tests/hipscat/io/test_paths.py
@@ -15,9 +15,7 @@ def test_pixel_directory():
 def test_pixel_directory_number():
     """Simple case with sensical inputs"""
     expected = "/foo/Norder=0/Dir=0"
-    result = paths.pixel_directory(
-        "/foo", pixel_order=0, pixel_number=5, directory_number=0
-    )
+    result = paths.pixel_directory("/foo", pixel_order=0, pixel_number=5, directory_number=0)
     assert result == expected
 
     result = paths.pixel_directory("/foo", pixel_order=0, directory_number=0)
diff --git a/tests/hipscat/io/test_write_metadata.py b/tests/hipscat/io/test_write_metadata.py
index 75129024..1dbb9771 100644
--- a/tests/hipscat/io/test_write_metadata.py
+++ b/tests/hipscat/io/test_write_metadata.py
@@ -3,14 +3,15 @@
 import os
 import shutil
 
-import hipscat.io.file_io as file_io
-import hipscat.io.write_metadata as io
-import hipscat.pixel_math as hist
 import numpy.testing as npt
 import pandas as pd
 import pyarrow as pa
 import pyarrow.parquet as pq
 import pytest
+
+import hipscat.io.file_io as file_io
+import hipscat.io.write_metadata as io
+import hipscat.pixel_math as hist
 from hipscat.pixel_math.healpix_pixel import HealpixPixel
 
 
@@ -95,7 +96,9 @@ def test_write_provenance_info(assert_text_file_matches, tmp_path, catalog_info)
         "input_file_names": ["file1", "file2", "file3"],
     }
 
-    io.write_provenance_info(catalog_base_dir=catalog_base_dir, dataset_info=catalog_info, tool_args=tool_args)
+    io.write_provenance_info(
+        catalog_base_dir=catalog_base_dir, dataset_info=catalog_info, tool_args=tool_args
+    )
 
     metadata_filename = os.path.join(catalog_base_dir, "provenance_info.json")
     assert_text_file_matches(expected_lines, metadata_filename)
@@ -144,9 +147,7 @@ def test_write_partition_info_float(assert_text_file_matches, tmp_path):
     assert_text_file_matches(expected_lines, metadata_filename)
 
 
-def test_write_parquet_metadata(
-    tmp_path, small_sky_dir, basic_catalog_parquet_metadata
-):
+def test_write_parquet_metadata(tmp_path, small_sky_dir, basic_catalog_parquet_metadata):
     """Copy existing catalog and create new metadata files for it"""
     catalog_base_dir = os.path.join(tmp_path, "catalog")
     shutil.copytree(
@@ -154,9 +155,7 @@ def test_write_parquet_metadata(
         catalog_base_dir,
     )
     io.write_parquet_metadata(catalog_base_dir)
-    check_parquet_schema(
-        os.path.join(catalog_base_dir, "_metadata"), basic_catalog_parquet_metadata
-    )
+    check_parquet_schema(os.path.join(catalog_base_dir, "_metadata"), basic_catalog_parquet_metadata)
     ## _common_metadata has 0 row groups
     check_parquet_schema(
         os.path.join(catalog_base_dir, "_common_metadata"),
@@ -165,9 +164,7 @@
     )
     ## Re-write - should still have the same properties.
     io.write_parquet_metadata(catalog_base_dir)
-    check_parquet_schema(
-        os.path.join(catalog_base_dir, "_metadata"), basic_catalog_parquet_metadata
-    )
+    check_parquet_schema(os.path.join(catalog_base_dir, "_metadata"), basic_catalog_parquet_metadata)
     ## _common_metadata has 0 row groups
     check_parquet_schema(
         os.path.join(catalog_base_dir, "_common_metadata"),
@@ -176,9 +173,7 @@
     )
 
 
-def test_write_parquet_metadata_order1(
-    tmp_path, small_sky_order1_dir, basic_catalog_parquet_metadata
-):
+def test_write_parquet_metadata_order1(tmp_path, small_sky_order1_dir, basic_catalog_parquet_metadata):
     """Copy existing catalog and create new metadata files for it,
     using a catalog with multiple files."""
     temp_path = os.path.join(tmp_path, "catalog")
@@ -219,9 +214,7 @@ def test_write_index_parquet_metadata(tmp_path):
     )
 
     io.write_parquet_metadata(temp_path)
-    check_parquet_schema(
-        os.path.join(tmp_path, "index", "_metadata"), index_catalog_parquet_metadata
-    )
+    check_parquet_schema(os.path.join(tmp_path, "index", "_metadata"), index_catalog_parquet_metadata)
     ## _common_metadata has 0 row groups
     check_parquet_schema(
         os.path.join(tmp_path, "index", "_common_metadata"),
diff --git a/tests/hipscat/pixel_math/test_hipscat_id.py b/tests/hipscat/pixel_math/test_hipscat_id.py
index d69f7b23..1436e862 100644
--- a/tests/hipscat/pixel_math/test_hipscat_id.py
+++ b/tests/hipscat/pixel_math/test_hipscat_id.py
@@ -3,8 +3,8 @@
 import numpy as np
 import numpy.testing as npt
 import pytest
-from hipscat.pixel_math.hipscat_id import (compute_hipscat_id,
-                                           hipscat_id_to_healpix)
+
+from hipscat.pixel_math.hipscat_id import compute_hipscat_id, hipscat_id_to_healpix
 
 
 def test_single():
@@ -50,9 +50,7 @@ def test_list():
     Interspersed are points at (1,1), which will start at 5476738131329810432
     (0x4C0148503DC00000)
     """
-    result = compute_hipscat_id(
-        [5, 5, 5, 1, 5, 5, 5, 1, 5], [5, 5, 5, 1, 5, 5, 5, 1, 5]
-    )
+    result = compute_hipscat_id([5, 5, 5, 1, 5, 5, 5, 1, 5], [5, 5, 5, 1, 5, 5, 5, 1, 5])
     expected = [
         5482513871577022464,
         5482513871577022465,
diff --git a/tests/hipscat/pixel_math/test_partition_stats.py b/tests/hipscat/pixel_math/test_partition_stats.py
index 6ae0e65e..b49344a2 100644
--- a/tests/hipscat/pixel_math/test_partition_stats.py
+++ b/tests/hipscat/pixel_math/test_partition_stats.py
@@ -84,9 +84,7 @@ def test_alignment_lowest_order_too_large():
     """Check that the method raises error when some pixel exceeds the threshold."""
    initial_histogram = hist.empty_histogram(1)
     with pytest.raises(ValueError, match="lowest_order"):
-        hist.generate_alignment(
-            initial_histogram, highest_order=1, lowest_order=2, threshold=20
-        )
+        hist.generate_alignment(initial_histogram, highest_order=1, lowest_order=2, threshold=20)
 
 
 def test_alignment_exceeds_threshold_order2():
@@ -156,16 +154,12 @@ def test_alignment_small_sky_order2():
 def test_alignment_even_sky():
     """Create alignment from an even distribution at order 8"""
     initial_histogram = np.full(hp.order2npix(8), 10)
-    result = hist.generate_alignment(
-        initial_histogram, highest_order=8, threshold=1_000
-    )
+    result = hist.generate_alignment(initial_histogram, highest_order=8, threshold=1_000)
     # everything maps to order 5, given the density
     for mapping in result:
         assert mapping[0] == 5
 
-    result = hist.generate_alignment(
-        initial_histogram, highest_order=8, lowest_order=7, threshold=1_000
-    )
+    result = hist.generate_alignment(initial_histogram, highest_order=8, lowest_order=7, threshold=1_000)
     # everything maps to order 7 (would be 5, but lowest of 7 is enforced)
     for mapping in result:
         assert mapping[0] == 7
@@ -215,20 +209,17 @@ def test_compute_pixel_map_order1():
 def test_compute_pixel_map_even_sky():
     """Create alignment from an even distribution at order 8"""
     initial_histogram = np.full(hp.order2npix(8), 10)
-    result = hist.compute_pixel_map(
-        initial_histogram, highest_order=8, threshold=1_000
-    )
+    result = hist.compute_pixel_map(initial_histogram, highest_order=8, threshold=1_000)
     # everything maps to order 5, given the density
     for mapping in result:
         assert mapping.order == 5
 
-    result = hist.compute_pixel_map(
-        initial_histogram, highest_order=8, lowest_order=7, threshold=1_000
-    )
+    result = hist.compute_pixel_map(initial_histogram, highest_order=8, lowest_order=7, threshold=1_000)
     # everything maps to order 7 (would be 5, but lowest of 7 is enforced)
     for mapping in result:
         assert mapping.order == 7
 
+
 def test_compute_pixel_map_invalid_inputs():
     """Create destination pixel map for small sky at order 1"""
 
@@ -246,9 +237,7 @@ def test_compute_pixel_map_invalid_inputs():
 
     ## lowest_order too large
     with pytest.raises(ValueError, match="lowest_order"):
-        hist.compute_pixel_map(
-            initial_histogram, highest_order=1, lowest_order=2, threshold=30
-        )
+        hist.compute_pixel_map(initial_histogram, highest_order=1, lowest_order=2, threshold=30)
 
 
 def test_generate_constant_pixel_map():
@@ -257,9 +246,7 @@ def test_generate_constant_pixel_map():
     initial_histogram = np.asarray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 131])
     expected = {HealpixPixel(0, 11): (131, [11])}
 
-    result = hist.generate_constant_pixel_map(
-        initial_histogram, constant_healpix_order=0
-    )
+    result = hist.generate_constant_pixel_map(initial_histogram, constant_healpix_order=0)
     npt.assert_array_equal(result, expected)
 
     initial_histogram = hist.empty_histogram(1)
@@ -271,9 +258,7 @@
         HealpixPixel(1, 46): (51, [46]),
     }
 
-    result = hist.generate_constant_pixel_map(
-        initial_histogram, constant_healpix_order=1
-    )
+    result = hist.generate_constant_pixel_map(initial_histogram, constant_healpix_order=1)
     npt.assert_array_equal(result, expected)
 
 
diff --git a/tests/hipscat/pixel_math/test_pixel_margins.py b/tests/hipscat/pixel_math/test_pixel_margins.py
index 134cb4a7..91bde864 100644
--- a/tests/hipscat/pixel_math/test_pixel_margins.py
+++ b/tests/hipscat/pixel_math/test_pixel_margins.py
@@ -128,10 +128,7 @@ def test_edge_negative_value():
     with pytest.raises(ValueError) as value_error:
         pm.get_edge(2, 5, -1)
 
-    assert (
-        str(value_error.value)
-        == "edge can only be values between 0 and 7 (see docstring)"
-    )
+    assert str(value_error.value) == "edge can only be values between 0 and 7 (see docstring)"
 
 
 def test_edge_greater_than_7():
@@ -139,10 +136,7 @@ def test_edge_greater_than_7():
     with pytest.raises(ValueError) as value_error:
         pm.get_edge(2, 5, 8)
 
-    assert (
-        str(value_error.value)
-        == "edge can only be values between 0 and 7 (see docstring)"
-    )
+    assert str(value_error.value) == "edge can only be values between 0 and 7 (see docstring)"
 
 
 def test_pixel_is_polar_north():
diff --git a/tests/hipscat/pixel_tree/test_pixel_node.py b/tests/hipscat/pixel_tree/test_pixel_node.py
index c0893daa..60b935ca 100644
--- a/tests/hipscat/pixel_tree/test_pixel_node.py
+++ b/tests/hipscat/pixel_tree/test_pixel_node.py
@@ -16,9 +16,7 @@ def test_create_root_node(root_pixel_node_data):
     check_node_data_correct(root_node, root_pixel_node_data)
 
 
-def test_create_root_node_with_parent_raises_error(
-    root_pixel_node, root_pixel_node_data
-):
+def test_create_root_node_with_parent_raises_error(root_pixel_node, root_pixel_node_data):
     with pytest.raises(ValueError):
         root_pixel_node_data["parent"] = root_pixel_node
         PixelNode(**root_pixel_node_data)
@@ -26,9 +24,7 @@ def test_create_root_node_with_parent_raises_error(
 
 def test_create_root_node_with_wrong_order_raises_error(root_pixel_node_data):
     with pytest.raises(ValueError):
-        root_pixel_node_data["pixel"] = HealpixPixel(
-            1, root_pixel_node_data["pixel"].pixel
-        )
+        root_pixel_node_data["pixel"] = HealpixPixel(1, root_pixel_node_data["pixel"].pixel)
         PixelNode(**root_pixel_node_data)
 
 
@@ -62,31 +58,23 @@ def test_create_leaf_node_with_no_parent_raises_error(leaf_pixel_node_data):
 
 def test_create_leaf_node_with_negative_hp_order_raises_error(leaf_pixel_node_data):
     with pytest.raises(ValueError):
-        leaf_pixel_node_data["pixel"] = HealpixPixel(
-            -1, leaf_pixel_node_data["pixel"].pixel
-        )
+        leaf_pixel_node_data["pixel"] = HealpixPixel(-1, leaf_pixel_node_data["pixel"].pixel)
         PixelNode(**leaf_pixel_node_data)
 
 
 def test_create_leaf_node_with_negative_hp_pixel_raises_error(leaf_pixel_node_data):
     with pytest.raises(ValueError):
-        leaf_pixel_node_data["pixel"] = HealpixPixel(
-            leaf_pixel_node_data["pixel"].order, -1
-        )
+        leaf_pixel_node_data["pixel"] = HealpixPixel(leaf_pixel_node_data["pixel"].order, -1)
         PixelNode(**leaf_pixel_node_data)
 
 
-def test_create_node_with_wrong_order_parent_raises_error(
-    leaf_pixel_node_data, root_pixel_node
-):
+def test_create_node_with_wrong_order_parent_raises_error(leaf_pixel_node_data, root_pixel_node):
     with pytest.raises(ValueError):
         leaf_pixel_node_data["parent"] = root_pixel_node
         PixelNode(**leaf_pixel_node_data)
 
 
-def test_add_child_node_with_wrong_child_parent_raises_error(
-    leaf_pixel_node, root_pixel_node
-):
+def test_add_child_node_with_wrong_child_parent_raises_error(leaf_pixel_node, root_pixel_node):
     with pytest.raises(ValueError):
         root_pixel_node.add_child_node(leaf_pixel_node)
 
@@ -152,9 +140,7 @@ def test_get_leaf_descendants_with_root_node(root_pixel_node, leaf_pixel_node):
     assert root_pixel_node.get_all_leaf_descendants() == [leaf_pixel_node]
 
 
-def test_get_leaf_descendants_with_multiple_leafs(
-    root_pixel_node, leaf_pixel_node, leaf_pixel_node_data
-):
+def test_get_leaf_descendants_with_multiple_leafs(root_pixel_node, leaf_pixel_node, leaf_pixel_node_data):
     leaf_pixel_node_data["pixel"] = HealpixPixel(
         leaf_pixel_node_data["pixel"].order, leaf_pixel_node_data["pixel"].pixel + 1
     )
diff --git a/tests/hipscat/pixel_tree/test_pixel_tree_builder.py b/tests/hipscat/pixel_tree/test_pixel_tree_builder.py
index d16e1f2e..6c9dda26 100644
--- a/tests/hipscat/pixel_tree/test_pixel_tree_builder.py
+++ b/tests/hipscat/pixel_tree/test_pixel_tree_builder.py
@@ -32,9 +32,7 @@ def test_pixel_tree_small_sky(small_sky_catalog, small_sky_pixels):
 
 def test_pixel_tree_small_sky_order1(small_sky_order1_catalog, small_sky_order1_pixels):
     """test pixel tree on small sky order1"""
-    pixel_tree = PixelTreeBuilder.from_partition_info_df(
-        small_sky_order1_catalog.get_pixels()
-    )
+    pixel_tree = PixelTreeBuilder.from_partition_info_df(small_sky_order1_catalog.get_pixels())
     assert_pixel_tree_has_nodes_in_catalog(pixel_tree, small_sky_order1_catalog)
     first_pixel = pixel_tree.get_node(small_sky_order1_pixels[0])
     second_pixel = pixel_tree.get_node(small_sky_order1_pixels[1])
@@ -96,13 +94,9 @@ def test_pixel_builder_retrieve_added_node():
     assert tree_builder.get_node((order, pixel)).parent == tree_builder.root_pixel
     assert tree_builder.get_node((order, pixel)).hp_order == order
     assert tree_builder.get_node((order, pixel)).hp_pixel == pixel
-    assert tree_builder.get_node(HealpixPixel(order, pixel)) == tree_builder.get_node(
-        (order, pixel)
-    )
+    assert tree_builder.get_node(HealpixPixel(order, pixel)) == tree_builder.get_node((order, pixel))
     assert tree_builder[(order, pixel)] == tree_builder.get_node((order, pixel))
-    assert tree_builder[HealpixPixel(order, pixel)] == tree_builder.get_node(
-        (order, pixel)
-    )
+    assert tree_builder[HealpixPixel(order, pixel)] == tree_builder.get_node((order, pixel))
 
 
 def test_pixel_builder_retrieve_none_node():
diff --git a/tests/hipscat/util/test_healpix_pixel_convertor.py b/tests/hipscat/util/test_healpix_pixel_convertor.py
index 8ad6d2bf..c07daa32 100644
--- a/tests/hipscat/util/test_healpix_pixel_convertor.py
+++ b/tests/hipscat/util/test_healpix_pixel_convertor.py
@@ -1,7 +1,7 @@
 import pytest
+
 from hipscat.pixel_math import HealpixPixel
-from hipscat.pixel_math.healpix_pixel_convertor import (HealpixInputTypes,
-                                                        get_healpix_pixel)
+from hipscat.pixel_math.healpix_pixel_convertor import HealpixInputTypes, get_healpix_pixel
 
 
 def return_healpix(pixel: HealpixInputTypes):