hats.catalog.dataset.table_properties#

Attributes#

CATALOG_TYPE_REQUIRED_FIELDS

Classes#

TableProperties

Container class for catalog metadata

Module Contents#

CATALOG_TYPE_REQUIRED_FIELDS[source]#
class TableProperties(/, **data: Any)[source]#

Bases: pydantic.BaseModel

Container class for catalog metadata

catalog_name: str = None[source]#
catalog_type: hats.catalog.catalog_type.CatalogType = None[source]#
total_rows: int | None = None[source]#
ra_column: str | None = None[source]#
dec_column: str | None = None[source]#
default_columns: list[str] | None = None[source]#

Which columns should be read from parquet files when the user doesn’t otherwise specify.

healpix_column: str | None = None[source]#

Column name that provides a spatial index of healpix values at some fixed, high order. A typical value would be _healpix_29, but can vary.

healpix_order: int | None = None[source]#

The fixed, high order used for the spatial index of healpix values in hats_col_healpix. A typical value would be 29, but can vary.

primary_catalog: str | None = None[source]#

Reference to the object catalog. Relevant for nested, margin, association, and index catalogs.

margin_threshold: float | None = None[source]#

Threshold of the pixel boundary, expressed in arcseconds.

primary_column: str | None = None[source]#

Column name in the primary (left) side of join.

primary_column_association: str | None = None[source]#

Column name in the association table that matches the primary (left) side of join.

join_catalog: str | None = None[source]#

Catalog name for the joining (right) side of association.

join_column: str | None = None[source]#

Column name in the joining (right) side of join.

join_column_association: str | None = None[source]#

Column name in the association table that matches the joining (right) side of join.

assn_max_separation: float | None = None[source]#

The maximum separation between two points in an association catalog, expressed in arcseconds.

contains_leaf_files: bool | None = None[source]#

Whether or not the association catalog contains leaf parquet files.

indexing_column: str | None = None[source]#

Column that we provide an index over.

extra_columns: list[str] | None = None[source]#

Any additional payload columns included in index.

npix_suffix: str = None[source]#

Suffix of the Npix partitions. In the standard HATS directory structure, this is '.parquet' because there is a single file in each Npix partition and it is named like 'Npix=313.parquet'. Other valid directory structures include those with the same single file per partition but which use a different suffix (e.g., 'npix_suffix' = '.parq' or '.snappy.parquet'), and also those in which the Npix partitions are actually directories containing 1+ files underneath (and then 'npix_suffix' = '/').

skymap_order: int | None = None[source]#

Nested Order of the healpix skymap stored in the default skymap.fits.

skymap_alt_orders: list[int] | None = None[source]#

Nested Order (K) of the healpix skymaps stored in alternative skymap.K.fits files.

model_config[source]#

Configuration for the model; should be a dictionary conforming to pydantic.ConfigDict.
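
Because TableProperties is a pydantic model, instances are built from keyword arguments and validated on construction. A minimal sketch follows; the field values and the "object" catalog type are assumptions for the example, not taken from a real catalog.

from hats.catalog.dataset.table_properties import TableProperties

# Illustrative values only; check_required() is expected to verify that the
# fields needed for the given catalog type are present.
props = TableProperties(
    catalog_name="my_survey",
    catalog_type="object",      # assumed to coerce to CatalogType.OBJECT
    total_rows=1_000_000,
    ra_column="ra",
    dec_column="dec",
)
print(props)  # __str__ provides a friendly, field-by-field representation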

classmethod space_delimited_list(str_value: str) → list[str][source]#

Convert a space-delimited list string into a python list of strings.

Parameters:
str_value: str

a space-delimited list string

Returns:
list[str]

python list of strings

classmethod space_delimited_int_list(str_value: str | list[int]) → list[int][source]#

Convert a delimited list string (or an existing list of integers) into a python list of integers.

Parameters:
str_value: str | list[int]

string representation of a list of integers, delimited by space, comma, or semicolon, or a list of integers.

Returns:
list[int]

a python list of integers

Raises:
ValueError

if any non-digit characters are encountered
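
A minimal sketch of calling the two parsing helpers directly with illustrative inputs (with TableProperties imported as in the construction sketch above); they are also applied automatically when list-valued fields are populated from property strings.

cols = TableProperties.space_delimited_list("ra dec mag_g")
# expected: ["ra", "dec", "mag_g"]

orders = TableProperties.space_delimited_int_list("2 4 8")
# expected: [2, 4, 8]; per the docstring, comma and semicolon delimiters are
# also accepted, and a non-digit entry such as "2 x 8" raises ValueError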

serialize_as_space_delimited_list(str_list: Iterable) → str[source]#

Convert a python list of strings into a space-delimited string.

Parameters:
str_list: Iterable

a python list of strings

Returns:
str

a space-delimited string.
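
This serializer is the inverse of the parsing helpers above and is applied automatically when list-valued fields are written back out as property strings; calling it directly on an instance, as in this sketch (continuing with props from the construction example), is purely illustrative.

line = props.serialize_as_space_delimited_list(["ra", "dec", "mag_g"])
# expected: "ra dec mag_g"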

check_required() → typing_extensions.Self[source]#

Check that type-specific fields are appropriate, and required fields are set.

copy_and_update(**kwargs)[source]#

Create a validated copy of these table properties, updating the fields provided in kwargs.

Parameters:
**kwargs

values to update

Returns:
TableProperties

new instance of properties object
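
A minimal sketch, continuing with props from the construction example; the copy is re-validated and the original instance is left untouched.

updated = props.copy_and_update(total_rows=2_000_000, default_columns=["ra", "dec"])
assert props.total_rows == 1_000_000   # original is unchanged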

explicit_dict(by_alias=False, exclude_none=True)[source]#

Create a dict, based on fields that have been explicitly set, and are not “extra” keys.

Parameters:
by_alias: bool

(Default value = False)

exclude_none: bool

(Default value = True)

Returns:
dict

all keys that are attributes of this class and not “extra”.

extra_dict(by_alias=False, exclude_none=True)[source]#

Create a dict, based on fields that are “extra” keys.

Parameters:
by_alias: bool

(Default value = False)

exclude_none: bool

(Default value = True)

Returns:
dict

all keys that are not attributes of this class, e.g. “extra”.
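
An illustrative sketch of the two views, again using props from the construction example; any keyword passed at construction time that is not a declared attribute (for example, a provenance key) would appear only in extra_dict().

explicit = props.explicit_dict()   # declared, explicitly set fields only
extra = props.extra_dict()         # "extra" keys only; empty if none were supplied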

__repr__()[source]#
__str__()[source]#

Friendly string representation based on named fields.

classmethod read_from_dir(catalog_dir: str | pathlib.Path | upath.UPath) → typing_extensions.Self[source]#

Read field values from a Java-style properties file.

Parameters:
catalog_dir: str | Path | UPath

path to a catalog directory.

Returns:
TableProperties

object created from the contents of a hats.properties file in the given directory

to_properties_file(catalog_dir: str | pathlib.Path | upath.UPath)[source]#

Write fields to a Java-style properties file.

Parameters:
catalog_dir: str | Path | UPath

directory to write the file
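
A minimal round-trip sketch, continuing with props from the construction example: write the properties into an illustrative directory, then read them back. The directory path is an assumption for the example.

from pathlib import Path

catalog_dir = Path("/tmp/my_survey")        # illustrative location
catalog_dir.mkdir(parents=True, exist_ok=True)

props.to_properties_file(catalog_dir)       # writes the Java-style properties file
round_trip = TableProperties.read_from_dir(catalog_dir)
assert round_trip.catalog_name == props.catalog_name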

static new_provenance_dict(path: str | pathlib.Path | upath.UPath | None = None, builder: str | None = None, **kwargs) → dict[source]#

Constructs the provenance properties for a HATS catalog.

Parameters:
path: str | Path | UPath | None

The path to the catalog directory.

builder: str | None

The name and version of the tool that created the catalog.

**kwargs

Additional properties to include/override in the dictionary.

Returns:
dict

A dictionary with properties for the HATS catalog.
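
A minimal sketch of building the provenance dictionary; the path, builder string, and extra keyword below are illustrative assumptions, and the exact keys returned depend on the HATS version.

provenance = TableProperties.new_provenance_dict(
    "/tmp/my_survey",                        # illustrative catalog path
    builder="my_pipeline v1.2.3",            # illustrative tool name and version
    custom_note="illustrative extra property",   # passed through via **kwargs
)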