51 changes: 51 additions & 0 deletions README.md
@@ -29,6 +29,57 @@ PFS is a wide-field, multi-object spectrograph capable of simultaneously obtaini

- **Instrument Configuration**: Constants and parameters for the PFS instrument configuration

## Usage

### Database usage

- Authentication: passwords are expected to be managed externally by libpq (e.g. via `~/.pgpass`); the helpers connect with psycopg through SQLAlchemy and do not embed passwords.
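
libpq reads `~/.pgpass` entries of the form `hostname:port:database:username:password` and ignores the file unless its permissions are `0600` or stricter. The snippet below is a quick, illustrative check only (it is not part of `pfs.utils`):

```python
# Illustrative only, not part of pfs.utils: confirm that ~/.pgpass exists and has
# permissions libpq will accept (no group/other access).
import stat
from pathlib import Path

pgpass = Path.home() / ".pgpass"
if not pgpass.exists():
    print("No ~/.pgpass found; libpq will prompt for or fail to find a password.")
elif stat.S_IMODE(pgpass.stat().st_mode) & 0o077:
    print("~/.pgpass has group/other permissions; libpq will ignore it (chmod 600 to fix).")
```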

#### Operational DB convenience class

```python
import pandas as pd

from pfs.utils.database.opdb import OpDB

# Uses default connection settings for the PFS operational DB
opdb = OpDB()

# Returns a dataframe with results.
df = opdb.fetch("SELECT max(pfs_visit_id) FROM pfs_visit")

# Or fetch as an array.
rows = opdb.fetch("SELECT max(pfs_visit_id) FROM pfs_visit", as_dataframe=False)

# Fetch one row with named parameters.
n = opdb.fetch("SELECT :foo AS val", {"foo": 42})

# Insert many rows via a dataframe ("..." stands in for the remaining agc_match columns)
df0 = pd.DataFrame([
    {"agc_frame_id": 123456, "spot_id": 1, ...},
    {"agc_frame_id": 123456, "spot_id": 2, ...},
])
opdb.insert(df=df0, table='agc_match')

# Fetch a dataframe with params.
df1 = opdb.fetch(
    sql="SELECT * FROM agc_match WHERE agc_exposure_id = :frame_id",
    params=dict(frame_id=123456)
)

# Insert a single row via keyword arguments; the keyword names must match the table's column names.
opdb.insert("test", id=1, value="i am a test value")

# Reuse one connection for multiple statements
with opdb.connection() as conn:
    conn.exec_driver_sql("SET LOCAL statement_timeout = 5000")
    conn.exec_driver_sql("SELECT 1")
```

#### Notes

- Connection pooling: `DB`/`OpDB` cache a SQLAlchemy `Engine` with pooling, and each helper method checks out a connection for the duration of the call. Use `opdb.connection()` (as in the example above) to explicitly reuse a single connection across statements.
- Result format: `fetchone`/`fetchall` return numpy arrays for backward compatibility (see the sketch below).
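
The `fetchone`/`fetchall` helpers mentioned in the notes are not shown in the example above; a minimal sketch, assuming they accept a SQL string the same way `fetch` does:

```python
# Hedged sketch: fetchall/fetchone are assumed to take a SQL string like fetch() does.
from pfs.utils.database.opdb import OpDB

opdb = OpDB()
rows = opdb.fetchall("SELECT pfs_visit_id FROM pfs_visit LIMIT 5")  # numpy array of rows
last = opdb.fetchone("SELECT max(pfs_visit_id) FROM pfs_visit")     # numpy array for one row
```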


## Installation

### Requirements
39 changes: 25 additions & 14 deletions pyproject.toml
@@ -1,29 +1,29 @@
[build-system]
requires = ["setuptools >= 77.0.3"]
requires = ["setuptools >= 77.0.3", "lsst-versions"]
build-backend = "setuptools.build_meta"

[project]
name = "pfs-utils"
description = "Common util tools for the Subaru PFS DRP"
readme = "README.md"
requires-python = ">=3.11"
requires-python = ">=3.12"
dependencies = [
"astroplan",
"astropy",
"matplotlib",
"numpy",
"pandas",
"pfs-datamodel @ git+https://github.com/Subaru-PFS/datamodel.git",
"psycopg[binary]",
"pytz",
"scipy",
"sqlalchemy",
]
dynamic = ["version"]
[project.optional-dependencies]
dev = [
"black",
"isort",
"lsst-versions",
"pytest>=8.0",
"ruff"
]

[project.urls]
@@ -33,20 +33,31 @@ dev = [
where = ["python/"]
include = ["pfs", "pfs.*"]

[tool.black]
line-length = 110
target-version = ["py311"]

[tool.isort]
profile = "black"
line_length = 110
known_first_party = ["pfs"]

[tool.lsst_versions]
write_to = "python/pfs/utils/version.py"

[tool.pytest.ini_options]
addopts = "--import-mode=importlib" # Recommended as best practice

[tool.ruff]
line-length = 110

# Ignore naming-convention (camelCase) rules for functions, methods, and variables, plus selected exception-message rules
[tool.ruff.lint]
select = ["E", "F", "I"]
ignore = ["N802", "N803", "N806", "N815", "N816", "EM101", "TRY003"]

[tool.pydocstyle]
convention = "numpy"

[tool.hatch.metadata]
allow-direct-references = true

[tool.hatch.envs.test]
# Use the project optional dependency group `dev` so pytest and friends are installed
features = ["dev"]
# Pin to the project's supported Python by default; hatch will still choose a compatible interpreter
python = "3.12"
# Explicit test script; run with `hatch run test:test`, and extra args are passed through as `{args}`
[tool.hatch.envs.test.scripts]
test = "pytest {args}"