Skip to content

Commit 52f80ef

Browse files
Merge pull request #10 from swagner-astro/pre-commit-ci-update-config
[pre-commit.ci] pre-commit autoupdate
2 parents 3f680c0 + 2d5bf03 commit 52f80ef

File tree

4 files changed

+894
-894
lines changed

4 files changed

+894
-894
lines changed

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,6 @@
11
repos:
22
- repo: https://github.com/pre-commit/pre-commit-hooks
3-
rev: "v5.0.0"
3+
rev: "v6.0.0"
44
hooks:
55
- id: check-added-large-files
66
- id: check-case-conflict
@@ -16,7 +16,7 @@ repos:
1616
- id: trailing-whitespace
1717

1818
- repo: https://github.com/astral-sh/ruff-pre-commit
19-
rev: "v0.12.5"
19+
rev: "v0.12.9"
2020
hooks:
2121
# id: ruff-check would go here if using both
2222
- id: ruff-format

illustration_lightcurve.ipynb

Lines changed: 879 additions & 880 deletions
Large diffs are not rendered by default.

src/lightcurves/LC.py

Lines changed: 9 additions & 8 deletions
Original file line number · Diff line number · Diff line change
@@ -2,14 +2,13 @@
22

33
import logging
44
import pickle
5+
from pathlib import Path
56

67
import astropy
78
import astropy.stats.bayesian_blocks as bblocks
89
import numpy as np
910
from matplotlib import pyplot as plt
1011
from matplotlib.axes import Axes # for type hints only
11-
from pathlib import Path
12-
1312

1413
# https://docs.astropy.org/en/stable/api/astropy.stats.bayesian_blocks.html
1514
import lightcurves.HopFinder as hf
@@ -37,7 +36,8 @@ def load_lc(path: str | Path) -> LightCurve:
3736
with path.open("rb") as f:
3837
return pickle.load(f)
3938

40-
'''
39+
40+
'''
4141
this following is exactly like the previous one.. might not be necessary
4242
def load_lc_npy(path: str | Path) -> LightCurve:
4343
"""
@@ -52,6 +52,7 @@ def load_lc_npy(path: str | Path) -> LightCurve:
5252
return pickle.load(f)
5353
'''
5454

55+
5556
def load_lc_csv(path: str) -> LightCurve:
5657
"""
5758
Load a pickled LightCurve instance from a CSV file saved with `save_csv()`.
@@ -153,8 +154,8 @@ def clean_data(
153154
ts_ = ts[nan_mask]
154155
ts_clean = ts_[time_unique_id]
155156
return (time_unique, flux_clean, flux_error_clean, ts_clean)
156-
else:
157-
return (time_unique, flux_clean, flux_error_clean, None)
157+
return (time_unique, flux_clean, flux_error_clean, None)
158+
158159

159160
def get_gti_iis(
160161
time: np.ndarray, n_gaps: int, n_pick: int | None
@@ -316,7 +317,7 @@ def __init__(
316317
friendly_error = "Input arrays do not have same length"
317318
raise ValueError(friendly_error)
318319
if len(flux[np.isnan(flux)]) > 0 or len(flux_error[np.isnan(flux_error)]) > 0:
319-
friendly_error = "flux or flux_error contain np.nan values"
320+
friendly_error = "flux or flux_error contain np.nan values"
320321
raise TypeError(friendly_error)
321322
if len(time) != len(np.unique(time)):
322323
friendly_error = "time contains duplicate values"
@@ -422,7 +423,7 @@ def save_npy(self, path: str) -> None:
422423
Use `load_lc_npy()` to read this file.
423424
This does not update `LC.py`, it saves current object state.
424425
TBD: actaully since this is an object it just saves a pickle that could be called .npy
425-
the save npy business needs to be revisited I think it might be nonesense
426+
the save npy business needs to be revisited I think it might be nonesense
426427
"""
427428
path = Path(path)
428429
with path.open("wb") as pickle_file:
@@ -785,7 +786,7 @@ def get_bblocks_above(
785786
)
786787
except AttributeError:
787788
msg = "Initialize Bayesian blocks with lc.get_bblocks() first!"
788-
raise AttributeError(msg) from err
789+
raise AttributeError(msg) from err
789790

790791
# Merge neighbouring threshold blocks and delete edges
791792
block_mask = np.ones(len(self.block_val), dtype=bool)

src/lightcurves/hop_finder.py

Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -91,8 +91,8 @@ def clean(self, peaks, starts, ends, lc):
9191
logging.info("not variable enough, no peak found")
9292
return (None, None, None)
9393
if self.lc_edges == "neglect" and (len(starts) < 1 or len(ends) < 1):
94-
logging.info("not variable enough, missing start or end")
95-
return (None, None, None)
94+
logging.info("not variable enough, missing start or end")
95+
return (None, None, None)
9696
if self.lc_edges == "add":
9797
if len(starts) < 1:
9898
starts = np.insert(starts, 0, lc.edges[0])
@@ -199,8 +199,8 @@ def find_peaks(self, lc):
199199
for i in range(1, len(diff)):
200200
# if previous rising; this falling
201201
if (diff[i - 1] > 0 and diff[i] < 0) and (lc.block_val[i] > lc.baseline):
202-
# peak_time = middle of peak block
203-
peaks.append(lc.edges[i] + (lc.edges[i + 1] - lc.edges[i]) / 2)
202+
# peak_time = middle of peak block
203+
peaks.append(lc.edges[i] + (lc.edges[i + 1] - lc.edges[i]) / 2)
204204
return peaks
205205

206206
def find_start_end(self, lc):

0 commit comments

Comments (0)