Commit 01164e7

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent c78fe2c commit 01164e7

File tree: 14 files changed, +632 −421 lines

.github/dependabot.yml

Lines changed: 0 additions & 1 deletion
@@ -9,4 +9,3 @@ updates:
       actions:
         patterns:
           - "*"
-

.github/workflows/ci.yml

Lines changed: 0 additions & 3 deletions
@@ -41,6 +41,3 @@ jobs:
 
       - name: Test package
        run: pytest
-
-
-

.pre-commit-config.yaml

Lines changed: 0 additions & 1 deletion
@@ -22,4 +22,3 @@ repos:
       - id: ruff-format
       - id: ruff-check
        args: ["--fix", "--show-fixes"]
-

README.md

Lines changed: 3 additions & 3 deletions
@@ -2,12 +2,12 @@
 
 This is the lightcurves repository. Check it out: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1OqafFK4FQA_tBwTTnYMG-1D5uhTQ5X0D#scrollTo=european-mechanism) <br>
 See here for scientific application of this code:
-https://pos.sissa.it/395/868 
+https://pos.sissa.it/395/868
 
 ## LC.py
-Initialize a LightCurve object based on time, flux and flux_error. 
+Initialize a LightCurve object based on time, flux and flux_error.
 Study its Bayesian block representation (based on Scargle et al. 2013 https://ui.adsabs.harvard.edu/abs/2013arXiv1304.2818S/abstract ).<br>
-Characterize flares (start, peak, end time) with the HOP algorithm (following Meyer et al. 2019 https://ui.adsabs.harvard.edu/abs/2019ApJ...877...39M/abstract ). There are four different methods to define flares (baseline, half, flip, sharp) as illustrated in the Jupyter Notebook.
+Characterize flares (start, peak, end time) with the HOP algorithm (following Meyer et al. 2019 https://ui.adsabs.harvard.edu/abs/2019ApJ...877...39M/abstract ). There are four different methods to define flares (baseline, half, flip, sharp) as illustrated in the Jupyter Notebook.
 
 ## HOP.py
 Initialize a Hopject to consider parameters of an individual flare.
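
For context, the API touched by this commit can be summarized in a short usage sketch assembled from the calls shown in illustration_lightcurve.ipynb below; it assumes the repository's LC module and the bundled mock data file ./lc.npy are available, and is an illustration rather than part of the diff:

from __future__ import annotations

import numpy as np
from matplotlib import pyplot as plt

from LC import LightCurve  # installed package: from lightcurves.LC import LightCurve

# mock light curve with Gaussian flares, as used in the notebook
time, flux, flux_error = np.load("./lc.npy")
time += 55000  # e.g. MJD

lc = LightCurve(time, flux, flux_error)
lc.get_bblocks(p0_value=0.05)   # Bayesian block representation (Scargle et al. 2013)
hops = lc.find_hop("baseline")  # flare definitions: "baseline", "half", "flip", "sharp"
for hop in hops:                # each hop is a Hopject describing one flare
    print(hop.start_time, hop.peak_time, hop.end_time)

lc.plot_bblocks()
plt.legend()
plt.show()

The notebook additionally reads off a per-flare asymmetry measure (hop.asym) and collects it over several light curves with LC_Set.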

illustration_lightcurve.ipynb

Lines changed: 62 additions & 48 deletions
@@ -26,17 +26,22 @@
 "metadata": {},
 "outputs": [],
 "source": [
+"from __future__ import annotations\n",
+"\n",
 "import numpy as np\n",
-"from matplotlib import pyplot as plt\n",
 "from IPython.core.pylabtools import figsize\n",
-"figsize(10,3)\n",
+"from matplotlib import pyplot as plt\n",
+"\n",
+"figsize(10, 3)\n",
 "\n",
 "try:\n",
-"    from LC import LightCurve\n",
 "    from HOP import Hopject\n",
+"    from LC import LightCurve\n",
 "    from LC_Set import LC_Set\n",
 "except ModuleNotFoundError:\n",
-"    print('If you installed the zipped version from GitHub, you have to delete \"lightcurves.\" in the import statements of the above files')"
+"    print(\n",
+"        'If you installed the zipped version from GitHub, you have to delete \"lightcurves.\" in the import statements of the above files'\n",
+"    )"
 ]
 },
 {
@@ -77,8 +82,8 @@
 }
 ],
 "source": [
-"time, flux, flux_error = np.load('./lc.npy') #mock lc with gaussian flares\n",
-"time += 55000 #e.g. MJD\n",
+"time, flux, flux_error = np.load(\"./lc.npy\")  # mock lc with gaussian flares\n",
+"time += 55000  # e.g. MJD\n",
 "lc = LightCurve(time, flux, flux_error)\n",
 "lc.plot_lc()\n",
 "plt.legend()"
@@ -122,8 +127,10 @@
 }
 ],
 "source": [
-"block_pbin, block_val, block_val_error, edge_index, edges = lc.get_bblocks(p0_value=0.05)\n",
-"#for more information on edges and edge_index see Appendix A2\n",
+"block_pbin, block_val, block_val_error, edge_index, edges = lc.get_bblocks(\n",
+"    p0_value=0.05\n",
+")\n",
+"# for more information on edges and edge_index see Appendix A2\n",
 "lc.plot_bblocks()\n",
 "plt.legend()"
 ]
@@ -173,8 +180,8 @@
 }
 ],
 "source": [
-"hops_bl = lc.find_hop('baseline')\n",
-"print('hops_bl is a list of Hopjects with interesting attributes, see HOP.py')\n",
+"hops_bl = lc.find_hop(\"baseline\")\n",
+"print(\"hops_bl is a list of Hopjects with interesting attributes, see HOP.py\")\n",
 "print(hops_bl)\n",
 "\n",
 "hop = hops_bl[0]\n",
@@ -220,8 +227,8 @@
 }
 ],
 "source": [
-"hops_half = lc.find_hop('half', lc_edges='add')\n",
-"#one could add the edges of the light curve as start and end of HOP if there is a peak\n",
+"hops_half = lc.find_hop(\"half\", lc_edges=\"add\")\n",
+"# one could add the edges of the light curve as start and end of HOP if there is a peak\n",
 "\n",
 "hop = hops_half[0]\n",
 "print(hop.start_time, hop.peak_time, hop.end_time)\n",
@@ -266,9 +273,9 @@
 }
 ],
 "source": [
-"hops_flip = lc.find_hop('flip', lc_edges='neglect') \n",
-"#or one could conservatively neglect the edges of the light curve \n",
-"#because it is uncertain whether there would be more decrease/increase of flux\n",
+"hops_flip = lc.find_hop(\"flip\", lc_edges=\"neglect\")\n",
+"# or one could conservatively neglect the edges of the light curve\n",
+"# because it is uncertain whether there would be more decrease/increase of flux\n",
 "\n",
 "hop = hops_flip[0]\n",
 "print(hop.start_time, hop.peak_time, hop.end_time)\n",
@@ -323,7 +330,7 @@
 }
 ],
 "source": [
-"hops_sharp = lc.find_hop('sharp', lc_edges='neglect') \n",
+"hops_sharp = lc.find_hop(\"sharp\", lc_edges=\"neglect\")\n",
 "\n",
 "hop = hops_sharp[0]\n",
 "print(hop.start_time, hop.peak_time, hop.end_time)\n",
@@ -361,7 +368,7 @@
 }
 ],
 "source": [
-"lc.plot_all_hop(lc_edges='neglect')"
+"lc.plot_all_hop(lc_edges=\"neglect\")"
 ]
 },
 {
@@ -400,7 +407,7 @@
 ],
 "source": [
 "hop = hops_flip[3]\n",
-"print('asymmetry measure: ', hop.asym)\n",
+"print(\"asymmetry measure: \", hop.asym)\n",
 "hop.plot_hop()"
 ]
 },
@@ -549,30 +556,35 @@
 }
 ],
 "source": [
-"time = np.arange(0, 2000, 1) # e.g. daily binning for 12 years = 4380 days of observation\n",
-"N_b=len(time) # number of bins\n",
+"time = np.arange(\n",
+"    0, 2000, 1\n",
+")  # e.g. daily binning for 12 years = 4380 days of observation\n",
+"N_b = len(time)  # number of bins\n",
 "flux = np.ones(N_b)\n",
-"rel_flux_error = 0.1 # flux error ~ 5% \n",
-"noise = np.random.normal(0, 0.07, N_b) # white noise backround with strength \n",
+"rel_flux_error = 0.1  # flux error ~ 5%\n",
+"noise = np.random.normal(0, 0.07, N_b)  # white noise backround with strength\n",
 "\n",
 "# maximum values for gaussian flares\n",
 "amp_max = 100\n",
 "cen_max = 4380\n",
 "wid_max = 40\n",
 "\n",
+"\n",
 "def gaussian(x, amp, cen, wid):\n",
 "    \"1-d Gaussian: gaussian(x=data, amp, cen, wid)\"\n",
-"    return (amp / (np.sqrt(2*np.pi) * wid)) * np.exp(-(x-cen)**2 / (2*wid**2))\n",
+"    return (amp / (np.sqrt(2 * np.pi) * wid)) * np.exp(-((x - cen) ** 2) / (2 * wid**2))\n",
+"\n",
 "\n",
 "def rand_gauss(x, amp_max, cen_max, wid_max):\n",
 "    \"random 1-d Gaussian\"\n",
 "    \"takes maximum values for parameters and multiplies each with uniform random number [0,1) ... 0 aba kee1\"\n",
 "    \"https://numpy.org/doc/stable/reference/random/generated/numpy.random.random.html\"\n",
 "    amp = np.random.random(1) * amp_max\n",
 "    cen = np.random.random(1) * cen_max\n",
-"    #wid = np.random.random(1) * wid_max\n",
-"    wid = amp*0.25 # assume that flares are self similar; always same shape\n",
-"    return(gaussian(x, amp, cen, wid))\n",
+"    # wid = np.random.random(1) * wid_max\n",
+"    wid = amp * 0.25  # assume that flares are self similar; always same shape\n",
+"    return gaussian(x, amp, cen, wid)\n",
+"\n",
 "\n",
 "lc_array = np.zeros(10, dtype=object)\n",
 "for l in range(10):\n",
@@ -584,15 +596,15 @@
 "    flux_error = flux * rel_flux_error\n",
 "    lc = LightCurve(time, flux, flux_error)\n",
 "    lc.get_bblocks()\n",
-"    lc.find_hop('flip')\n",
+"    lc.find_hop(\"flip\")\n",
 "    lc_array[l] = lc\n",
-"    \n",
+"\n",
 "    plt.figure(l)\n",
 "    lc.plot_lc()\n",
 "    lc.plot_bblocks()\n",
 "    lc.plot_hop()\n",
-"    plt.xlabel('Time')\n",
-"    plt.ylabel('Flux')"
+"    plt.xlabel(\"Time\")\n",
+"    plt.ylabel(\"Flux\")"
 ]
 },
 {
@@ -633,11 +645,11 @@
 }
 ],
 "source": [
-"lcs = LC_Set(lc_array, block_min = 2) #single block flares do not resolve asymmetry\n",
+"lcs = LC_Set(lc_array, block_min=2)  # single block flares do not resolve asymmetry\n",
 "lcs.plot_asym()\n",
 "plt.legend()\n",
-"plt.ylabel('probability density')\n",
-"plt.xlabel('asymmetry measure')\n",
+"plt.ylabel(\"probability density\")\n",
+"plt.xlabel(\"asymmetry measure\")\n",
 "# for histogram binning see https://docs.astropy.org/en/stable/api/astropy.visualization.hist.html"
 ]
 },
@@ -712,27 +724,29 @@
 }
 ],
 "source": [
-"time, flux, flux_error = np.load('./lc.npy') #mock lc with gaussian flares\n",
-"time += 55000 #e.g. MJD\n",
-"fig = plt.figure(0,(10,3))\n",
-"time_one = np.ones(len(time)) #used for plotting\n",
+"time, flux, flux_error = np.load(\"./lc.npy\")  # mock lc with gaussian flares\n",
+"time += 55000  # e.g. MJD\n",
+"fig = plt.figure(0, (10, 3))\n",
+"time_one = np.ones(len(time))  # used for plotting\n",
 "ax0 = fig.add_subplot(211)\n",
-"plt.plot(time,time_one,linewidth=0,label='data bin',marker='+',c = 'blue', markersize=10)\n",
-"plt.axvline(edges[0],color='red',label='edges',linewidth=1)\n",
-"for i in range(1,len(edges)):\n",
-"    plt.axvline(edges[i],color='red',linewidth=1)\n",
-"plt.xlabel('Time/ MJD', fontsize = 14)\n",
+"plt.plot(\n",
+"    time, time_one, linewidth=0, label=\"data bin\", marker=\"+\", c=\"blue\", markersize=10\n",
+")\n",
+"plt.axvline(edges[0], color=\"red\", label=\"edges\", linewidth=1)\n",
+"for i in range(1, len(edges)):\n",
+"    plt.axvline(edges[i], color=\"red\", linewidth=1)\n",
+"plt.xlabel(\"Time/ MJD\", fontsize=14)\n",
 "plt.yticks([])\n",
 "ax0.xaxis.tick_top()\n",
-"ax0.xaxis.set_label_position('top') \n",
+"ax0.xaxis.set_label_position(\"top\")\n",
 "plt.legend()\n",
 "\n",
 "ax1 = fig.add_subplot(212)\n",
-"plt.plot(time_one,linewidth=0,label='data bin',marker='+',c = 'blue', markersize=10)\n",
-"plt.axvline(edge_index[0],color='green',label='edge_index',linewidth=1)\n",
-"for i in range(1,len(edge_index)):\n",
-"    plt.axvline(edge_index[i],color='green',linewidth=1)\n",
-"plt.xlabel('Time array index', fontsize = 14)\n",
+"plt.plot(time_one, linewidth=0, label=\"data bin\", marker=\"+\", c=\"blue\", markersize=10)\n",
+"plt.axvline(edge_index[0], color=\"green\", label=\"edge_index\", linewidth=1)\n",
+"for i in range(1, len(edge_index)):\n",
+"    plt.axvline(edge_index[i], color=\"green\", linewidth=1)\n",
+"plt.xlabel(\"Time array index\", fontsize=14)\n",
 "plt.yticks([])\n",
 "plt.legend()\n",
 "plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.2, hspace=0)"

pyproject.toml

Lines changed: 1 addition & 6 deletions
@@ -27,7 +27,7 @@ classifiers = [
 
 requires-python = ">=3.10"
 
-#list of dependencies. 
+#list of dependencies.
 #do not list imports from standard library like logging or pickle
 dependencies = [
   "astropy",
@@ -87,8 +87,3 @@ isort.required-imports = ["from __future__ import annotations"]
 
 [tool.ruff.lint.per-file-ignores]
 "tests/**" = ["T20"]
-
-
-
-
-
