Skip to content

Commit 6ed1813

Browse files
committed
rename attribute to batch_logl_bounds to avoid confusion with bounds
1 parent 1cdb34a commit 6ed1813

5 files changed

Lines changed: 32 additions & 28 deletions

File tree

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
1616
- Remove custom update_func
1717
- Remove hslice
1818
- Remove 'user-defined' proposal distribution
19+
- Change internal attribute from batch_bounds to batch_logl_bounds
1920
### Fixed
2021

2122
[2.1.5 - 2024-12-17]

py/dynesty/dynamicsampler.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -861,7 +861,7 @@ def results(self):
861861
for k in [
862862
'nc', 'v', 'id', 'batch', 'it', 'u', 'n', 'logwt', 'logl',
863863
'logvol', 'logz', 'logzvar', 'h', 'batch_nlive',
864-
'batch_bounds', 'blob'
864+
'batch_logl_bounds', 'blob'
865865
]:
866866
d[k] = np.array(self.saved_run[k])
867867

@@ -874,7 +874,7 @@ def results(self):
874874
results.append(('samples_' + k, d[k]))
875875
for k in [
876876
'logwt', 'logl', 'logvol', 'logz', 'batch_nlive',
877-
'batch_bounds', 'blob'
877+
'batch_logl_bounds', 'blob'
878878
]:
879879
results.append((k, d[k]))
880880
results.append(('logzerr', np.sqrt(d['logzvar'])))
@@ -1209,7 +1209,7 @@ def sample_initial(self,
12091209
dtype=int) # batch
12101210

12111211
self.saved_run['batch_nlive'].append(self.nlive_init) # initial nlive
1212-
self.saved_run['batch_bounds'].append(
1212+
self.saved_run['batch_logl_bounds'].append(
12131213
(-np.inf, np.inf)) # initial bounds
12141214

12151215
self.internal_state = DynamicSamplerStatesEnum.BASE_DONE
@@ -1473,7 +1473,7 @@ def combine_runs(self):
14731473
nnew = len(new_d['n'])
14741474
llmin, llmax = self.new_logl_min, self.new_logl_max
14751475

1476-
old_batch_bounds = self.saved_run['batch_bounds']
1476+
old_batch_logl_bounds = self.saved_run['batch_logl_bounds']
14771477
old_batch_nlive = self.saved_run['batch_nlive']
14781478
# Reset saved results.
14791479
del self.saved_run
@@ -1587,7 +1587,8 @@ def combine_runs(self):
15871587

15881588
# Saved batch quantities.
15891589
self.saved_run['batch_nlive'] = old_batch_nlive + [(max(new_d['n']))]
1590-
self.saved_run['batch_bounds'] = old_batch_bounds + [((llmin, llmax))]
1590+
self.saved_run['batch_logl_bounds'] = old_batch_logl_bounds + [(
1591+
(llmin, llmax))]
15911592

15921593
def run_nested(self,
15931594
nlive_init=None,

py/dynesty/dynesty.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -528,7 +528,6 @@ def _common_sampler_init(*,
528528
sample, InternalSampler):
529529
raise ValueError("Unknown sampling method: '{0}'".format(sample))
530530

531-
# TODO change this check to deal with new sampler interface
532531
if ncdim != ndim and (isinstance(sample, SliceSampler)
533532
or isinstance(sample, RSliceSampler)):
534533
raise ValueError('ncdim unsupported for slice sampling')

py/dynesty/utils.py

Lines changed: 20 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -283,7 +283,7 @@ def __init__(self, dynamic=False):
283283
# these are special since their length
284284
# is == the number of batches
285285
'batch_nlive', # number of live points added in batch
286-
'batch_bounds' # loglikelihood bounds used in batch
286+
'batch_logl_bounds' # loglikelihood bounds used in batch
287287
])
288288
for k in keys:
289289
D[k] = []
@@ -606,7 +606,7 @@ def print_fn_fallback(results,
606606
'niter'),
607607
('samples_batch', 'array[int]',
608608
"Tracks the batch during which the samples were proposed", 'niter'),
609-
('batch_bounds', 'array[tuple]',
609+
('batch_logl_bounds', 'array[tuple]',
610610
"The log-likelihood bounds used to run a batch.", 'nbatch'),
611611
('batch_nlive', 'array[int]',
612612
"The number of live points used for given batch", 'nbatch'),
@@ -1363,7 +1363,7 @@ def resample_run(res, rstate=None, return_idx=False):
13631363
# Check if the number of live points explicitly changes.
13641364
samples_n = res.samples_n
13651365
samples_batch = res.samples_batch
1366-
batch_bounds = res.batch_bounds
1366+
batch_logl_bounds = res.batch_logl_bounds
13671367
added_final_live = True
13681368
else:
13691369
# If the number of live points is constant, compute `samples_n` and
@@ -1380,8 +1380,8 @@ def resample_run(res, rstate=None, return_idx=False):
13801380
raise ValueError("Final number of samples differs from number of "
13811381
"iterations and number of live points.")
13821382
samples_batch = np.zeros(len(samples_n), dtype=int)
1383-
batch_bounds = np.array([(-np.inf, np.inf)])
1384-
batch_llmin = batch_bounds[:, 0]
1383+
batch_logl_bounds = np.array([(-np.inf, np.inf)])
1384+
batch_llmin = batch_logl_bounds[:, 0]
13851385
# Identify unique particles that make up each strand.
13861386
ids = np.unique(res.samples_id)
13871387

@@ -1629,7 +1629,7 @@ def unravel_run(res, print_progress=True):
16291629
# Add on batch information (if available).
16301630
try:
16311631
rdict['samples_batch'] = res.samples_batch[strand]
1632-
rdict['batch_bounds'] = res.batch_bounds
1632+
rdict['batch_logl_bounds'] = res.batch_logl_bounds
16331633
except AttributeError:
16341634
pass
16351635

@@ -1858,15 +1858,15 @@ def _prepare_for_merge(res):
18581858
"iterations and number of live points in `res1`.")
18591859

18601860
# Batch information (if available).
1861-
- # note we also check for existence of batch_bounds
1861+
+ # note we also check for existence of batch_logl_bounds
18621862
# because unravel_run makes 'static' runs of 1 livepoint
18631863
# but some will have bounds
1864-
if res.isdynamic() or 'batch_bounds' in res.keys():
1864+
if res.isdynamic() or 'batch_logl_bounds' in res.keys():
18651865
run_info['batch'] = res.samples_batch
1866-
run_info['bounds'] = res.batch_bounds
1866+
run_info['batch_logl_bounds'] = res.batch_logl_bounds
18671867
else:
18681868
run_info['batch'] = np.zeros(nrun, dtype=int)
1869-
run_info['bounds'] = np.array([(-np.inf, np.inf)])
1869+
run_info['batch_logl_bounds'] = np.array([(-np.inf, np.inf)])
18701870
return run_nlive, run_info
18711871

18721872

@@ -1910,20 +1910,22 @@ def _merge_two(res1, res2, compute_aux=False):
19101910

19111911
# These are merged batch bounds
19121912
combined_bounds = np.unique(np.concatenate(
1913-
(base_info['bounds'], new_info['bounds'])),
1913+
(base_info['batch_logl_bounds'], new_info['batch_logl_bounds'])),
19141914
axis=0)
19151915
# Here we try to find where the new bounds are in the combined bounds
19161916
new_bound_map = {}
19171917
base_bound_map = {}
1918-
for i in range(len(new_info['bounds'])):
1918+
for i in range(len(new_info['batch_logl_bounds'])):
19191919
new_bound_map[i] = np.where(
1920-
np.all(new_info['bounds'][i] == combined_bounds, axis=1))[0][0]
1921-
for i in range(len(base_info['bounds'])):
1920+
np.all(new_info['batch_logl_bounds'][i] == combined_bounds,
1921+
axis=1))[0][0]
1922+
for i in range(len(base_info['batch_logl_bounds'])):
19221923
base_bound_map[i] = np.where(
1923-
np.all(base_info['bounds'][i] == combined_bounds, axis=1))[0][0]
1924+
np.all(base_info['batch_logl_bounds'][i] == combined_bounds,
1925+
axis=1))[0][0]
19241926

1925-
base_lowedge = np.min(base_info['bounds'][base_info['batch']])
1926-
new_lowedge = np.min(new_info['bounds'][new_info['batch']])
1927+
base_lowedge = np.min(base_info['batch_logl_bounds'][base_info['batch']])
1928+
new_lowedge = np.min(new_info['batch_logl_bounds'][new_info['batch']])
19271929

19281930
# Iteratively walk through both set of samples to simulate
19291931
# a combined run.
@@ -2021,7 +2023,7 @@ def _merge_two(res1, res2, compute_aux=False):
20212023
samples=np.asarray(combined_info['v']),
20222024
logl=np.asarray(combined_info['logl']),
20232025
logvol=np.asarray(combined_info['logvol']),
2024-
batch_bounds=np.asarray(combined_bounds),
2026+
batch_logl_bounds=np.asarray(combined_bounds),
20252027
blob=np.asarray(combined_info['blob']))
20262028

20272029
for curk in ['id', 'it', 'n', 'u', 'batch']:

tests/test_misc.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -64,8 +64,8 @@ def test_maxcall():
6464
sampler.run_nested(dlogz_init=1, maxcall=1000, print_progress=printing)
6565

6666

67-
@pytest.mark.parametrize('dynamic,with_pool',
68-
itertools.product([True, False], [True, False]))
67+
@pytest.mark.parametrize('dynamic', [True, False])
68+
@pytest.mark.parametrize('with_pool', [True, False])
6969
def test_pickle(dynamic, with_pool):
7070
# test of pickling functionality
7171
ndim = 2
@@ -501,7 +501,7 @@ def test_maxiter_batch():
501501
# I am finding the first iteration with the batch
502502
# [-inf, something]. Then I'm setting maxiter to be just above
503503
# that iteration
504-
b1 = np.where(~np.isfinite(dres2.batch_bounds[:, 0]))[0][1]
504+
b1 = np.where(~np.isfinite(dres2.batch_logl_bounds[:, 0]))[0][1]
505505
maxiter = np.min(
506506
np.array(dsampler2.saved_run['it'])[
507507
dsampler2.saved_run['batch'] == b1]) + nlive // 2
@@ -603,7 +603,8 @@ def test_verify_batch():
603603
1].min() > d0.results['samples_it'].max()
604604
# check that the iterations are set correctly
605605
assert d1.ncall > d0.ncall
606-
assert len(d1.results.batch_bounds) > len(d0.results.batch_bounds)
606+
assert len(d1.results.batch_logl_bounds) > len(
607+
d0.results.batch_logl_bounds)
607608

608609

609610
@pytest.mark.parametrize('dynamic', [False, True])

0 commit comments

Comments
 (0)