Measuring placebo effect on hunger

In this example we show how to measure the impact of a placebo on hunger.

We model how hunger, \(h(t)\), changes over the course of \(T\) hours after taking a tablet, given a communication-based intervention \(i \in \{0, 1\}\).
Each participant does this twice, once for each value of \(i\).
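Concretely, the prior model below assumes that the mean hunger on trial \(k\) interpolates linearly between an initial value \(h_{0,k}\) and a final value \(h_{T,k}\),

\[
\bar{h}_k(t) = h_{0,k} + \frac{t}{T}\left(h_{T,k} - h_{0,k}\right), \qquad t = 0, 0.5, \ldots, T,
\]

and that each measurement is this mean plus truncated-normal noise confined to the hunger scale \([0, 10]\).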
[1]:

import os

import jax.random

# Create multiple host (CPU) devices and enable 64-bit precision.
os.environ["XLA_FLAGS"] = "--xla_force_host_platform_device_count=6"
jax.config.update("jax_enable_x64", True)

import numpy as np
import tensorflow_probability.substrates.jax as tfp
from jax import numpy as jnp

from jaxns import Prior

tfpd = tfp.distributions
[2]:
from jaxns.framework.ops import simulate_prior_model
from jaxns import Model
from typing import NamedTuple

T = 3
dt_measure = 0.5
num_measurements = int(T / dt_measure) + 1
measure_times = np.linspace(0., T, num_measurements)
num_patients = 60

trial_mapping = dict()

count = 0
for day in [0, 1]:
    intervention = day == 1
    for patient_id in range(num_patients):
        trial_mapping[count] = (patient_id, day, intervention)
        count += 1

num_trials = len(trial_mapping)

intervention_mask = np.zeros((num_trials,), np.bool_)

for idx, (patient_id, day, intervention) in trial_mapping.items():
    if intervention:
        intervention_mask[idx] = True


class Params(NamedTuple):
    mean_hunger: jnp.ndarray  # mean hunger(t)


def prior_model():
    noise_scale = yield Prior(tfpd.TruncatedNormal(1., 0.5, low=0.5, high=2.), name='noise_scale')

    prior_h0_mean = yield Prior(tfpd.TruncatedNormal(5., 2., low=0., high=10.), name='prior_h0_mean')
    prior_h0_scale = yield Prior(tfpd.Uniform(0.5, 3.), name='prior_h0_scale')

    prior_h_change_per_hour = yield Prior(tfpd.Uniform(0.5, 2.))
    prior_h_change_control = yield Prior(prior_h_change_per_hour * T, name='prior_h_change_control')

    prior_h_change_intervention_factor = yield Prior(tfpd.Uniform(0.1, 1.), name='prior_h_change_intervention_factor')
    prior_h_change_intervention = prior_h_change_intervention_factor * prior_h_change_control

    # Choose right change for intervention.
    prior_h_change = jnp.where(intervention_mask, prior_h_change_intervention, prior_h_change_control)

    h0 = yield Prior(tfpd.TruncatedNormal(prior_h0_mean, prior_h0_scale,
                                          low=jnp.zeros((num_trials,)), high=10.),
                     name="h0")
    hT = yield Prior(tfpd.TruncatedNormal(h0 + prior_h_change, 1.,
                                          low=jnp.zeros((num_trials,)), high=10.),
                     name="hT")
    mean_hunger = h0[:, None] + measure_times * (hT[:, None] - h0[:, None]) / T
    patient_params = Params(
        mean_hunger=mean_hunger
    )
    return patient_params, noise_scale
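
The prior is fairly high dimensional: every trial gets its own h0 and hT on top of the shared hyperparameters. As a quick shape check, here is a minimal sketch (assuming the cell above has been executed; simulate_prior_model is the helper imported at the top of the cell):

# Sketch: draw one sample from the prior model and inspect the shapes.
# With T = 3 and dt_measure = 0.5 there are 7 measurement times, and
# 60 patients x 2 days give num_trials = 120.
(sample_params, sample_noise_scale), _ = simulate_prior_model(key=jax.random.PRNGKey(0),
                                                              prior_model=prior_model)
print(sample_params.mean_hunger.shape)  # expected: (num_trials, num_measurements) == (120, 7)
print(jnp.shape(sample_noise_scale))    # scalar noise scale shared across all trials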

Simulate Data

[3]:


(patient_params, noise_scale), collected_params = simulate_prior_model(key=jax.random.PRNGKey(42),
                                                                        prior_model=prior_model)

measured_hunger = tfpd.TruncatedNormal(patient_params.mean_hunger, noise_scale, low=0., high=10.).sample(
    seed=jax.random.PRNGKey(42))


def log_likelihood(patient_params: Params, noise_scale):
    return jnp.sum(
        tfpd.TruncatedNormal(patient_params.mean_hunger, noise_scale, low=0., high=10.).log_prob(measured_hunger))


model = Model(prior_model=prior_model, log_likelihood=log_likelihood)

import pylab as plt

plt.plot(measure_times, measured_hunger[~intervention_mask, :].T, c='k', alpha=0.1)
plt.plot(measure_times, measured_hunger[intervention_mask, :].T, c='r', alpha=0.1)
plt.ylim(0., 10.)
plt.show()
../_images/examples_placebo_effect_4_0.png
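
A quick sanity check (a sketch, assuming the objects defined in the cell above): the likelihood can be evaluated directly at the parameters that generated the data, and should return a single finite value.

# Sketch: evaluate the likelihood at the simulated (true) parameters.
ll = log_likelihood(patient_params, noise_scale)
print(ll, jnp.isfinite(ll))  # one scalar log-likelihood, summed over all trials and measurement times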

Define likelihood

Make a model that is unaware of the effect

[4]:

def unaware_prior_model():
    noise_scale = yield Prior(tfpd.TruncatedNormal(1., 0.5, low=0.5, high=2.), name='noise_scale')

    prior_h0_mean = yield Prior(tfpd.TruncatedNormal(5., 2., low=0., high=10.), name='prior_h0_mean')
    prior_h0_scale = yield Prior(tfpd.Uniform(0.5, 3.), name='prior_h0_scale')

    prior_h_change_per_hour = yield Prior(tfpd.Uniform(0.5, 2.))
    prior_h_change_control = yield Prior(prior_h_change_per_hour * T, name='prior_h_change_control')

    prior_h_change = prior_h_change_control

    h0 = yield Prior(tfpd.TruncatedNormal(prior_h0_mean, prior_h0_scale,
                                          low=jnp.zeros((num_trials,)), high=10.),
                     name="h0").parametrised()
    hT = yield Prior(tfpd.TruncatedNormal(h0 + prior_h_change, 1.,
                                          low=jnp.zeros((num_trials,)), high=10.),
                     name="hT").parametrised()
    mean_hunger = h0[:, None] + measure_times * (hT[:, None] - h0[:, None]) / T
    patient_params = Params(
        mean_hunger=mean_hunger
    )
    return patient_params, noise_scale


unaware_model = Model(prior_model=unaware_prior_model, log_likelihood=log_likelihood)
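
Note that h0 and hT are wrapped with .parametrised(): they are treated as point estimates optimised during evidence maximisation rather than sampled by nested sampling, which keeps the sampled space small. A hedged check (this assumes the jaxns Model exposes a U_ndims attribute giving the number of sampled prior dimensions; if your version does not, skip it):

# Sketch (assumption: Model.U_ndims reports the number of sampled prior dimensions).
# The unaware model parametrises h0 and hT, so only the few hyperparameters are sampled.
print(model.U_ndims)          # full Bayesian model from above: hyperparameters plus h0 and hT
print(unaware_model.U_ndims)  # unaware model: hyperparameters only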
[5]:
from jaxns.experimental import EvidenceMaximisation

em = EvidenceMaximisation(model=unaware_model, ns_kwargs=dict(max_samples=1e5), max_num_epochs=10, gtol=0.1, momentum=0.)

[6]:
from jaxns import summary, plot_diagnostics, plot_cornerplot

ns_results, params = em.train(num_steps=10)
summary(ns_results, with_parametrised=True)
plot_diagnostics(ns_results)
plot_cornerplot(ns_results, variables=['noise_scale', 'prior_h0_mean', 'prior_h0_scale', 'prior_h_change_control'],
                with_parametrised=True)
Convergence achieved at step 7.:  70%|███████   | 7/10 [07:16<03:06, 62.32s/it]
--------
Termination Conditions:
Small remaining evidence
--------
likelihood evals: 238558
samples: 2880
phantom samples: 0
likelihood evals / sample: 82.8
phantom fraction (%): 0.0%
--------
logZ=-896.52 +- 0.33
H=-13.32
ESS=274
--------
h0[#]: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
h0[0]: 4.871 +- 0.047 | 4.811 / 4.873 / 4.93 | 4.876 | 4.873
h0[1]: 1.787 +- 0.055 | 1.713 / 1.787 / 1.856 | 1.794 | 1.793
h0[2]: 6.936 +- 0.062 | 6.867 / 6.928 / 7.013 | 6.94 | 6.935
h0[3]: 3.744 +- 0.045 | 3.68 / 3.745 / 3.799 | 3.749 | 3.747
h0[4]: 3.75 +- 0.045 | 3.686 / 3.751 / 3.805 | 3.756 | 3.753
h0[5]: 3.307 +- 0.046 | 3.242 / 3.307 / 3.361 | 3.313 | 3.311
h0[6]: 7.193 +- 0.065 | 7.121 / 7.185 / 7.274 | 7.196 | 7.191
h0[7]: 5.436 +- 0.05 | 5.377 / 5.432 / 5.501 | 5.441 | 5.437
h0[8]: 4.781 +- 0.047 | 4.722 / 4.782 / 4.838 | 4.786 | 4.783
h0[9]: 0.918 +- 0.061 | 0.837 / 0.92 / 0.999 | 0.926 | 0.925
h0[10]: 4.32 +- 0.046 | 4.259 / 4.321 / 4.374 | 4.326 | 4.323
h0[11]: 1.373 +- 0.058 | 1.296 / 1.374 / 1.45 | 1.381 | 1.38
h0[12]: 6.125 +- 0.055 | 6.061 / 6.121 / 6.196 | 6.129 | 6.125
h0[13]: 7.873 +- 0.072 | 7.788 / 7.864 / 7.961 | 7.876 | 7.87
h0[14]: 3.34 +- 0.046 | 3.276 / 3.34 / 3.394 | 3.346 | 3.344
h0[15]: 2.996 +- 0.047 | 2.93 / 2.996 / 3.052 | 3.002 | 3.0
h0[16]: 0.597 +- 0.061 | 0.517 / 0.6 / 0.679 | 0.604 | 0.603
h0[17]: 4.338 +- 0.046 | 4.276 / 4.339 / 4.392 | 4.343 | 4.34
h0[18]: 2.541 +- 0.049 | 2.471 / 2.541 / 2.602 | 2.548 | 2.546
h0[19]: 6.995 +- 0.063 | 6.925 / 6.987 / 7.073 | 6.998 | 6.993
h0[20]: 1.252 +- 0.059 | 1.173 / 1.252 / 1.331 | 1.26 | 1.259
h0[21]: 3.737 +- 0.045 | 3.674 / 3.738 / 3.793 | 3.743 | 3.74
h0[22]: 4.384 +- 0.046 | 4.323 / 4.385 / 4.438 | 4.389 | 4.386
h0[23]: 8.834 +- 0.083 | 8.742 / 8.826 / 8.939 | 8.836 | 8.83
h0[24]: 1.596 +- 0.056 | 1.52 / 1.595 / 1.67 | 1.603 | 1.602
h0[25]: 1.394 +- 0.058 | 1.316 / 1.395 / 1.47 | 1.402 | 1.401
h0[26]: 8.093 +- 0.074 | 8.005 / 8.084 / 8.184 | 8.096 | 8.09
h0[27]: 2.952 +- 0.047 | 2.885 / 2.952 / 3.009 | 2.958 | 2.956
h0[28]: 1.627 +- 0.056 | 1.551 / 1.626 / 1.701 | 1.634 | 1.633
h0[29]: 3.773 +- 0.045 | 3.709 / 3.775 / 3.829 | 3.779 | 3.777
h0[30]: 1.18 +- 0.059 | 1.1 / 1.18 / 1.258 | 1.187 | 1.187
h0[31]: 4.476 +- 0.046 | 4.415 / 4.478 / 4.529 | 4.481 | 4.478
h0[32]: 1.65 +- 0.056 | 1.574 / 1.649 / 1.723 | 1.657 | 1.656
h0[33]: 7.4e-05 +- 3e-05 | 4.3e-05 / 7.1e-05 / 0.000114 | 7.2e-05 | 7.2e-05
h0[34]: 0.6 +- 0.061 | 0.52 / 0.604 / 0.683 | 0.607 | 0.607
h0[35]: 4.126 +- 0.045 | 4.064 / 4.127 / 4.181 | 4.132 | 4.129
h0[36]: 1.086 +- 0.06 | 1.006 / 1.087 / 1.165 | 1.093 | 1.093
h0[37]: 5.219 +- 0.049 | 5.159 / 5.217 / 5.281 | 5.224 | 5.22
h0[38]: 5.857 +- 0.053 | 5.795 / 5.853 / 5.926 | 5.861 | 5.857
h0[39]: 2.179 +- 0.052 | 2.106 / 2.179 / 2.244 | 2.186 | 2.184
h0[40]: 5.477 +- 0.05 | 5.418 / 5.473 / 5.542 | 5.481 | 5.478
h0[41]: 3.108 +- 0.047 | 3.044 / 3.108 / 3.164 | 3.114 | 3.112
h0[42]: 1.213 +- 0.059 | 1.133 / 1.213 / 1.291 | 1.22 | 1.219
h0[43]: 2.957 +- 0.047 | 2.89 / 2.957 / 3.014 | 2.963 | 2.961
h0[44]: 4.54 +- 0.046 | 4.479 / 4.542 / 4.593 | 4.545 | 4.542
h0[45]: 4.419 +- 0.046 | 4.358 / 4.42 / 4.472 | 4.424 | 4.421
h0[46]: 0.759 +- 0.062 | 0.678 / 0.762 / 0.841 | 0.766 | 0.766
h0[47]: 1.599 +- 0.056 | 1.524 / 1.599 / 1.674 | 1.607 | 1.606
h0[48]: 5.89 +- 0.053 | 5.828 / 5.887 / 5.959 | 5.895 | 5.891
h0[49]: 3.819 +- 0.045 | 3.754 / 3.82 / 3.874 | 3.824 | 3.822
h0[50]: 2.46 +- 0.05 | 2.388 / 2.46 / 2.522 | 2.466 | 2.465
h0[51]: 6.357 +- 0.057 | 6.291 / 6.351 / 6.43 | 6.361 | 6.356
h0[52]: 4.433 +- 0.046 | 4.372 / 4.434 / 4.486 | 4.438 | 4.435
h0[53]: 5.634 +- 0.051 | 5.574 / 5.63 / 5.7 | 5.639 | 5.635
h0[54]: 6.269 +- 0.056 | 6.204 / 6.264 / 6.341 | 6.273 | 6.269
h0[55]: 1.154 +- 0.06 | 1.074 / 1.155 / 1.233 | 1.162 | 1.161
h0[56]: 4.785 +- 0.047 | 4.725 / 4.785 / 4.841 | 4.79 | 4.786
h0[57]: 7.38 +- 0.067 | 7.304 / 7.372 / 7.464 | 7.383 | 7.378
h0[58]: 4.229 +- 0.046 | 4.167 / 4.23 / 4.283 | 4.234 | 4.232
h0[59]: 1.758 +- 0.055 | 1.683 / 1.757 / 1.827 | 1.765 | 1.764
h0[60]: 8.673 +- 0.081 | 8.582 / 8.664 / 8.774 | 8.675 | 8.669
h0[61]: 4.555 +- 0.046 | 4.494 / 4.556 / 4.608 | 4.56 | 4.557
h0[62]: 3.835 +- 0.045 | 3.77 / 3.836 / 3.89 | 3.84 | 3.838
h0[63]: 2.257 +- 0.051 | 2.183 / 2.257 / 2.321 | 2.264 | 2.263
h0[64]: 6.299 +- 0.056 | 6.234 / 6.294 / 6.372 | 6.303 | 6.299
h0[65]: 4.59 +- 0.046 | 4.53 / 4.592 / 4.644 | 4.595 | 4.592
h0[66]: 2.236 +- 0.051 | 2.162 / 2.236 / 2.3 | 2.243 | 2.241
h0[67]: 2.317 +- 0.051 | 2.243 / 2.317 / 2.38 | 2.324 | 2.322
h0[68]: 2.502 +- 0.05 | 2.431 / 2.502 / 2.563 | 2.509 | 2.507
h0[69]: 2.526 +- 0.05 | 2.456 / 2.526 / 2.587 | 2.533 | 2.531
h0[70]: 5.692 +- 0.052 | 5.633 / 5.688 / 5.759 | 5.697 | 5.693
h0[71]: 6.454 +- 0.058 | 6.387 / 6.447 / 6.527 | 6.458 | 6.453
h0[72]: 3.457 +- 0.046 | 3.395 / 3.456 / 3.513 | 3.463 | 3.46
h0[73]: 4.898 +- 0.047 | 4.837 / 4.899 / 4.957 | 4.903 | 4.9
h0[74]: 3.957 +- 0.045 | 3.893 / 3.958 / 4.012 | 3.962 | 3.96
h0[75]: 0.565 +- 0.061 | 0.485 / 0.568 / 0.647 | 0.571 | 0.571
h0[76]: 6.079 +- 0.055 | 6.015 / 6.075 / 6.149 | 6.083 | 6.079
h0[77]: 2.987 +- 0.047 | 2.921 / 2.988 / 3.044 | 2.994 | 2.992
h0[78]: 3.672 +- 0.046 | 3.61 / 3.672 / 3.728 | 3.678 | 3.675
h0[79]: 4.507 +- 0.046 | 4.447 / 4.509 / 4.56 | 4.513 | 4.509
h0[80]: 2.256 +- 0.051 | 2.182 / 2.256 / 2.32 | 2.263 | 2.262
h0[81]: 6.67 +- 0.06 | 6.603 / 6.661 / 6.744 | 6.673 | 6.669
h0[82]: 4.034 +- 0.045 | 3.971 / 4.035 / 4.089 | 4.04 | 4.037
h0[83]: 3.348 +- 0.046 | 3.284 / 3.348 / 3.402 | 3.354 | 3.352
h0[84]: 5.585 +- 0.051 | 5.526 / 5.582 / 5.651 | 5.59 | 5.586
h0[85]: 7.279 +- 0.066 | 7.205 / 7.27 / 7.361 | 7.282 | 7.277
h0[86]: 4.713 +- 0.047 | 4.653 / 4.714 / 4.768 | 4.718 | 4.715
h0[87]: 5.476 +- 0.05 | 5.416 / 5.471 / 5.541 | 5.48 | 5.476
h0[88]: 9.999997 +- 6.1e-05 | 9.999955 / 10.000001 / 10.000049 | 9.999957 | 9.999998
h0[89]: 4.023 +- 0.045 | 3.96 / 4.025 / 4.078 | 4.029 | 4.026
h0[90]: 0.809 +- 0.062 | 0.728 / 0.812 / 0.892 | 0.817 | 0.816
h0[91]: 7.08 +- 0.064 | 7.008 / 7.072 / 7.159 | 7.083 | 7.078
h0[92]: 3.648 +- 0.046 | 3.587 / 3.648 / 3.704 | 3.654 | 3.652
h0[93]: 1.057 +- 0.06 | 0.977 / 1.058 / 1.137 | 1.065 | 1.064
h0[94]: 5.452 +- 0.05 | 5.393 / 5.448 / 5.517 | 5.456 | 5.452
h0[95]: 4.93 +- 0.047 | 4.868 / 4.931 / 4.989 | 4.935 | 4.932
h0[96]: 2.194 +- 0.052 | 2.121 / 2.194 / 2.259 | 2.201 | 2.2
h0[97]: 2.212 +- 0.051 | 2.138 / 2.212 / 2.276 | 2.219 | 2.217
h0[98]: 5.86 +- 0.053 | 5.799 / 5.857 / 5.929 | 5.865 | 5.86
h0[99]: 4.713 +- 0.047 | 4.654 / 4.714 / 4.768 | 4.718 | 4.715
h0[100]: 6.137 +- 0.055 | 6.072 / 6.133 / 6.208 | 6.141 | 6.137
h0[101]: 5.835 +- 0.053 | 5.774 / 5.832 / 5.904 | 5.84 | 5.836
h0[102]: 4.309 +- 0.046 | 4.248 / 4.31 / 4.363 | 4.315 | 4.312
h0[103]: 4.016 +- 0.045 | 3.953 / 4.017 / 4.071 | 4.022 | 4.019
h0[104]: 2.891 +- 0.048 | 2.823 / 2.891 / 2.948 | 2.897 | 2.895
h0[105]: 1.668 +- 0.055 | 1.592 / 1.667 / 1.74 | 1.675 | 1.674
h0[106]: 4.461 +- 0.046 | 4.401 / 4.464 / 4.515 | 4.467 | 4.463
h0[107]: 3.2 +- 0.047 | 3.136 / 3.199 / 3.254 | 3.206 | 3.204
h0[108]: 7.009 +- 0.063 | 6.939 / 7.001 / 7.087 | 7.012 | 7.007
h0[109]: 3.472 +- 0.046 | 3.411 / 3.471 / 3.528 | 3.478 | 3.476
h0[110]: 2.788 +- 0.048 | 2.721 / 2.787 / 2.846 | 2.795 | 2.793
h0[111]: 3.076 +- 0.047 | 3.012 / 3.076 / 3.132 | 3.083 | 3.08
h0[112]: 2.891 +- 0.048 | 2.823 / 2.891 / 2.948 | 2.897 | 2.895
h0[113]: 7.334 +- 0.066 | 7.259 / 7.325 / 7.417 | 7.337 | 7.332
h0[114]: 5.613 +- 0.051 | 5.553 / 5.609 / 5.679 | 5.617 | 5.613
h0[115]: 5.157 +- 0.049 | 5.096 / 5.155 / 5.217 | 5.161 | 5.158
h0[116]: 5.197 +- 0.049 | 5.137 / 5.195 / 5.258 | 5.202 | 5.198
h0[117]: 2.649 +- 0.049 | 2.582 / 2.65 / 2.709 | 2.656 | 2.654
h0[118]: 3.836 +- 0.045 | 3.771 / 3.838 / 3.891 | 3.842 | 3.839
h0[119]: 2.885 +- 0.048 | 2.817 / 2.884 / 2.942 | 2.891 | 2.889
--------
hT[#]: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
hT[0]: 9.618 +- 0.03 | 9.578 / 9.621 / 9.652 | 9.624 | 9.615
hT[1]: 6.304 +- 0.054 | 6.231 / 6.307 / 6.369 | 6.315 | 6.301
hT[2]: 8.873 +- 0.038 | 8.823 / 8.875 / 8.921 | 8.877 | 8.868
hT[3]: 7.203 +- 0.05 | 7.133 / 7.207 / 7.265 | 7.213 | 7.198
hT[4]: 7.653 +- 0.05 | 7.583 / 7.657 / 7.715 | 7.663 | 7.648
hT[5]: 4.511 +- 0.05 | 4.442 / 4.514 / 4.576 | 4.522 | 4.506
hT[6]: 9.99687 +- 0.00017 | 9.99665 / 9.99689 / 9.99709 | 9.9969 | 9.99686
hT[7]: 9.608 +- 0.027 | 9.572 / 9.611 / 9.638 | 9.613 | 9.606
hT[8]: 7.802 +- 0.052 | 7.731 / 7.807 / 7.863 | 7.811 | 7.796
hT[9]: 5.701 +- 0.059 | 5.626 / 5.704 / 5.773 | 5.713 | 5.699
hT[10]: 7.212 +- 0.052 | 7.14 / 7.217 / 7.276 | 7.222 | 7.206
hT[11]: 6.12 +- 0.056 | 6.048 / 6.121 / 6.189 | 6.132 | 6.118
hT[12]: 7.461 +- 0.054 | 7.388 / 7.464 / 7.525 | 7.468 | 7.454
hT[13]: 8.763 +- 0.035 | 8.717 / 8.765 / 8.808 | 8.766 | 8.759
hT[14]: 9.533 +- 0.04 | 9.477 / 9.536 / 9.584 | 9.542 | 9.53
hT[15]: 6.518 +- 0.05 | 6.444 / 6.519 / 6.584 | 6.528 | 6.513
hT[16]: 5.332 +- 0.061 | 5.256 / 5.335 / 5.406 | 5.343 | 5.33
hT[17]: 6.986 +- 0.052 | 6.914 / 6.99 / 7.05 | 6.996 | 6.98
hT[18]: 5.024 +- 0.051 | 4.954 / 5.026 / 5.085 | 5.035 | 5.02
hT[19]: 9.999324 +- 4e-05 | 9.999271 / 9.999328 / 9.999374 | 9.99933 | 9.99932
hT[20]: 5.066 +- 0.057 | 4.994 / 5.068 / 5.135 | 5.078 | 5.064
hT[21]: 7.739 +- 0.05 | 7.669 / 7.743 / 7.801 | 7.749 | 7.734
hT[22]: 9.479 +- 0.038 | 9.427 / 9.484 / 9.525 | 9.487 | 9.476
hT[23]: 8.285 +- 0.039 | 8.233 / 8.287 / 8.336 | 8.288 | 8.28
hT[24]: 5.667 +- 0.055 | 5.594 / 5.669 / 5.733 | 5.678 | 5.665
hT[25]: 5.201 +- 0.056 | 5.129 / 5.202 / 5.27 | 5.213 | 5.199
hT[26]: 8.407 +- 0.04 | 8.355 / 8.409 / 8.458 | 8.411 | 8.402
hT[27]: 7.381 +- 0.05 | 7.307 / 7.382 / 7.446 | 7.391 | 7.376
hT[28]: 6.817 +- 0.055 | 6.744 / 6.82 / 6.884 | 6.829 | 6.815
hT[29]: 7.415 +- 0.05 | 7.345 / 7.419 / 7.478 | 7.425 | 7.41
hT[30]: 6.2 +- 0.058 | 6.127 / 6.201 / 6.269 | 6.212 | 6.198
hT[31]: 8.645 +- 0.049 | 8.576 / 8.649 / 8.706 | 8.654 | 8.639
hT[32]: 6.64 +- 0.055 | 6.567 / 6.643 / 6.706 | 6.651 | 6.637
hT[33]: 4.186 +- 0.079 | 4.076 / 4.186 / 4.278 | 4.19 | 4.177
hT[34]: 4.033 +- 0.061 | 3.957 / 4.036 / 4.106 | 4.044 | 4.031
hT[35]: 7.386 +- 0.051 | 7.315 / 7.391 / 7.452 | 7.396 | 7.38
hT[36]: 7.308 +- 0.058 | 7.234 / 7.31 / 7.378 | 7.32 | 7.306
hT[37]: 9.241 +- 0.04 | 9.188 / 9.243 / 9.287 | 9.248 | 9.236
hT[38]: 7.278 +- 0.055 | 7.205 / 7.281 / 7.342 | 7.286 | 7.27
hT[39]: 7.152 +- 0.052 | 7.082 / 7.156 / 7.212 | 7.163 | 7.149
hT[40]: 8.626 +- 0.048 | 8.561 / 8.63 / 8.68 | 8.633 | 8.62
hT[41]: 8.041 +- 0.05 | 7.97 / 8.042 / 8.106 | 8.052 | 8.037
hT[42]: 8.266 +- 0.057 | 8.194 / 8.268 / 8.335 | 8.278 | 8.265
hT[43]: 6.372 +- 0.05 | 6.299 / 6.374 / 6.438 | 6.383 | 6.368
hT[44]: 4.914 +- 0.053 | 4.84 / 4.918 / 4.979 | 4.923 | 4.907
hT[45]: 9.046 +- 0.046 | 8.982 / 9.051 / 9.103 | 9.055 | 9.041
hT[46]: 3.217 +- 0.06 | 3.141 / 3.22 / 3.29 | 3.229 | 3.215
hT[47]: 4.299 +- 0.055 | 4.227 / 4.302 / 4.366 | 4.311 | 4.297
hT[48]: 8.037 +- 0.052 | 7.969 / 8.041 / 8.097 | 8.045 | 8.03
hT[49]: 7.703 +- 0.05 | 7.633 / 7.706 / 7.766 | 7.713 | 7.698
hT[50]: 3.414 +- 0.051 | 3.345 / 3.417 / 3.474 | 3.425 | 3.41
hT[51]: 8.237 +- 0.049 | 8.17 / 8.24 / 8.295 | 8.243 | 8.231
hT[52]: 8.16 +- 0.051 | 8.09 / 8.165 / 8.223 | 8.17 | 8.154
hT[53]: 9.99606 +- 0.00037 | 9.99556 / 9.99611 / 9.99647 | 9.99614 | 9.99604
hT[54]: 8.906 +- 0.041 | 8.85 / 8.909 / 8.955 | 8.912 | 8.901
hT[55]: 5.55 +- 0.058 | 5.477 / 5.552 / 5.62 | 5.562 | 5.549
hT[56]: 6.561 +- 0.054 | 6.488 / 6.565 / 6.623 | 6.57 | 6.554
hT[57]: 9.061 +- 0.032 | 9.02 / 9.063 / 9.102 | 9.065 | 9.058
hT[58]: 8.822 +- 0.048 | 8.754 / 8.827 / 8.882 | 8.831 | 8.816
hT[59]: 4.292 +- 0.054 | 4.219 / 4.296 / 4.357 | 4.304 | 4.29
hT[60]: 8.017 +- 0.043 | 7.96 / 8.02 / 8.073 | 8.021 | 8.012
hT[61]: 6.225 +- 0.053 | 6.152 / 6.23 / 6.29 | 6.235 | 6.219
hT[62]: 4.14 +- 0.051 | 4.07 / 4.144 / 4.204 | 4.15 | 4.135
hT[63]: 4.075 +- 0.052 | 4.005 / 4.079 / 4.135 | 4.086 | 4.072
hT[64]: 6.968 +- 0.056 | 6.892 / 6.971 / 7.035 | 6.975 | 6.961
hT[65]: 5.127 +- 0.053 | 5.053 / 5.132 / 5.192 | 5.137 | 5.121
hT[66]: 2.815 +- 0.052 | 2.745 / 2.819 / 2.876 | 2.826 | 2.812
hT[67]: 1.508 +- 0.052 | 1.439 / 1.512 / 1.569 | 1.519 | 1.505
hT[68]: 4.597 +- 0.051 | 4.527 / 4.6 / 4.657 | 4.608 | 4.593
hT[69]: 5.012 +- 0.051 | 4.942 / 5.014 / 5.073 | 5.023 | 5.008
hT[70]: 6.475 +- 0.057 | 6.401 / 6.479 / 6.54 | 6.483 | 6.468
hT[71]: 9.169 +- 0.035 | 9.122 / 9.171 / 9.21 | 9.173 | 9.165
hT[72]: 6.017 +- 0.05 | 5.949 / 6.021 / 6.083 | 6.028 | 6.013
hT[73]: 4.601 +- 0.055 | 4.529 / 4.606 / 4.665 | 4.611 | 4.595
hT[74]: 5.807 +- 0.051 | 5.736 / 5.811 / 5.871 | 5.817 | 5.801
hT[75]: 3.081 +- 0.061 | 3.005 / 3.084 / 3.155 | 3.092 | 3.079
hT[76]: 6.866 +- 0.056 | 6.79 / 6.869 / 6.933 | 6.874 | 6.859
hT[77]: 5.156 +- 0.05 | 5.082 / 5.157 / 5.222 | 5.167 | 5.152
hT[78]: 7.849 +- 0.05 | 7.779 / 7.853 / 7.91 | 7.859 | 7.844
hT[79]: 6.878 +- 0.053 | 6.805 / 6.882 / 6.942 | 6.887 | 6.871
hT[80]: 6.616 +- 0.052 | 6.547 / 6.62 / 6.677 | 6.628 | 6.613
hT[81]: 7.674 +- 0.052 | 7.604 / 7.677 / 7.74 | 7.68 | 7.667
hT[82]: 4.206 +- 0.051 | 4.135 / 4.211 / 4.271 | 4.216 | 4.2
hT[83]: 5.651 +- 0.05 | 5.582 / 5.654 / 5.716 | 5.661 | 5.646
hT[84]: 6.011 +- 0.057 | 5.935 / 6.015 / 6.076 | 6.019 | 6.003
hT[85]: 8.462 +- 0.042 | 8.408 / 8.464 / 8.516 | 8.467 | 8.457
hT[86]: 5.164 +- 0.054 | 5.09 / 5.169 / 5.228 | 5.174 | 5.158
hT[87]: 6.542 +- 0.056 | 6.468 / 6.547 / 6.605 | 6.551 | 6.535
hT[88]: 9.97112 +- 0.00065 | 9.97021 / 9.97113 / 9.97186 | 9.97117 | 9.97106
hT[89]: 5.373 +- 0.051 | 5.303 / 5.378 / 5.438 | 5.383 | 5.368
hT[90]: 1.854 +- 0.06 | 1.779 / 1.857 / 1.926 | 1.866 | 1.853
hT[91]: 9.095 +- 0.033 | 9.053 / 9.098 / 9.137 | 9.099 | 9.092
hT[92]: 4.511 +- 0.05 | 4.441 / 4.515 / 4.573 | 4.521 | 4.506
hT[93]: 2.207 +- 0.058 | 2.134 / 2.21 / 2.277 | 2.219 | 2.206
hT[94]: 8.645 +- 0.048 | 8.581 / 8.65 / 8.699 | 8.653 | 8.639
hT[95]: 8.519 +- 0.05 | 8.453 / 8.523 / 8.578 | 8.528 | 8.513
hT[96]: 7.004 +- 0.052 | 6.934 / 7.008 / 7.065 | 7.016 | 7.001
hT[97]: 1.596 +- 0.052 | 1.526 / 1.6 / 1.657 | 1.607 | 1.593
hT[98]: 9.367 +- 0.033 | 9.323 / 9.37 / 9.405 | 9.373 | 9.364
hT[99]: 7.925 +- 0.052 | 7.853 / 7.929 / 7.986 | 7.934 | 7.918
hT[100]: 9.353 +- 0.032 | 9.309 / 9.355 / 9.39 | 9.357 | 9.349
hT[101]: 3.994 +- 0.059 | 3.916 / 3.998 / 4.063 | 4.002 | 3.986
hT[102]: 7.855 +- 0.051 | 7.783 / 7.859 / 7.918 | 7.864 | 7.849
hT[103]: 3.06 +- 0.051 | 2.989 / 3.064 / 3.125 | 3.07 | 3.054
hT[104]: 2.766 +- 0.05 | 2.694 / 2.768 / 2.831 | 2.777 | 2.762
hT[105]: 2.466 +- 0.055 | 2.393 / 2.469 / 2.532 | 2.477 | 2.463
hT[106]: 7.507 +- 0.052 | 7.435 / 7.512 / 7.571 | 7.517 | 7.501
hT[107]: 3.08 +- 0.05 | 3.01 / 3.081 / 3.144 | 3.09 | 3.075
hT[108]: 6.811 +- 0.056 | 6.737 / 6.814 / 6.883 | 6.817 | 6.803
hT[109]: 6.163 +- 0.05 | 6.095 / 6.167 / 6.229 | 6.174 | 6.158
hT[110]: 4.518 +- 0.05 | 4.445 / 4.52 / 4.581 | 4.529 | 4.514
hT[111]: 4.723 +- 0.05 | 4.652 / 4.725 / 4.789 | 4.734 | 4.719
hT[112]: 3.596 +- 0.05 | 3.524 / 3.598 / 3.661 | 3.607 | 3.592
hT[113]: 5.677 +- 0.061 | 5.599 / 5.679 / 5.755 | 5.683 | 5.669
hT[114]: 5.192 +- 0.058 | 5.114 / 5.195 / 5.257 | 5.2 | 5.184
hT[115]: 8.325 +- 0.051 | 8.259 / 8.327 / 8.384 | 8.333 | 8.319
hT[116]: 4.987 +- 0.056 | 4.914 / 4.988 / 5.051 | 4.996 | 4.979
hT[117]: 1.51 +- 0.051 | 1.438 / 1.512 / 1.572 | 1.521 | 1.506
hT[118]: 3.736 +- 0.051 | 3.665 / 3.739 / 3.8 | 3.746 | 3.73
hT[119]: 5.66 +- 0.05 | 5.588 / 5.662 / 5.725 | 5.671 | 5.656
--------
noise_scale: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
noise_scale: 0.713 +- 0.019 | 0.689 / 0.712 / 0.737 | 0.709 | 0.71
--------
prior_h0_mean: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
prior_h0_mean: 3.799 +- 0.045 | 3.734 / 3.8 / 3.854 | 3.804 | 3.802
--------
prior_h0_scale: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
prior_h0_scale: 0.849 +- 0.012 | 0.834 / 0.848 / 0.865 | 0.848 | 0.848
--------
prior_h_change_control: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
prior_h_change_control: 3.001 +- 0.08 | 2.891 / 3.001 / 3.094 | 3.005 | 2.992
--------
../_images/examples_placebo_effect_10_3.png
../_images/examples_placebo_effect_10_4.png
[7]:

def aware_prior_model():
    noise_scale = yield Prior(tfpd.TruncatedNormal(1., 0.5, low=0.5, high=2.), name='noise_scale')

    prior_h0_mean = yield Prior(tfpd.TruncatedNormal(5., 2., low=0., high=10.), name='prior_h0_mean')
    prior_h0_scale = yield Prior(tfpd.Uniform(0.5, 3.), name='prior_h0_scale')

    prior_h_change_per_hour = yield Prior(tfpd.Uniform(0.5, 2.))
    prior_h_change_control = yield Prior(prior_h_change_per_hour * T, name='prior_h_change_control')

    prior_h_change_intervention_factor = yield Prior(tfpd.Uniform(0.1, 1.), name='prior_h_change_intervention_factor')
    prior_h_change_intervention = prior_h_change_intervention_factor * prior_h_change_control

    # Choose right change for intervention.
    prior_h_change = jnp.where(intervention_mask, prior_h_change_intervention, prior_h_change_control)

    h0 = yield Prior(tfpd.TruncatedNormal(prior_h0_mean, prior_h0_scale,
                                          low=jnp.zeros((num_trials,)), high=10.),
                     name="h0").parametrised()
    hT = yield Prior(tfpd.TruncatedNormal(h0 + prior_h_change, 1.,
                                          low=jnp.zeros((num_trials,)), high=10.),
                     name="hT").parametrised()
    mean_hunger = h0[:, None] + measure_times * (hT[:, None] - h0[:, None]) / T
    patient_params = Params(
        mean_hunger=mean_hunger
    )
    return patient_params, noise_scale


aware_model = Model(prior_model=aware_prior_model, log_likelihood=log_likelihood)
[8]:
em = EvidenceMaximisation(model=aware_model, ns_kwargs=dict(max_samples=1e5), max_num_epochs=10, gtol=0.1, momentum=0.)

ns_results, params = em.train(num_steps=10)
summary(ns_results, with_parametrised=True)
plot_diagnostics(ns_results)
plot_cornerplot(ns_results, variables=['noise_scale', 'prior_h0_mean', 'prior_h0_scale', 'prior_h_change_control', 'prior_h_change_intervention_factor'],
                with_parametrised=True)

Convergence achieved at step 9.:  90%|█████████ | 9/10 [10:41<01:11, 71.29s/it]
--------
Termination Conditions:
Small remaining evidence
--------
likelihood evals: 316285
samples: 3750
phantom samples: 0
likelihood evals / sample: 84.3
phantom fraction (%): 0.0%
--------
logZ=-899.11 +- 0.31
H=-14.68
ESS=394
--------
h0[#]: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
h0[0]: 4.876 +- 0.046 | 4.819 / 4.873 / 4.934 | 4.868 | 4.877
h0[1]: 1.781 +- 0.059 | 1.706 / 1.782 / 1.852 | 1.783 | 1.79
h0[2]: 6.939 +- 0.06 | 6.867 / 6.936 / 7.018 | 6.925 | 6.935
h0[3]: 3.691 +- 0.046 | 3.633 / 3.692 / 3.748 | 3.687 | 3.695
h0[4]: 3.764 +- 0.046 | 3.706 / 3.765 / 3.821 | 3.76 | 3.768
h0[5]: 3.316 +- 0.048 | 3.254 / 3.319 / 3.378 | 3.313 | 3.321
h0[6]: 7.204 +- 0.062 | 7.125 / 7.2 / 7.286 | 7.188 | 7.199
h0[7]: 5.435 +- 0.048 | 5.377 / 5.43 / 5.496 | 5.425 | 5.434
h0[8]: 4.787 +- 0.046 | 4.731 / 4.783 / 4.844 | 4.779 | 4.789
h0[9]: 0.904 +- 0.067 | 0.819 / 0.907 / 0.985 | 0.909 | 0.916
h0[10]: 4.341 +- 0.045 | 4.284 / 4.339 / 4.395 | 4.335 | 4.343
h0[11]: 1.365 +- 0.063 | 1.285 / 1.367 / 1.439 | 1.368 | 1.375
h0[12]: 6.131 +- 0.053 | 6.064 / 6.129 / 6.2 | 6.119 | 6.129
h0[13]: 7.884 +- 0.07 | 7.795 / 7.881 / 7.975 | 7.866 | 7.877
h0[14]: 3.346 +- 0.047 | 3.284 / 3.349 / 3.408 | 3.342 | 3.351
h0[15]: 3.002 +- 0.049 | 2.938 / 3.004 / 3.067 | 3.0 | 3.008
h0[16]: 0.579 +- 0.069 | 0.493 / 0.584 / 0.662 | 0.584 | 0.59
h0[17]: 4.388 +- 0.045 | 4.331 / 4.386 / 4.442 | 4.382 | 4.39
h0[18]: 2.54 +- 0.052 | 2.473 / 2.543 / 2.605 | 2.54 | 2.547
h0[19]: 7.004 +- 0.06 | 6.93 / 7.0 / 7.083 | 6.989 | 6.999
h0[20]: 1.238 +- 0.064 | 1.157 / 1.241 / 1.314 | 1.241 | 1.248
h0[21]: 3.752 +- 0.046 | 3.693 / 3.753 / 3.809 | 3.747 | 3.756
h0[22]: 4.394 +- 0.045 | 4.337 / 4.393 / 4.448 | 4.388 | 4.397
h0[23]: 8.856 +- 0.081 | 8.757 / 8.851 / 8.959 | 8.835 | 8.847
h0[24]: 1.586 +- 0.06 | 1.509 / 1.587 / 1.659 | 1.589 | 1.596
h0[25]: 1.381 +- 0.062 | 1.301 / 1.384 / 1.456 | 1.384 | 1.391
h0[26]: 8.106 +- 0.072 | 8.015 / 8.104 / 8.201 | 8.088 | 8.099
h0[27]: 2.957 +- 0.049 | 2.893 / 2.959 / 3.022 | 2.955 | 2.963
h0[28]: 1.622 +- 0.06 | 1.545 / 1.623 / 1.695 | 1.625 | 1.632
h0[29]: 3.792 +- 0.046 | 3.734 / 3.793 / 3.849 | 3.788 | 3.796
h0[30]: 1.17 +- 0.065 | 1.088 / 1.173 / 1.246 | 1.174 | 1.181
h0[31]: 4.483 +- 0.045 | 4.426 / 4.48 / 4.538 | 4.476 | 4.485
h0[32]: 1.645 +- 0.06 | 1.568 / 1.645 / 1.717 | 1.647 | 1.654
h0[33]: 3.5e-05 +- 1.8e-05 | 1.6e-05 / 3.1e-05 / 5.6e-05 | 3.2e-05 | 3.3e-05
h0[34]: 0.577 +- 0.069 | 0.491 / 0.582 / 0.66 | 0.582 | 0.588
h0[35]: 4.138 +- 0.045 | 4.079 / 4.136 / 4.192 | 4.132 | 4.141
h0[36]: 1.078 +- 0.065 | 0.995 / 1.081 / 1.156 | 1.082 | 1.089
h0[37]: 5.224 +- 0.047 | 5.168 / 5.221 / 5.286 | 5.215 | 5.224
h0[38]: 5.862 +- 0.051 | 5.799 / 5.86 / 5.929 | 5.851 | 5.861
h0[39]: 2.178 +- 0.055 | 2.106 / 2.179 / 2.247 | 2.178 | 2.186
h0[40]: 5.481 +- 0.049 | 5.423 / 5.477 / 5.543 | 5.471 | 5.48
h0[41]: 3.116 +- 0.049 | 3.052 / 3.117 / 3.182 | 3.113 | 3.121
h0[42]: 1.208 +- 0.064 | 1.126 / 1.212 / 1.284 | 1.212 | 1.219
h0[43]: 2.962 +- 0.049 | 2.898 / 2.964 / 3.027 | 2.961 | 2.969
h0[44]: 4.546 +- 0.046 | 4.488 / 4.542 / 4.601 | 4.538 | 4.547
h0[45]: 4.429 +- 0.045 | 4.372 / 4.427 / 4.484 | 4.423 | 4.431
h0[46]: 0.736 +- 0.068 | 0.648 / 0.74 / 0.819 | 0.74 | 0.747
h0[47]: 1.587 +- 0.06 | 1.509 / 1.588 / 1.66 | 1.589 | 1.597
h0[48]: 5.894 +- 0.051 | 5.83 / 5.892 / 5.96 | 5.883 | 5.893
h0[49]: 3.833 +- 0.046 | 3.774 / 3.834 / 3.889 | 3.828 | 3.837
h0[50]: 2.46 +- 0.053 | 2.392 / 2.463 / 2.526 | 2.46 | 2.467
h0[51]: 6.361 +- 0.055 | 6.293 / 6.359 / 6.433 | 6.348 | 6.358
h0[52]: 4.438 +- 0.045 | 4.381 / 4.436 / 4.493 | 4.432 | 4.44
h0[53]: 5.644 +- 0.049 | 5.584 / 5.64 / 5.708 | 5.633 | 5.643
h0[54]: 6.269 +- 0.054 | 6.201 / 6.268 / 6.34 | 6.257 | 6.267
h0[55]: 1.141 +- 0.065 | 1.059 / 1.145 / 1.218 | 1.145 | 1.152
h0[56]: 4.887 +- 0.046 | 4.83 / 4.884 / 4.945 | 4.879 | 4.888
h0[57]: 7.387 +- 0.064 | 7.304 / 7.383 / 7.471 | 7.371 | 7.381
h0[58]: 4.24 +- 0.045 | 4.182 / 4.238 / 4.294 | 4.234 | 4.243
h0[59]: 1.747 +- 0.059 | 1.671 / 1.748 / 1.818 | 1.749 | 1.756
h0[60]: 8.692 +- 0.079 | 8.596 / 8.689 / 8.793 | 8.672 | 8.683
h0[61]: 4.361 +- 0.045 | 4.304 / 4.359 / 4.414 | 4.354 | 4.363
h0[62]: 3.845 +- 0.046 | 3.786 / 3.846 / 3.901 | 3.84 | 3.849
h0[63]: 2.249 +- 0.055 | 2.179 / 2.251 / 2.318 | 2.25 | 2.257
h0[64]: 6.3 +- 0.054 | 6.233 / 6.299 / 6.372 | 6.287 | 6.297
h0[65]: 4.632 +- 0.046 | 4.576 / 4.628 / 4.687 | 4.625 | 4.634
h0[66]: 2.233 +- 0.055 | 2.162 / 2.234 / 2.302 | 2.233 | 2.241
h0[67]: 2.323 +- 0.054 | 2.254 / 2.325 / 2.391 | 2.324 | 2.331
h0[68]: 2.495 +- 0.053 | 2.428 / 2.498 / 2.561 | 2.495 | 2.503
h0[69]: 2.52 +- 0.052 | 2.453 / 2.523 / 2.586 | 2.52 | 2.527
h0[70]: 5.689 +- 0.05 | 5.629 / 5.685 / 5.755 | 5.679 | 5.688
h0[71]: 6.457 +- 0.055 | 6.39 / 6.454 / 6.53 | 6.444 | 6.454
h0[72]: 3.463 +- 0.047 | 3.402 / 3.466 / 3.524 | 3.46 | 3.468
h0[73]: 4.904 +- 0.046 | 4.846 / 4.901 / 4.963 | 4.896 | 4.905
h0[74]: 3.993 +- 0.046 | 3.934 / 3.993 / 4.049 | 3.988 | 3.997
h0[75]: 0.538 +- 0.068 | 0.452 / 0.543 / 0.621 | 0.543 | 0.55
h0[76]: 6.078 +- 0.052 | 6.011 / 6.076 / 6.146 | 6.066 | 6.076
h0[77]: 2.984 +- 0.049 | 2.92 / 2.986 / 3.049 | 2.982 | 2.99
h0[78]: 3.673 +- 0.046 | 3.615 / 3.674 / 3.73 | 3.669 | 3.677
h0[79]: 4.609 +- 0.046 | 4.553 / 4.605 / 4.663 | 4.602 | 4.611
h0[80]: 2.259 +- 0.054 | 2.189 / 2.261 / 2.328 | 2.259 | 2.267
h0[81]: 6.673 +- 0.057 | 6.604 / 6.67 / 6.748 | 6.659 | 6.669
h0[82]: 4.05 +- 0.046 | 3.99 / 4.049 / 4.105 | 4.044 | 4.053
h0[83]: 3.35 +- 0.047 | 3.288 / 3.353 / 3.412 | 3.347 | 3.355
h0[84]: 5.582 +- 0.049 | 5.523 / 5.579 / 5.646 | 5.572 | 5.581
h0[85]: 7.287 +- 0.063 | 7.206 / 7.284 / 7.37 | 7.271 | 7.282
h0[86]: 4.745 +- 0.046 | 4.69 / 4.74 / 4.802 | 4.738 | 4.747
h0[87]: 5.476 +- 0.049 | 5.418 / 5.472 / 5.538 | 5.466 | 5.476
h0[88]: 9.9999896 +- 8.4e-06 | 9.9999804 / 9.9999918 / 9.9999968 | 9.9999906 | 9.999991
h0[89]: 4.064 +- 0.045 | 4.005 / 4.064 / 4.12 | 4.059 | 4.067
h0[90]: 0.787 +- 0.068 | 0.7 / 0.791 / 0.87 | 0.792 | 0.798
h0[91]: 7.086 +- 0.061 | 7.01 / 7.082 / 7.167 | 7.071 | 7.081
h0[92]: 3.658 +- 0.046 | 3.599 / 3.659 / 3.715 | 3.654 | 3.662
h0[93]: 1.038 +- 0.066 | 0.955 / 1.04 / 1.116 | 1.042 | 1.049
h0[94]: 5.445 +- 0.048 | 5.387 / 5.441 / 5.507 | 5.435 | 5.444
h0[95]: 4.93 +- 0.046 | 4.871 / 4.927 / 4.988 | 4.921 | 4.931
h0[96]: 2.199 +- 0.055 | 2.128 / 2.2 / 2.268 | 2.199 | 2.207
h0[97]: 2.216 +- 0.055 | 2.146 / 2.218 / 2.285 | 2.217 | 2.224
h0[98]: 5.861 +- 0.051 | 5.797 / 5.858 / 5.927 | 5.85 | 5.859
h0[99]: 4.733 +- 0.046 | 4.678 / 4.729 / 4.79 | 4.726 | 4.735
h0[100]: 6.139 +- 0.053 | 6.072 / 6.138 / 6.208 | 6.127 | 6.137
h0[101]: 5.834 +- 0.051 | 5.771 / 5.831 / 5.901 | 5.823 | 5.833
h0[102]: 4.329 +- 0.045 | 4.271 / 4.327 / 4.383 | 4.322 | 4.331
h0[103]: 4.021 +- 0.046 | 3.961 / 4.02 / 4.076 | 4.015 | 4.024
h0[104]: 2.892 +- 0.05 | 2.828 / 2.894 / 2.956 | 2.891 | 2.899
h0[105]: 1.658 +- 0.06 | 1.581 / 1.659 / 1.731 | 1.661 | 1.668
h0[106]: 4.497 +- 0.045 | 4.44 / 4.494 / 4.553 | 4.49 | 4.499
h0[107]: 3.2 +- 0.048 | 3.137 / 3.202 / 3.264 | 3.197 | 3.206
h0[108]: 7.017 +- 0.061 | 6.943 / 7.014 / 7.096 | 7.002 | 7.012
h0[109]: 3.486 +- 0.047 | 3.425 / 3.489 / 3.546 | 3.483 | 3.491
h0[110]: 2.784 +- 0.051 | 2.717 / 2.786 / 2.847 | 2.782 | 2.79
h0[111]: 3.075 +- 0.049 | 3.011 / 3.076 / 3.14 | 3.072 | 3.08
h0[112]: 2.889 +- 0.05 | 2.824 / 2.891 / 2.953 | 2.887 | 2.895
h0[113]: 7.343 +- 0.064 | 7.261 / 7.34 / 7.427 | 7.327 | 7.337
h0[114]: 5.609 +- 0.049 | 5.55 / 5.606 / 5.673 | 5.599 | 5.608
h0[115]: 5.154 +- 0.047 | 5.096 / 5.152 / 5.216 | 5.145 | 5.154
h0[116]: 5.195 +- 0.047 | 5.139 / 5.193 / 5.257 | 5.186 | 5.196
h0[117]: 2.657 +- 0.051 | 2.589 / 2.659 / 2.72 | 2.656 | 2.664
h0[118]: 3.843 +- 0.046 | 3.783 / 3.843 / 3.898 | 3.838 | 3.846
h0[119]: 2.881 +- 0.05 | 2.817 / 2.884 / 2.945 | 2.88 | 2.888
--------
hT[#]: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
hT[0]: 9.615 +- 0.048 | 9.555 / 9.617 / 9.677 | 9.632 | 9.606
hT[1]: 6.299 +- 0.068 | 6.21 / 6.298 / 6.388 | 6.332 | 6.291
hT[2]: 8.85 +- 0.061 | 8.777 / 8.849 / 8.931 | 8.863 | 8.837
hT[3]: 7.089 +- 0.067 | 7.005 / 7.086 / 7.178 | 7.116 | 7.077
hT[4]: 7.662 +- 0.067 | 7.579 / 7.659 / 7.753 | 7.689 | 7.649
hT[5]: 4.513 +- 0.066 | 4.427 / 4.511 / 4.598 | 4.542 | 4.502
hT[6]: 9.99416 +- 0.00062 | 9.99339 / 9.99421 / 9.99496 | 9.99431 | 9.99406
hT[7]: 9.603 +- 0.046 | 9.546 / 9.603 / 9.659 | 9.618 | 9.594
hT[8]: 7.795 +- 0.07 | 7.708 / 7.795 / 7.887 | 7.818 | 7.78
hT[9]: 5.696 +- 0.073 | 5.602 / 5.695 / 5.79 | 5.732 | 5.691
hT[10]: 7.234 +- 0.069 | 7.149 / 7.23 / 7.324 | 7.259 | 7.22
hT[11]: 6.115 +- 0.07 | 6.018 / 6.114 / 6.208 | 6.149 | 6.109
hT[12]: 7.444 +- 0.076 | 7.349 / 7.442 / 7.539 | 7.462 | 7.426
hT[13]: 8.741 +- 0.054 | 8.672 / 8.742 / 8.814 | 8.75 | 8.729
hT[14]: 9.524 +- 0.058 | 9.448 / 9.524 / 9.598 | 9.55 | 9.515
hT[15]: 6.525 +- 0.066 | 6.44 / 6.523 / 6.61 | 6.554 | 6.514
hT[16]: 5.327 +- 0.074 | 5.233 / 5.326 / 5.424 | 5.363 | 5.322
hT[17]: 7.063 +- 0.069 | 6.977 / 7.059 / 7.154 | 7.088 | 7.049
hT[18]: 5.032 +- 0.066 | 4.948 / 5.032 / 5.118 | 5.063 | 5.023
hT[19]: 9.99907 +- 0.00011 | 9.99894 / 9.99907 / 9.99921 | 9.9991 | 9.99905
hT[20]: 5.067 +- 0.071 | 4.971 / 5.067 / 5.16 | 5.102 | 5.061
hT[21]: 7.747 +- 0.067 | 7.663 / 7.744 / 7.838 | 7.774 | 7.734
hT[22]: 9.477 +- 0.057 | 9.406 / 9.476 / 9.552 | 9.499 | 9.467
hT[23]: 8.272 +- 0.056 | 8.2 / 8.27 / 8.349 | 8.279 | 8.26
hT[24]: 5.665 +- 0.069 | 5.571 / 5.664 / 5.756 | 5.699 | 5.658
hT[25]: 5.201 +- 0.07 | 5.105 / 5.201 / 5.294 | 5.236 | 5.195
hT[26]: 8.384 +- 0.059 | 8.309 / 8.385 / 8.466 | 8.393 | 8.371
hT[27]: 7.378 +- 0.066 | 7.293 / 7.376 / 7.463 | 7.407 | 7.367
hT[28]: 6.809 +- 0.069 | 6.716 / 6.809 / 6.9 | 6.843 | 6.802
hT[29]: 7.434 +- 0.067 | 7.35 / 7.43 / 7.525 | 7.46 | 7.421
hT[30]: 6.193 +- 0.071 | 6.097 / 6.193 / 6.286 | 6.228 | 6.187
hT[31]: 8.639 +- 0.068 | 8.554 / 8.636 / 8.729 | 8.663 | 8.625
hT[32]: 6.633 +- 0.069 | 6.54 / 6.633 / 6.723 | 6.666 | 6.626
hT[33]: 4.172 +- 0.093 | 4.058 / 4.163 / 4.3 | 4.203 | 4.156
hT[34]: 4.039 +- 0.074 | 3.945 / 4.038 / 4.136 | 4.075 | 4.034
hT[35]: 7.393 +- 0.068 | 7.31 / 7.388 / 7.482 | 7.419 | 7.379
hT[36]: 7.293 +- 0.072 | 7.197 / 7.293 / 7.386 | 7.328 | 7.287
hT[37]: 9.235 +- 0.062 | 9.159 / 9.233 / 9.311 | 9.255 | 9.222
hT[38]: 7.261 +- 0.075 | 7.168 / 7.26 / 7.356 | 7.281 | 7.244
hT[39]: 7.143 +- 0.067 | 7.057 / 7.141 / 7.232 | 7.175 | 7.135
hT[40]: 8.615 +- 0.07 | 8.53 / 8.613 / 8.702 | 8.636 | 8.6
hT[41]: 8.036 +- 0.066 | 7.951 / 8.034 / 8.121 | 8.066 | 8.026
hT[42]: 8.248 +- 0.071 | 8.152 / 8.248 / 8.342 | 8.283 | 8.242
hT[43]: 6.38 +- 0.066 | 6.296 / 6.379 / 6.466 | 6.41 | 6.37
hT[44]: 4.898 +- 0.07 | 4.811 / 4.896 / 4.991 | 4.922 | 4.883
hT[45]: 9.042 +- 0.066 | 8.961 / 9.04 / 9.13 | 9.066 | 9.029
hT[46]: 3.231 +- 0.073 | 3.138 / 3.231 / 3.326 | 3.267 | 3.226
hT[47]: 4.308 +- 0.069 | 4.215 / 4.308 / 4.4 | 4.342 | 4.302
hT[48]: 8.021 +- 0.074 | 7.93 / 8.021 / 8.113 | 8.041 | 8.005
hT[49]: 7.711 +- 0.067 | 7.629 / 7.707 / 7.803 | 7.738 | 7.699
hT[50]: 3.415 +- 0.066 | 3.331 / 3.415 / 3.502 | 3.447 | 3.406
hT[51]: 8.219 +- 0.072 | 8.131 / 8.219 / 8.311 | 8.236 | 8.202
hT[52]: 8.154 +- 0.069 | 8.069 / 8.151 / 8.246 | 8.179 | 8.14
hT[53]: 9.9815 +- 0.0033 | 9.9773 / 9.9818 / 9.9854 | 9.9827 | 9.9811
hT[54]: 8.889 +- 0.065 | 8.809 / 8.889 / 8.97 | 8.905 | 8.875
hT[55]: 5.548 +- 0.071 | 5.452 / 5.548 / 5.641 | 5.583 | 5.542
hT[56]: 6.705 +- 0.071 | 6.618 / 6.705 / 6.798 | 6.728 | 6.69
hT[57]: 9.042 +- 0.052 | 8.979 / 9.043 / 9.112 | 9.052 | 9.031
hT[58]: 8.819 +- 0.067 | 8.737 / 8.815 / 8.906 | 8.844 | 8.806
hT[59]: 4.302 +- 0.069 | 4.212 / 4.301 / 4.391 | 4.335 | 4.295
hT[60]: 8.01 +- 0.054 | 7.943 / 8.009 / 8.076 | 8.005 | 7.997
hT[61]: 5.832 +- 0.058 | 5.755 / 5.835 / 5.902 | 5.836 | 5.82
hT[62]: 4.14 +- 0.058 | 4.063 / 4.142 / 4.212 | 4.145 | 4.129
hT[63]: 4.071 +- 0.064 | 3.987 / 4.07 / 4.148 | 4.081 | 4.065
hT[64]: 6.942 +- 0.064 | 6.857 / 6.944 / 7.018 | 6.939 | 6.926
hT[65]: 5.171 +- 0.059 | 5.095 / 5.174 / 5.243 | 5.174 | 5.159
hT[66]: 2.818 +- 0.064 | 2.733 / 2.816 / 2.895 | 2.828 | 2.811
hT[67]: 1.517 +- 0.063 | 1.434 / 1.516 / 1.593 | 1.527 | 1.511
hT[68]: 4.591 +- 0.063 | 4.51 / 4.59 / 4.666 | 4.6 | 4.584
hT[69]: 5.01 +- 0.063 | 4.929 / 5.009 / 5.085 | 5.019 | 5.003
hT[70]: 6.446 +- 0.062 | 6.365 / 6.449 / 6.518 | 6.444 | 6.43
hT[71]: 9.165 +- 0.049 | 9.099 / 9.166 / 9.222 | 9.163 | 9.153
hT[72]: 6.026 +- 0.059 | 5.948 / 6.027 / 6.097 | 6.032 | 6.016
hT[73]: 4.591 +- 0.059 | 4.515 / 4.594 / 4.661 | 4.593 | 4.578
hT[74]: 5.866 +- 0.058 | 5.789 / 5.869 / 5.939 | 5.87 | 5.855
hT[75]: 3.108 +- 0.075 | 3.007 / 3.105 / 3.199 | 3.122 | 3.104
hT[76]: 6.837 +- 0.064 | 6.752 / 6.839 / 6.911 | 6.834 | 6.821
hT[77]: 5.148 +- 0.061 | 5.069 / 5.149 / 5.223 | 5.156 | 5.14
hT[78]: 7.847 +- 0.059 | 7.769 / 7.849 / 7.919 | 7.852 | 7.837
hT[79]: 7.049 +- 0.059 | 6.972 / 7.051 / 7.121 | 7.051 | 7.036
hT[80]: 6.63 +- 0.064 | 6.545 / 6.628 / 6.706 | 6.64 | 6.623
hT[81]: 7.65 +- 0.063 | 7.568 / 7.649 / 7.725 | 7.646 | 7.634
hT[82]: 4.212 +- 0.058 | 4.135 / 4.215 / 4.284 | 4.216 | 4.201
hT[83]: 5.65 +- 0.059 | 5.573 / 5.651 / 5.722 | 5.656 | 5.641
hT[84]: 5.983 +- 0.062 | 5.902 / 5.986 / 6.053 | 5.982 | 5.968
hT[85]: 8.447 +- 0.056 | 8.379 / 8.448 / 8.516 | 8.443 | 8.433
hT[86]: 5.193 +- 0.059 | 5.117 / 5.195 / 5.264 | 5.195 | 5.18
hT[87]: 6.521 +- 0.061 | 6.441 / 6.524 / 6.589 | 6.52 | 6.506
hT[88]: 9.7386 +- 0.0081 | 9.7279 / 9.7393 / 9.7493 | 9.7397 | 9.7374
hT[89]: 5.434 +- 0.058 | 5.357 / 5.438 / 5.506 | 5.438 | 5.423
hT[90]: 1.875 +- 0.073 | 1.779 / 1.872 / 1.96 | 1.888 | 1.872
hT[91]: 9.09 +- 0.047 | 9.031 / 9.091 / 9.147 | 9.087 | 9.079
hT[92]: 4.513 +- 0.059 | 4.436 / 4.515 / 4.586 | 4.519 | 4.503
hT[93]: 2.223 +- 0.072 | 2.128 / 2.219 / 2.307 | 2.237 | 2.22
hT[94]: 8.624 +- 0.058 | 8.548 / 8.628 / 8.689 | 8.624 | 8.61
hT[95]: 8.509 +- 0.058 | 8.434 / 8.512 / 8.577 | 8.51 | 8.496
hT[96]: 7.018 +- 0.065 | 6.933 / 7.016 / 7.095 | 7.028 | 7.012
hT[97]: 1.607 +- 0.064 | 1.522 / 1.605 / 1.683 | 1.616 | 1.6
hT[98]: 9.366 +- 0.047 | 9.304 / 9.369 / 9.42 | 9.366 | 9.355
hT[99]: 7.945 +- 0.059 | 7.869 / 7.947 / 8.015 | 7.946 | 7.932
hT[100]: 9.352 +- 0.046 | 9.289 / 9.354 / 9.406 | 9.351 | 9.341
hT[101]: 3.98 +- 0.063 | 3.898 / 3.983 / 4.053 | 3.978 | 3.964
hT[102]: 7.875 +- 0.058 | 7.798 / 7.878 / 7.944 | 7.878 | 7.863
hT[103]: 3.052 +- 0.058 | 2.975 / 3.055 / 3.124 | 3.057 | 3.041
hT[104]: 2.761 +- 0.061 | 2.681 / 2.76 / 2.834 | 2.769 | 2.753
hT[105]: 2.476 +- 0.068 | 2.386 / 2.473 / 2.558 | 2.488 | 2.471
hT[106]: 7.555 +- 0.058 | 7.48 / 7.558 / 7.626 | 7.557 | 7.542
hT[107]: 3.069 +- 0.06 | 2.992 / 3.069 / 3.14 | 3.076 | 3.06
hT[108]: 6.8 +- 0.066 | 6.717 / 6.8 / 6.88 | 6.795 | 6.783
hT[109]: 6.185 +- 0.059 | 6.108 / 6.187 / 6.257 | 6.191 | 6.175
hT[110]: 4.507 +- 0.061 | 4.426 / 4.506 / 4.58 | 4.515 | 4.499
hT[111]: 4.714 +- 0.06 | 4.636 / 4.714 / 4.787 | 4.721 | 4.705
hT[112]: 3.585 +- 0.061 | 3.505 / 3.585 / 3.659 | 3.593 | 3.577
hT[113]: 5.673 +- 0.069 | 5.59 / 5.674 / 5.759 | 5.668 | 5.655
hT[114]: 5.168 +- 0.062 | 5.087 / 5.171 / 5.239 | 5.167 | 5.153
hT[115]: 8.309 +- 0.059 | 8.232 / 8.312 / 8.376 | 8.309 | 8.295
hT[116]: 4.965 +- 0.06 | 4.888 / 4.969 / 5.034 | 4.966 | 4.951
hT[117]: 1.514 +- 0.062 | 1.433 / 1.512 / 1.588 | 1.522 | 1.506
hT[118]: 3.73 +- 0.058 | 3.652 / 3.732 / 3.802 | 3.734 | 3.719
hT[119]: 5.658 +- 0.061 | 5.578 / 5.657 / 5.731 | 5.666 | 5.65
--------
noise_scale: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
noise_scale: 0.713 +- 0.016 | 0.694 / 0.712 / 0.734 | 0.71 | 0.71
--------
prior_h0_mean: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
prior_h0_mean: 4.487 +- 0.045 | 4.43 / 4.484 / 4.543 | 4.48 | 4.489
--------
prior_h0_scale: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
prior_h0_scale: 0.832 +- 0.012 | 0.817 / 0.831 / 0.849 | 0.829 | 0.83
--------
prior_h_change_control: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
prior_h_change_control: 1.987 +- 0.095 | 1.87 / 1.977 / 2.117 | 2.018 | 1.97
--------
prior_h_change_intervention_factor: mean +- std.dev. | 10%ile / 50%ile / 90%ile | MAP est. | max(L) est.
prior_h_change_intervention_factor: 0.871 +- 0.038 | 0.826 / 0.869 / 0.917 | 0.861 | 0.87
--------
../_images/examples_placebo_effect_12_3.png
../_images/examples_placebo_effect_12_4.png
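
The aware model's prior_h_change_intervention_factor posterior (0.871 +- 0.038 above) is the quantity that measures the intervention: the mean hunger change under the intervention is estimated at roughly 87% of the control change. The two runs can also be compared through the difference of their reported log-evidences; below is a minimal arithmetic sketch using the numbers printed above (both runs use parametrised h0/hT, so these are evidences conditioned on the optimised point estimates).

import numpy as np

# logZ values and uncertainties as reported in the two summaries above.
log_Z_unaware, sigma_unaware = -896.52, 0.33
log_Z_aware, sigma_aware = -899.11, 0.31

log_bayes_factor = log_Z_unaware - log_Z_aware
sigma = np.sqrt(sigma_unaware ** 2 + sigma_aware ** 2)
print(f"log(Z_unaware / Z_aware) = {log_bayes_factor:.2f} +- {sigma:.2f}")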