
Commit

Merge pull request #161 from tsunhopang/extrinsic_parameter_sampling_improvement_clean

Reparameterization of extrinsic parameters for better sampling efficiency (clean)
kazewong authored Sep 20, 2024
2 parents 3d3b20a + 766c36e commit b993358
Showing 6 changed files with 511 additions and 21 deletions.
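
The diff below threads a list of sample_transforms through Jim.sample, Jim.print_summary, and Jim.get_samples: prior draws are pushed forward into a reparameterized space that the sampler can explore more efficiently, and chains are mapped backward to physical parameters afterwards. The PR's actual extrinsic-parameter transforms live elsewhere in the package; the snippet below is only an illustrative sketch of the forward/backward pattern, using a hypothetical logit reparameterization of a bounded parameter.

import jax.numpy as jnp

# Illustrative sketch only (hypothetical transform, not the one added by this PR):
# a parameter bounded on (lo, hi) is mapped to an unbounded coordinate, which is
# typically easier for a sampler to explore, and mapped back afterwards.
class LogitReparameterization:
    def __init__(self, name: str, lo: float, hi: float):
        self.name, self.lo, self.hi = name, lo, hi

    def forward(self, params: dict) -> dict:
        # physical parameters -> sampling coordinates
        out = dict(params)
        u = (out.pop(self.name) - self.lo) / (self.hi - self.lo)
        out[self.name + "_unbounded"] = jnp.log(u) - jnp.log1p(-u)
        return out

    def backward(self, params: dict) -> dict:
        # sampling coordinates -> physical parameters
        out = dict(params)
        y = out.pop(self.name + "_unbounded")
        out[self.name] = self.lo + (self.hi - self.lo) / (1.0 + jnp.exp(-y))
        return out

In the diff, forward is applied to prior draws before sampling starts, and backward (vmapped over the chains) is applied when summaries and samples are produced.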
2 changes: 1 addition & 1 deletion .github/workflows/pre-commit.yml
@@ -25,4 +25,4 @@ jobs:
python -m pip install pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
python -m pip install .
-   - uses: pre-commit/[email protected]
+   - uses: pre-commit/[email protected]
41 changes: 30 additions & 11 deletions src/jimgw/jim.py
@@ -104,19 +104,38 @@ def posterior(self, params: Float[Array, " n_dim"], data: dict):

def sample(self, key: PRNGKeyArray, initial_position: Array = jnp.array([])):
if initial_position.size == 0:
- initial_position = jnp.zeros((self.sampler.n_chains, self.prior.n_dim)) + jnp.nan
-
- while not jax.tree.reduce(jnp.logical_and, jax.tree.map(lambda x: jnp.isfinite(x), initial_position)).all():
-     non_finite_index = jnp.where(jnp.any(~jax.tree.reduce(jnp.logical_and, jax.tree.map(lambda x: jnp.isfinite(x), initial_position)),axis=1))[0]
+ initial_position = (
+     jnp.zeros((self.sampler.n_chains, self.prior.n_dim)) + jnp.nan
+ )
+
+ while not jax.tree.reduce(
+     jnp.logical_and,
+     jax.tree.map(lambda x: jnp.isfinite(x), initial_position),
+ ).all():
+     non_finite_index = jnp.where(
+         jnp.any(
+             ~jax.tree.reduce(
+                 jnp.logical_and,
+                 jax.tree.map(lambda x: jnp.isfinite(x), initial_position),
+             ),
+             axis=1,
+         )
+     )[0]

key, subkey = jax.random.split(key)
guess = self.prior.sample(subkey, self.sampler.n_chains)
for transform in self.sample_transforms:
guess = jax.vmap(transform.forward)(guess)
- guess = jnp.array(jax.tree.leaves({key: guess[key] for key in self.parameter_names})).T
- finite_guess = jnp.where(jnp.all(jax.tree.map(lambda x: jnp.isfinite(x), guess),axis=1))[0]
+ guess = jnp.array(
+     jax.tree.leaves({key: guess[key] for key in self.parameter_names})
+ ).T
+ finite_guess = jnp.where(
+     jnp.all(jax.tree.map(lambda x: jnp.isfinite(x), guess), axis=1)
+ )[0]
common_length = min(len(finite_guess), len(non_finite_index))
- initial_position = initial_position.at[non_finite_index[:common_length]].set(guess[:common_length])
+ initial_position = initial_position.at[
+     non_finite_index[:common_length]
+ ].set(guess[:common_length])
self.sampler.sample(initial_position, None) # type: ignore

def maximize_likelihood(
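
For reference, the reformatted sample() body above is a retry loop for choosing starting points: every chain begins at NaN, prior draws are pushed through the forward sample transforms, and chains whose positions are still non-finite are overwritten with finite draws until all chains are valid. A condensed sketch of the same idea, assuming a hypothetical prior.sample(key, n) that returns an (n_chains, n_dim) array and omitting the transforms:

import jax
import jax.numpy as jnp

def draw_finite_initial_positions(key, prior, n_chains, n_dim):
    # Start every chain at NaN, then keep drawing until every row is finite.
    positions = jnp.full((n_chains, n_dim), jnp.nan)
    while not jnp.isfinite(positions).all():
        bad = jnp.where(~jnp.isfinite(positions).all(axis=1))[0]  # rows still containing NaN/inf
        key, subkey = jax.random.split(key)
        guess = prior.sample(subkey, n_chains)                    # hypothetical (n_chains, n_dim) draw
        good = jnp.where(jnp.isfinite(guess).all(axis=1))[0]      # finite rows of the draw
        n = min(len(bad), len(good))
        positions = positions.at[bad[:n]].set(guess[good[:n]])
    return positions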
@@ -157,7 +176,7 @@ def print_summary(self, transform: bool = True):
training_chain = self.add_name(training_chain)
if transform:
for sample_transform in reversed(self.sample_transforms):
- training_chain = sample_transform.backward(training_chain)
+ training_chain = jax.vmap(sample_transform.backward)(training_chain)
training_log_prob = train_summary["log_prob"]
training_local_acceptance = train_summary["local_accs"]
training_global_acceptance = train_summary["global_accs"]
@@ -167,7 +186,7 @@ def print_summary(self, transform: bool = True):
production_chain = self.add_name(production_chain)
if transform:
for sample_transform in reversed(self.sample_transforms):
- production_chain = sample_transform.backward(production_chain)
+ production_chain = jax.vmap(sample_transform.backward)(production_chain)
production_log_prob = production_summary["log_prob"]
production_local_acceptance = production_summary["local_accs"]
production_global_acceptance = production_summary["global_accs"]
@@ -223,10 +242,10 @@ def get_samples(self, training: bool = False) -> dict:
else:
chains = self.sampler.get_sampler_state(training=False)["chains"]

chains = chains.transpose(2, 0, 1)
chains = chains.reshape(-1, self.prior.n_dim)
chains = self.add_name(chains)
for sample_transform in reversed(self.sample_transforms):
- chains = sample_transform.backward(chains)
+ chains = jax.vmap(sample_transform.backward)(chains)
return chains

def plot(self):
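
The one-line changes in print_summary and get_samples wrap sample_transform.backward in jax.vmap: the backward map is written for a single sample (a dict of scalars), while the stored chains are dicts of arrays, so vmap applies it independently to every sample regardless of whether the transform's internals broadcast over arrays. A minimal sketch with a hypothetical log-distance transform (not one of the package's transforms):

import jax
import jax.numpy as jnp

class LogDistanceTransform:
    # Hypothetical transform: backward maps ONE sample, i.e. a dict of scalars.
    def backward(self, params: dict) -> dict:
        out = dict(params)
        out["d_L"] = jnp.exp(out.pop("log_d_L"))
        return out

chains = {"log_d_L": jnp.linspace(0.0, 2.0, 8), "ra": jnp.zeros(8)}  # dict of arrays, one entry per sample
physical = jax.vmap(LogDistanceTransform().backward)(chains)         # backward applied sample-by-sample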
9 changes: 9 additions & 0 deletions src/jimgw/single_event/likelihood.py
@@ -26,6 +26,15 @@ def __init__(self, detectors: list[Detector], waveform: Waveform) -> None:
self.waveform = waveform


class ZeroLikelihood(LikelihoodBase):

def __init__(self):
pass

def evaluate(self, params: dict[str, Float], data: dict) -> Float:
return 0.0


class TransientLikelihoodFD(SingleEventLiklihood):
def __init__(
self,
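
The new ZeroLikelihood's evaluate returns 0.0 for every parameter set, so the likelihood contributes nothing and the posterior reduces to the prior; this is convenient for prior-only runs and for testing priors and sample transforms without data. A minimal usage sketch:

from jimgw.single_event.likelihood import ZeroLikelihood

# The log-likelihood is identically zero, so sampling with it explores the prior alone.
likelihood = ZeroLikelihood()
assert likelihood.evaluate({"ra": 1.0, "dec": 0.2}, {}) == 0.0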