
Commit

formatting
jgallowa07 committed Mar 11, 2024
1 parent fc89753 commit 9198572
Showing 4 changed files with 7 additions and 11,409 deletions.
8 changes: 0 additions & 8 deletions multidms/data.py
@@ -599,7 +599,6 @@ def targets(self) -> dict:
         """The functional scores for each variant in the training data."""
         return self._training_data["y"]

-    # TODO, rename mutparser
     @property
     def mutparser(self) -> MutationParser:
         """
@@ -608,7 +607,6 @@ def mutparser(self) -> MutationParser:
         """
         return self._mutparser

-    # TODO, rename
     @property
     def parse_mut(self) -> MutationParser:
         """
@@ -618,7 +616,6 @@ def parse_mut(self) -> MutationParser:
         """
         return self.mutparser.parse_mut

-    # TODO, document rename issue
     @property
     def parse_muts(self) -> partial:
         """
@@ -628,11 +625,6 @@ def parse_muts(self) -> partial:
         """
         return self._parse_muts

-    # TODO should this be cached? how does caching interact with the way in
-    # which we applying this function in parallel?
-    # although, unless the variants are un-collapsed, this cache will be
-    # pretty useless.
-    # although it could be useful for the Model.add_phenotypes_to_df method.
     def convert_subs_wrt_ref_seq(self, condition, aa_subs):
         """
         Covert amino acid substitutions to be with respect to the reference sequence.
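
The comment block deleted above asks whether convert_subs_wrt_ref_seq should be cached and how a cache would interact with applying the function in parallel. As a rough sketch of that trade-off (hypothetical stand-in function, not the multidms implementation): functools.lru_cache only pays off when identical (condition, aa_subs) pairs recur, and because the cache lives in per-process memory, multiprocessing pool workers each keep their own copy rather than sharing hits.

from functools import lru_cache
from multiprocessing import Pool


# Hypothetical stand-in for an expensive conversion onto the reference
# sequence; not the multidms implementation.
@lru_cache(maxsize=None)
def convert_subs(condition: str, aa_subs: str) -> str:
    return " ".join(sorted(aa_subs.split()))


if __name__ == "__main__":
    pairs = [("delta", "M1A K2R"), ("delta", "M1A K2R"), ("omicron", "K2R")]

    serial = [convert_subs(c, s) for c, s in pairs]
    print(convert_subs.cache_info().hits)  # 1: the repeated pair hits the cache

    with Pool(2) as pool:  # each worker process builds its own, separate cache
        parallel = pool.starmap(convert_subs, pairs)

    print(serial == parallel)  # True: results agree; only cache reuse differs
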
14 changes: 7 additions & 7 deletions multidms/model.py
@@ -209,7 +209,7 @@ def __init__(
         epistatic_model=multidms.biophysical.sigmoidal_global_epistasis,
         output_activation=multidms.biophysical.identity_activation,
         conditional_shifts=True,
-        alpha_d=False,  # TODO raise issue to be squashed in this PR
+        alpha_d=False,
         gamma_corrected=False,
         PRNGKey=0,
         init_beta_naught=0.0,
@@ -805,9 +805,9 @@ def add_phenotypes_to_df(
             if phenotype_as_effect:
                 latent_predictions -= wildtype_df.loc[condition, "predicted_latent"]
             latent_predictions[nan_variant_indices] = onp.nan
-            ret.loc[condition_df.index.values, latent_phenotype_col] = (
-                latent_predictions
-            )
+            ret.loc[
+                condition_df.index.values, latent_phenotype_col
+            ] = latent_predictions

             # func_score predictions on binary variants, X
             phenotype_predictions = onp.array(
@@ -819,9 +819,9 @@
                     condition, "predicted_func_score"
                 ]
             phenotype_predictions[nan_variant_indices] = onp.nan
-            ret.loc[condition_df.index.values, observed_phenotype_col] = (
-                phenotype_predictions
-            )
+            ret.loc[
+                condition_df.index.values, observed_phenotype_col
+            ] = phenotype_predictions

         return ret

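
Both hunks above re-wrap the same pandas .loc assignment without changing what is assigned; the difference is purely which line break the formatter prefers for a statement over the line-length limit. A minimal standalone sketch (made-up frame and column names, not the Model.add_phenotypes_to_df code path) showing the two layouts are equivalent:

import numpy as onp
import pandas as pd

ret = pd.DataFrame({"aa_substitutions": ["M1A", "K2R", "K2*"]}, index=[10, 11, 12])
latent_predictions = onp.array([0.4, -1.3, 0.0])
nan_variant_indices = onp.array([2])  # e.g. variants that could not be converted
latent_predictions[nan_variant_indices] = onp.nan

# one wrapping of the assignment (the style removed here) ...
ret.loc[ret.index.values, "predicted_latent"] = (
    latent_predictions
)

# ... and the other (the style added here); the effect is identical
ret.loc[
    ret.index.values, "predicted_latent"
] = latent_predictions

print(ret)
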
8 changes: 0 additions & 8 deletions multidms/model_collection.py
@@ -396,10 +396,6 @@ def __init__(self, fit_models):
             )
             all_mutations = set.union(all_mutations, set(fit.data.mutations))

-        # add the final training loss to the fit_models dataframe
-        # fit_models["training_loss"] = fit_models.step_loss.apply(lambda x: x[-1])
-        # TODO rename to fit_models_df
-
         # initialize empty columns for conditional loss
         fit_models.assign(
             **{
@@ -447,7 +443,6 @@ def all_mutations(self) -> tuple:
         """The mutations shared by each fitting dataset."""
         return self._all_mutations

-    # TODO remove verbose everywhere
     @lru_cache(maxsize=10)
     def split_apply_combine_muts(
         self,
@@ -1009,9 +1004,6 @@ def mut_type(mut):
             return "stop" if mut.endswith("*") else "nonsynonymous"

         # apply, drop, and melt
-        # TODO This throws deprecation warning
-        # because of the include_groups argument ...
-        # set to False, and lose the drop call after ...
         sparsity_df = (
             df.drop(columns=to_throw)
             .assign(mut_type=lambda x: x.mutation.apply(mut_type))
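
The comment deleted before sparsity_df refers to the deprecation warning that pandas 2.2+ emits when DataFrameGroupBy.apply is allowed to operate on the grouping columns. A hedged, standalone sketch of the fix it suggests (toy data, not the model_collection code itself): passing include_groups=False silences the warning and removes the need to drop the grouping column afterwards.

import pandas as pd

df = pd.DataFrame(
    {
        "condition": ["a", "a", "b", "b"],
        "mutation": ["M1A", "K2*", "M1A", "K2R"],
        "score": [0.1, -2.0, 0.3, 0.2],
    }
)


def frac_stop(g):
    # fraction of mutations in the group that are stop codons
    return g.mutation.str.endswith("*").mean()


# default: grouping columns are passed through to the function, which
# triggers a DeprecationWarning on pandas >= 2.2
old = df.groupby("condition").apply(frac_stop)

# explicit include_groups=False: no warning, and no drop("condition") needed
new = df.groupby("condition").apply(frac_stop, include_groups=False)

print(old.equals(new))  # True
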
(Diff for the fourth changed file is not shown here; it accounts for the bulk of the 11,409 deletions.)
