Skip to content

Commit

Permalink
Add correct DTYPE
Browse files Browse the repository at this point in the history
  • Loading branch information
pomonam committed Mar 19, 2024
1 parent f0b77f0 commit 9181430
Show file tree
Hide file tree
Showing 2 changed files with 38 additions and 35 deletions.
68 changes: 34 additions & 34 deletions tests/gpu_tests/ddp_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,40 +189,40 @@ def test_self_scores(self) -> None:
rtol=1e-3,
)

# def test_lr_pairwise_scores(self) -> None:
# pairwise_scores = self.analyzer.load_pairwise_scores(scores_name="single_gpu_qb")
#
# score_args = ScoreArguments(
# score_dtype=torch.float64,
# per_sample_gradient_dtype=torch.float64,
# precondition_dtype=torch.float64,
# query_gradient_rank=32
# )
# self.analyzer.compute_pairwise_scores(
# scores_name="ddp_qb",
# factors_name=OLD_FACTOR_NAME,
# query_dataset=self.eval_dataset,
# train_dataset=self.train_dataset,
# train_indices=list(range(TRAIN_INDICES)),
# query_indices=list(range(QUERY_INDICES)),
# per_device_query_batch_size=12,
# per_device_train_batch_size=512,
# score_args=score_args,
# overwrite_output_dir=True,
# )
# new_pairwise_scores = self.analyzer.load_pairwise_scores(scores_name="ddp_qb")
#
# if LOCAL_RANK == 0:
# print(f"Previous score: {pairwise_scores[ALL_MODULE_NAME][0]}")
# print(f"Previous shape: {pairwise_scores[ALL_MODULE_NAME].shape}")
# print(f"New score: {new_pairwise_scores[ALL_MODULE_NAME][0]}")
# print(f"New shape: {new_pairwise_scores[ALL_MODULE_NAME].shape}")
# assert check_tensor_dict_equivalence(
# pairwise_scores,
# new_pairwise_scores,
# atol=1e-5,
# rtol=1e-3,
# )
def test_lr_pairwise_scores(self) -> None:
    """Verify low-rank (rank-32 query gradient) pairwise scores under DDP.

    Recomputes pairwise influence scores with float64 precision and compares
    them against the single-GPU reference scores saved under
    "single_gpu_qb" — presumably produced by prepare_tests.py; confirm.
    """
    # Reference scores from the earlier single-GPU run.
    reference_scores = self.analyzer.load_pairwise_scores(scores_name="single_gpu_qb")

    # float64 everywhere so numerical drift does not mask a real mismatch;
    # rank-32 enables the low-rank query-gradient path being tested.
    score_args = ScoreArguments(
        score_dtype=torch.float64,
        per_sample_gradient_dtype=torch.float64,
        precondition_dtype=torch.float64,
        query_gradient_rank=32,
    )
    self.analyzer.compute_pairwise_scores(
        scores_name="ddp_qb",
        factors_name=OLD_FACTOR_NAME,
        query_dataset=self.eval_dataset,
        train_dataset=self.train_dataset,
        train_indices=list(range(TRAIN_INDICES)),
        query_indices=list(range(QUERY_INDICES)),
        per_device_query_batch_size=12,
        per_device_train_batch_size=512,
        score_args=score_args,
        overwrite_output_dir=True,
    )
    ddp_scores = self.analyzer.load_pairwise_scores(scores_name="ddp_qb")

    # Only rank 0 prints and asserts to avoid duplicated output across workers.
    if LOCAL_RANK == 0:
        print(f"Previous score: {reference_scores[ALL_MODULE_NAME][0]}")
        print(f"Previous shape: {reference_scores[ALL_MODULE_NAME].shape}")
        print(f"New score: {ddp_scores[ALL_MODULE_NAME][0]}")
        print(f"New shape: {ddp_scores[ALL_MODULE_NAME].shape}")
        assert check_tensor_dict_equivalence(
            reference_scores,
            ddp_scores,
            atol=1e-5,
            rtol=1e-3,
        )

@classmethod
def tearDownClass(cls) -> None:
Expand Down
5 changes: 4 additions & 1 deletion tests/gpu_tests/prepare_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,10 @@ def run_analysis() -> None:
)

score_args = ScoreArguments(
query_gradient_rank=32
query_gradient_rank=32,
score_dtype=torch.float64,
per_sample_gradient_dtype=torch.float64,
precondition_dtype=torch.float64,
)
analyzer.compute_pairwise_scores(
scores_name="single_gpu_qb",
Expand Down

0 comments on commit 9181430

Please sign in to comment.