Skip to content

Commit

Permalink
merge with main
Browse files Browse the repository at this point in the history
  • Loading branch information
shengfukevin committed Aug 23, 2024
2 parents 24e5024 + f066749 commit 2b42e9b
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 7 deletions.
12 changes: 6 additions & 6 deletions et_replay/tools/comm_replay.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
import logging
import os
import time
from typing import Dict, List, Set, Tuple
from typing import Dict, List, Set, Tuple, Union

import numpy as np
import torch
Expand Down Expand Up @@ -560,7 +560,7 @@ def resetComms(self):

def getCommGroupInfo(
self, curComm: commsArgs, commsParams: commsParamsHolderBase
) -> tuple[int, str]:
) -> Tuple[int, str]:
"""
Return the group information of the current process group,
including the group rank of the local process, and a description string for logging purposes.
Expand Down Expand Up @@ -616,8 +616,8 @@ def generate_io_tensors(
self,
curComm: commsArgs,
commsParams: commsParamsHolderBase,
regenerateTensors: bool
) -> Tuple[torch.Tensor, torch.Tensor]:
regenerateTensors: bool,
) -> Tuple[torch.Tensor, Union[List[torch.Tensor], torch.Tensor]]:
# Use exactly specified inMsgSize/outMsgSize if called from trace replay
# This avoids regenerating sizes such as in _prep_all_gather_base
commsParams.size_from_trace = True
Expand Down Expand Up @@ -738,7 +738,7 @@ def commRebalance(self, curComm: commsArgs) -> None:
# Pass in curComm to modify it in the trace
self.rebalanceSplit(curComm)

def runCompute(self, func, curBlockStack: str) -> tuple[float, float]:
def runCompute(self, func, curBlockStack: str) -> Tuple[float, float]:
"""
Replays a specified compute operation and records metrics for benchmarking.
Expand Down Expand Up @@ -773,7 +773,7 @@ def runCompute(self, func, curBlockStack: str) -> tuple[float, float]:

def runComms(
self, collName: str, curComm: commsArgs, curBlockStack: str
) -> tuple[float, float]:
) -> Tuple[float, float]:
"""
Replays collective communication operation and records metrics for benchmarking.
Expand Down
3 changes: 2 additions & 1 deletion train/comms/pt/commsTraceReplay.py
Original file line number Diff line number Diff line change
Expand Up @@ -947,7 +947,8 @@ def recordCommReplay(
recordComm = curComm.toDict()

dtypeSizeMap = {
v: torch.tensor([], dtype=v).element_size() for v in self.dtypeMap.values()
k: torch.tensor([], dtype=v).element_size()
for k, v in self.dtypeMap.items()
}
recordComm["dtype_size"] = dtypeSizeMap.get(curComm.dtype, 0)
recordComm["marker_stack"] = curBlockStack
Expand Down

0 comments on commit 2b42e9b

Please sign in to comment.