Skip to content

Commit

Permalink
Remove some useless code.
Browse files Browse the repository at this point in the history
  • Loading branch information
comfyanonymous committed Feb 22, 2025
1 parent b50ab15 commit aff1653
Showing 1 changed file with 0 additions and 15 deletions.
15 changes: 0 additions & 15 deletions comfy/ldm/modules/attention.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,27 +41,12 @@ def exists(val):
return val is not None


def uniq(arr):
    """Return the unique elements of *arr*, preserving first-seen order.

    Elements must be hashable. The result is a dict keys view, which is
    iterable and supports membership tests like the original's
    ``{el: True for el in arr}.keys()`` form.
    """
    # dict.fromkeys is the idiomatic order-preserving dedupe; it avoids
    # materializing throwaway True values and fixes the missing space
    # after `return` in the original.
    return dict.fromkeys(arr).keys()


def default(val, d):
    """Return *val* when it is not None, otherwise the fallback *d*."""
    return val if exists(val) else d


def max_neg_value(t):
    """Return the most negative finite value representable by *t*'s dtype.

    Commonly used as an additive mask value before a softmax.
    """
    finfo = torch.finfo(t.dtype)
    return -finfo.max


def init_(tensor):
    """Fill *tensor* in place with values uniform in [-1/sqrt(d), 1/sqrt(d)],
    where d is the size of the last dimension, and return it.
    """
    last_dim = tensor.shape[-1]
    bound = 1 / math.sqrt(last_dim)
    # uniform_ mutates the tensor in place; returning it allows fluent use.
    tensor.uniform_(-bound, bound)
    return tensor


# feedforward
class GEGLU(nn.Module):
def __init__(self, dim_in, dim_out, dtype=None, device=None, operations=ops):
Expand Down

0 comments on commit aff1653

Please sign in to comment.