Skip to content

Commit aff1653

Browse files
Remove some useless code.
1 parent b50ab15 commit aff1653

File tree

1 file changed

+0
-15
lines changed

1 file changed

+0
-15
lines changed

comfy/ldm/modules/attention.py

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
def exists(val):
    """Return True when *val* is anything other than None."""
    return not (val is None)
4242

4343

def uniq(arr):
    """Deduplicate *arr*, preserving first-seen order; returns a dict keys view."""
    return dict.fromkeys(arr).keys()
4844
def default(val, d):
    """Return *val* unless it is None, in which case return the fallback *d*."""
    # Inlined the `exists(val)` check (val is not None) as a direct guard.
    if val is None:
        return d
    return val

5349

def max_neg_value(t):
    """Most negative finite value representable by the dtype of tensor *t*."""
    finfo = torch.finfo(t.dtype)
    return -finfo.max
def init_(tensor):
    """In-place uniform init of *tensor* in [-b, b] with b = 1/sqrt(last dim).

    Returns the same tensor object for call-chaining.
    """
    bound = 1 / math.sqrt(tensor.shape[-1])
    # uniform_ mutates in place; returning the input keeps the fluent API.
    tensor.uniform_(-bound, bound)
    return tensor
6550
# feedforward
6651
class GEGLU(nn.Module):
6752
def __init__(self, dim_in, dim_out, dtype=None, device=None, operations=ops):

0 commit comments

Comments
 (0)