(doc/fluxion) export Activation and ScaledDotProductAttention
Laurent2916 committed Feb 1, 2024
1 parent b8072ee · commit 8d4c734
Showing 1 changed file with 17 additions and 2 deletions.
src/refiners/fluxion/layers/__init__.py — 19 changes: 17 additions & 2 deletions
@@ -1,5 +1,18 @@
-from refiners.fluxion.layers.activations import GLU, ApproximateGeLU, GeLU, ReLU, Sigmoid, SiLU
-from refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
+from refiners.fluxion.layers.activations import (
+    GLU,
+    Activation,
+    ApproximateGeLU,
+    GeLU,
+    ReLU,
+    Sigmoid,
+    SiLU,
+)
+from refiners.fluxion.layers.attentions import (
+    Attention,
+    ScaledDotProductAttention,
+    SelfAttention,
+    SelfAttention2d,
+)
 from refiners.fluxion.layers.basics import (
     Cos,
     Flatten,
@@ -49,13 +62,15 @@
"GroupNorm",
"LayerNorm2d",
"InstanceNorm2d",
"Activation",
"GeLU",
"GLU",
"SiLU",
"ReLU",
"ApproximateGeLU",
"Sigmoid",
"Attention",
"ScaledDotProductAttention",
"SelfAttention",
"SelfAttention2d",
"Identity",
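For context, a minimal usage sketch of the two newly exported names. The import path `refiners.fluxion.layers` comes straight from this diff; everything else in the comments (`Activation` as the common base class of the activation layers, `ScaledDotProductAttention` being constructible with defaults) is an assumption about the refiners API rather than something shown in this commit:

```python
# After this commit, both names are importable from the layers package
# itself instead of their defining submodules (activations, attentions).
from refiners.fluxion.layers import Activation, GeLU, ScaledDotProductAttention

# Assumption: Activation is the shared base class of the activation layers
# exported alongside it (GeLU, ReLU, SiLU, ...), making it usable for
# isinstance checks and type annotations.
gelu = GeLU()
assert isinstance(gelu, Activation)

# Assumption: ScaledDotProductAttention is a Fluxion module wrapping the
# attention computation; its constructor signature is not visible in this
# diff, so it is instantiated here with defaults only.
sdpa = ScaledDotProductAttention()
```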
