You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
111 lines
2.6 KiB
Python
111 lines
2.6 KiB
Python
"""Public API aggregator for the vendored refiners fluxion layers package.

Re-exports every layer building block (activations, attention, basic tensor
ops, chain combinators, convolutions, norms, sampling, etc.) from its
submodule so callers can import everything from this single namespace.
`__all__` pins exactly which names `from ... import *` exposes.
"""

from imaginairy.vendored.refiners.fluxion.layers.activations import GLU, ApproximateGeLU, GeLU, ReLU, Sigmoid, SiLU
from imaginairy.vendored.refiners.fluxion.layers.attentions import Attention, SelfAttention, SelfAttention2d
from imaginairy.vendored.refiners.fluxion.layers.basics import (
    Buffer,
    Chunk,
    Cos,
    Flatten,
    GetArg,
    Identity,
    Multiply,
    Parameter,
    Permute,
    Reshape,
    Sin,
    Slicing,
    Squeeze,
    Transpose,
    Unbind,
    Unflatten,
    Unsqueeze,
    View,
)
from imaginairy.vendored.refiners.fluxion.layers.chain import (
    Breakpoint,
    Chain,
    Concatenate,
    Distribute,
    Lambda,
    Matmul,
    Parallel,
    Passthrough,
    Residual,
    Return,
    SetContext,
    Sum,
    UseContext,
)
from imaginairy.vendored.refiners.fluxion.layers.conv import Conv2d, ConvTranspose2d
from imaginairy.vendored.refiners.fluxion.layers.converter import Converter
from imaginairy.vendored.refiners.fluxion.layers.embedding import Embedding
from imaginairy.vendored.refiners.fluxion.layers.linear import Linear, MultiLinear
from imaginairy.vendored.refiners.fluxion.layers.maxpool import MaxPool1d, MaxPool2d
from imaginairy.vendored.refiners.fluxion.layers.module import ContextModule, Module, WeightedModule
from imaginairy.vendored.refiners.fluxion.layers.norm import GroupNorm, InstanceNorm2d, LayerNorm, LayerNorm2d
from imaginairy.vendored.refiners.fluxion.layers.padding import ReflectionPad2d
from imaginairy.vendored.refiners.fluxion.layers.pixelshuffle import PixelUnshuffle
from imaginairy.vendored.refiners.fluxion.layers.sampling import Downsample, Interpolate, Upsample

# Names kept in the original (vendored upstream) order rather than sorted,
# so the list's value stays byte-identical to the upstream file.
__all__ = [
    "Embedding",
    "LayerNorm",
    "GroupNorm",
    "LayerNorm2d",
    "InstanceNorm2d",
    "GeLU",
    "GLU",
    "SiLU",
    "ReLU",
    "ApproximateGeLU",
    "Sigmoid",
    "Attention",
    "SelfAttention",
    "SelfAttention2d",
    "Identity",
    "GetArg",
    "View",
    "Flatten",
    "Unflatten",
    "Transpose",
    "Permute",
    "Squeeze",
    "Unsqueeze",
    "Reshape",
    "Slicing",
    "Parameter",
    "Sin",
    "Cos",
    "Chunk",
    "Multiply",
    "Unbind",
    "Matmul",
    "Buffer",
    "Lambda",
    "Return",
    "Sum",
    "Residual",
    "Chain",
    "UseContext",
    "SetContext",
    "Parallel",
    "Distribute",
    "Passthrough",
    "Breakpoint",
    "Concatenate",
    "Conv2d",
    "ConvTranspose2d",
    "Linear",
    "MultiLinear",
    "Downsample",
    "Upsample",
    "Module",
    "WeightedModule",
    "ContextModule",
    "Interpolate",
    "ReflectionPad2d",
    "PixelUnshuffle",
    "Converter",
    "MaxPool1d",
    "MaxPool2d",
]
|