Develop (#303)

* refactor workflows

* refactor SEs att

* formatter action

* formatter action

* formatter action

* formatter action

* formatter action

* reformatted ✨

* formatter action

* Attention refactor (#284)

* se

* fixed se models

* reformatted ✨

* tests for se models

Co-authored-by: FrancescoSaverioZuppichini <[email protected]>

* Feature/#285 pretrained (#287)

* pretrained weights refactor + logger (see the AutoModel usage sketch below)

* reformatted ✨

* runners

* minor changes

* minor changes

* minor changes

* minor changes

* better tests

* fixtures

* minor changes

Co-authored-by: FrancescoSaverioZuppichini <[email protected]>

* Feature/refactor (#295)

* types

* types

* types

* broken images

* general refactor

  - switch to torchvision StochasticDepth (see the sketch after the commit metadata below)
  - more blocks in nn.blocks
  - full model list in hf store

* raise python version to 3.8

* minor changes

* hf token in action

* hf token in action

* refactor

* refactor

* hf storage models

now uses the hidden API to get the model names (see the illustrative sketch below)

* minor changes

* minor changes

* minor changes

* minor changes

* minor changes

* done (#298)

* better version of drop path (#302)

* reformatted ✨

Co-authored-by: nektos/act <nektos/[email protected]>
Co-authored-by: FrancescoSaverioZuppichini <[email protected]>
3 people authored Feb 22, 2022
1 parent 39cbdc7 commit 75871bd
Showing 15 changed files with 106 additions and 86 deletions.
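
Context for the pretrained-weights items in Feature/#285: a minimal usage sketch. The `AutoModel` entry point and the `resnet18` weight name are taken from the project's README and are assumptions here; the exact API surface after this refactor may differ.

    import torch
    from glasses.models import AutoModel

    # Weights are fetched from the project's model storage on first use
    # (assumed name; any published glasses model name works the same way).
    model = AutoModel.from_pretrained("resnet18").eval()

    with torch.no_grad():
        logits = model(torch.randn(1, 3, 224, 224))

    print(logits.shape)  # expected: torch.Size([1, 1000])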
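
For the switch to torchvision StochasticDepth (and the drop-path rework in #302): a sketch of how `torchvision.ops.StochasticDepth` is typically wired into a residual branch. The block below is illustrative, not the exact glasses call site.

    import torch
    from torch import nn
    from torchvision.ops import StochasticDepth  # available since torchvision 0.11


    class ResidualBlockWithDropPath(nn.Module):
        """Illustrative residual block whose branch output is randomly dropped."""

        def __init__(self, features: int, drop_p: float = 0.2):
            super().__init__()
            self.branch = nn.Sequential(
                nn.Conv2d(features, features, kernel_size=3, padding=1, bias=False),
                nn.BatchNorm2d(features),
                nn.ReLU(inplace=True),
            )
            # "row" drops the branch independently per sample;
            # "batch" drops it for the whole batch at once.
            self.drop_path = StochasticDepth(p=drop_p, mode="row")

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            return x + self.drop_path(self.branch(x))


    block = ResidualBlockWithDropPath(64)
    out = block(torch.randn(2, 64, 32, 32))  # shape preserved: (2, 64, 32, 32)

In eval mode `StochasticDepth` is the identity, so inference is deterministic.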
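
For the "hidden API" note under hf storage models: the commit does not show the endpoint it calls, so the URL and the `author` filter below are assumptions; this is just one plausible way to enumerate model names on the Hugging Face Hub.

    import requests

    # Assumed endpoint: the public Hugging Face Hub REST API; the commit's
    # "hidden API" may differ from this.
    resp = requests.get(
        "https://huggingface.co/api/models",
        params={"author": "glasses"},
        timeout=10,
    )
    resp.raise_for_status()
    model_names = [entry["modelId"] for entry in resp.json()]
    print(model_names[:5])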
4 changes: 2 additions & 2 deletions glasses/models/classification/deit/__init__.py
@@ -69,7 +69,7 @@ def __init__(
         *args,
         head: nn.Module = DeiTClassificationHead,
         tokens: nn.Module = DeiTTokens,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(*args, head=head, tokens=tokens, **kwargs)

@@ -105,6 +105,6 @@ def deit_base_patch16_384(cls, **kwargs):
             depth=12,
             num_heads=12,
             qkv_bias=True,
-            **kwargs
+            **kwargs,
         )
         return model
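
The dominant pattern in the hunks above and below is Black's "magic trailing comma": a comma after the final `**kwargs` argument (valid Python 3 syntax) keeps a signature or call exploded one argument per line instead of letting the formatter collapse it. A minimal sketch:

    class Example:
        # With the trailing comma after **kwargs, Black keeps this signature
        # exploded one parameter per line instead of collapsing it.
        def __init__(
            self,
            *args,
            **kwargs,  # the "magic trailing comma"
        ):
            self.args, self.kwargs = args, kwargs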
18 changes: 9 additions & 9 deletions glasses/models/classification/densenet/__init__.py
@@ -29,7 +29,7 @@ def __init__(
         out_features: int,
         activation: nn.Module = ReLUInPlace,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super().__init__()
         self.block = nn.Sequential(

@@ -72,7 +72,7 @@ def __init__(
         out_features: int,
         activation: nn.Module = ReLUInPlace,
         expansion: int = 4,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(in_features, out_features, activation, **kwargs)
         self.expansion = expansion

@@ -88,7 +88,7 @@ def __init__(
                     self.expanded_features,
                     kernel_size=1,
                     bias=False,
-                    **kwargs
+                    **kwargs,
                 ),
                 "bn2": nn.BatchNorm2d(self.expanded_features),
                 "act2": activation(),

@@ -97,7 +97,7 @@ def __init__(
                     out_features,
                     kernel_size=3,
                     bias=False,
-                    **kwargs
+                    **kwargs,
                 ),
             }
         )

@@ -154,7 +154,7 @@ def __init__(
         block: nn.Module = DenseBottleNeckBlock,
         transition_block: nn.Module = TransitionBlock,
         *args,
-        **kwargs
+        **kwargs,
     ):
         self.out_features = grow_rate * n + in_features
         super().__init__(

@@ -166,7 +166,7 @@ def __init__(
             # reduce the output features by a factor of 2
             transition_block(self.out_features, *args, **kwargs)
             if transition_block
-            else nn.Identity()
+            else nn.Identity(),
         )

@@ -191,7 +191,7 @@ def __init__(
         activation: nn.Module = ReLUInPlace,
         block: nn.Module = DenseBottleNeckBlock,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(in_channels)
         self.layers = nn.ModuleList([])

@@ -223,7 +223,7 @@ def __init__(
                 transition_block=lambda x: nn.Sequential(
                     nn.BatchNorm2d(self.widths[-1]), activation()
                 ),
-                **kwargs
+                **kwargs,
             )
         )

@@ -275,7 +275,7 @@ def __init__(
         encoder: nn.Module = DenseNetEncoder,
         head: nn.Module = ResNetHead,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(encoder, head, *args, **kwargs)
6 changes: 3 additions & 3 deletions glasses/models/classification/efficientnet/__init__.py
@@ -44,7 +44,7 @@ def __init__(
         drop_rate: float = 0.2,
         se: bool = True,
         kernel_size: int = 3,
-        **kwargs
+        **kwargs,
     ):
         super().__init__()

@@ -138,7 +138,7 @@ def __init__(
         drop_rate: float = 0.2,
         stem: nn.Module = EfficientNetStem,
         activation: nn.Module = partial(nn.SiLU, inplace=True),
-        **kwargs
+        **kwargs,
     ):
         super().__init__()

@@ -314,7 +314,7 @@ def __init__(
         encoder: nn.Module = EfficientNetEncoder,
         head: nn.Module = EfficientNetHead,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(
             encoder, partial(head, drop_rate=kwargs["drop_rate"]), *args, **kwargs
32 changes: 16 additions & 16 deletions glasses/models/classification/fishnet/__init__.py
@@ -65,7 +65,7 @@ def __init__(
         depth: int = 1,
         trans_depth: int = 1,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super().__init__()

@@ -79,13 +79,13 @@ def __init__(
                 out_features,
                 shortcut=FishNetChannelReductionShortcut,
                 *args,
-                **kwargs
+                **kwargs,
             ),
             *[
                 block(out_features, out_features, *args, **kwargs)
                 for _ in range(depth - 1)
             ],
-            nn.Upsample(scale_factor=2)
+            nn.Upsample(scale_factor=2),
         )

     def forward(self, x: Tensor, res: Tensor) -> Tensor:

@@ -115,7 +115,7 @@ def __init__(
         block: nn.Module = FishNetBottleNeck,
         depth: int = 1,
         trans_depth: int = 1,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(
             in_features,

@@ -124,13 +124,13 @@ def __init__(
             block,
             depth,
             trans_depth,
-            **kwargs
+            **kwargs,
         )

         self.block = nn.Sequential(
             block(in_features, out_features, shortcut=ResNetShorcut, **kwargs),
             *[block(out_features, out_features, **kwargs) for _ in range(depth - 1)],
-            nn.MaxPool2d(kernel_size=2, stride=2)
+            nn.MaxPool2d(kernel_size=2, stride=2),
         )

@@ -170,7 +170,7 @@ def __init__(
             *[
                 FishNetBottleNeck(out_features, out_features, activation=activation)
                 for _ in range(depth - 1)
-            ]
+            ],
         )
         # very wrong SE implementation and application -> I have contacted the authors and he confirmed they got it wrong.
         self.att = nn.Sequential(

@@ -208,12 +208,12 @@ def __init__(
         depth: int = 1,
         block: nn.Module = FishNetBottleNeck,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(
             block(in_features, out_features, **kwargs),
             *[block(out_features, out_features, **kwargs) for _ in range(depth - 1)],
-            nn.MaxPool2d(kernel_size=2, stride=2)
+            nn.MaxPool2d(kernel_size=2, stride=2),
         )

@@ -251,7 +251,7 @@ def __init__(
         block: nn.Module = FishNetBottleNeck,
         stem: nn.Module = ResNetStemC,
         activation: nn.Module = ReLUInPlace,
-        **kwargs
+        **kwargs,
     ):
         super().__init__()

@@ -269,7 +269,7 @@ def __init__(
                     depth=depth,
                     block=block,
                     activation=activation,
-                    **kwargs
+                    **kwargs,
                 )
                 for (in_features, out_features), depth in zip(
                     self.tail_widths, tail_depths

@@ -301,8 +301,8 @@ def __init__(
                     trans_depth=trans_depth,
                     block=block,
                     activation=activation,
-                    dilation=2 ** i,
-                    padding=2 ** i,
+                    dilation=2**i,
+                    padding=2**i,
                 )
             )

@@ -444,7 +444,7 @@ def __init__(
         encoder: nn.Module = FishNetEncoder,
         head: nn.Module = FishNetHead,
         *args,
-        **kwargs
+        **kwargs,
     ):
         super().__init__(encoder, head, *args, **kwargs)

@@ -483,7 +483,7 @@ def fishnet99(self, *args, **kwargs) -> FishNet:
             body_trans_depths=body_trans_depths,
             head_depths=head_depths,
             head_trans_depths=head_trans_depths,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -513,5 +513,5 @@ def fishnet150(self, *args, **kwargs) -> FishNet:
             body_trans_depths=body_trans_depths,
             head_depths=head_depths,
             head_trans_depths=head_trans_depths,
-            **kwargs
+            **kwargs,
         )
2 changes: 1 addition & 1 deletion glasses/models/classification/mobilenet/__init__.py
@@ -29,5 +29,5 @@ def mobilenet_v2(cls, *args, **kwargs) -> EfficientNet:
             drop_rate=0,
             activation=nn.ReLU6,
             *args,
-            **kwargs
+            **kwargs,
         )
26 changes: 13 additions & 13 deletions glasses/models/classification/regnet/__init__.py
@@ -61,7 +61,7 @@ def __init__(
             out_features,
             reduction=1,
             groups=out_features // groups_width,
-            **kwargs
+            **kwargs,
         )

@@ -291,7 +291,7 @@ def regnety_002(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -303,7 +303,7 @@ def regnety_004(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -315,7 +315,7 @@ def regnety_006(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -327,7 +327,7 @@ def regnety_008(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -339,7 +339,7 @@ def regnety_016(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -351,7 +351,7 @@ def regnety_032(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -363,7 +363,7 @@ def regnety_040(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -375,7 +375,7 @@ def regnety_064(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -387,7 +387,7 @@ def regnety_080(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -399,7 +399,7 @@ def regnety_120(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -411,7 +411,7 @@ def regnety_160(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )

     @classmethod

@@ -423,5 +423,5 @@ def regnety_320(cls, *args, **kwargs):
             widths=widths,
             block=RegNetYBotteneckBlock,
             groups_width=groups_width,
-            **kwargs
+            **kwargs,
         )