
ControlNet / MARLIN / build 9011167809

09 May 2024 01:56AM UTC, coverage: 65.763%. First build.

Pull Request #25: hotfix: add lightning dependancy for _cosine_scheduler_fn(..)
Merge 7c160e431 into 23491494b (via github / web-flow)

461 of 701 relevant lines covered (65.76%)

0.66 hits per line

Source File: /src/marlin_pytorch/model/encoder.py (97.92% covered)
from torch import nn, Tensor
from torch.nn import ModuleList, LayerNorm

from .modules import PatchEmbedding3d, Block
from .positional_embedding import SinCosPositionalEmbedding


class MarlinEncoder(nn.Module):

    def __init__(self, img_size=224, patch_size=16, n_frames=16, embed_dim=768, depth=12,
        num_heads=12, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop_rate=0., attn_drop_rate=0.,
        norm_layer="LayerNorm", init_values=0., tubelet_size=2
    ):
        super().__init__()

        self.embed_dim = embed_dim
        self.patch_embedding = PatchEmbedding3d(
            input_size=(3, n_frames, img_size, img_size),
            patch_size=(tubelet_size, patch_size, patch_size),
            embedding=embed_dim
        )
        num_patches = (img_size // patch_size) * (img_size // patch_size) * (n_frames // tubelet_size)

        # sine-cosine positional embeddings
        self.pos_embedding = SinCosPositionalEmbedding((num_patches, embed_dim), dropout_rate=0.)

        if norm_layer == "LayerNorm":
            self.norm_layer = LayerNorm
            self.norm = self.norm_layer(embed_dim)
        else:
            raise NotImplementedError("Only LayerNorm is supported")  # uncovered in this build

        self.blocks = ModuleList([
            Block(
                dim=embed_dim, num_heads=num_heads, mlp_ratio=mlp_ratio, qkv_bias=qkv_bias, qk_scale=qk_scale,
                drop=drop_rate, attn_drop=attn_drop_rate, norm_layer=self.norm_layer,
                init_values=init_values)
            for _ in range(depth)
        ])

        self.apply(self._init_weights)

    @staticmethod
    def _init_weights(m):
        if isinstance(m, nn.Linear):
            nn.init.xavier_uniform_(m.weight)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    def forward_features(self, x):
        for block in self.blocks:
            x = block(x)
        x = self.norm(x)
        return x

    def forward(self, x: Tensor, mask: Tensor) -> Tensor:
        # mask: (B, T, N) with boolean values, 0 -> masked, 1 -> visible
        assert len(x.shape) == 5, "x must be 5D"
        emb = self.patch_embedding(x)
        emb = self.pos_embedding(emb)
        b, _, c = emb.shape
        emb = emb[mask].view(b, -1, c)  # only visible patches are used
        emb = self.forward_features(emb)
        return emb

    def extract_features(self, x: Tensor, seq_mean_pool: bool) -> Tensor:
        x = self.patch_embedding(x)
        x = self.pos_embedding(x)
        for block in self.blocks:
            x = block(x)

        if seq_mean_pool:
            x = x.mean(dim=1)
        x = self.norm(x)
        return x