rlaphoenix / VSGAN

Consider removing the commented out code block PY-W0069
Anti-pattern
Major
8 months ago · 2 years old
Consider removing the commented out code block
 861        flops += self.num_heads * N * (self.dim // self.num_heads) * N
 862        #  x = (attn @ v)
 863        flops += self.num_heads * N * N * (self.dim // self.num_heads)
 864        # x = self.proj(x)
 865        flops += N * self.dim * self.dim
 866        return flops
 867
Consider removing the commented out code block
 859        flops += N * self.dim * 3 * self.dim
 860        # attn = (q @ k.transpose(-2, -1))
 861        flops += self.num_heads * N * (self.dim // self.num_heads) * N
 862        #  x = (attn @ v)
 863        flops += self.num_heads * N * N * (self.dim // self.num_heads)
 864        # x = self.proj(x)
 865        flops += N * self.dim * self.dim
Consider removing the commented out code block
 857        flops = 0
 858        # qkv = self.qkv(x)
 859        flops += N * self.dim * 3 * self.dim
 860        # attn = (q @ k.transpose(-2, -1))
 861        flops += self.num_heads * N * (self.dim // self.num_heads) * N
 862        #  x = (attn @ v)
 863        flops += self.num_heads * N * N * (self.dim // self.num_heads)
Consider removing the commented out code block
 855    def flops(self, N):
 856        # calculate flops for 1 window with token length of N
 857        flops = 0
 858        # qkv = self.qkv(x)
 859        flops += N * self.dim * 3 * self.dim
 860        # attn = (q @ k.transpose(-2, -1))
 861        flops += self.num_heads * N * (self.dim // self.num_heads) * N
Consider removing the commented out code block
 678    def forward(self, x, x_size):
 679        h, w = x_size
 680        b, _, c = x.shape
 681        # assert seq_len == h * w, "input feature has wrong size"
 682
 683        shortcut = x
 684        x = self.norm1(x)