
Source code for mmcv.cnn.bricks.hsigmoid

# Copyright (c) OpenMMLab. All rights reserved.
import warnings

import torch
import torch.nn as nn
from mmengine.registry import MODELS


@MODELS.register_module()
class HSigmoid(nn.Module):
    """Hard Sigmoid Module. Apply the hard sigmoid function:

    Hsigmoid(x) = min(max((x + bias) / divisor, min_value), max_value)
    Default: Hsigmoid(x) = min(max((x + 3) / 6, 0), 1)

    Note:
        In MMCV v1.4.4, we modified the default value of args to align
        with PyTorch official.

    Args:
        bias (float): Bias of the input feature map. Default: 3.0.
        divisor (float): Divisor of the input feature map. Default: 6.0.
        min_value (float): Lower bound value. Default: 0.0.
        max_value (float): Upper bound value. Default: 1.0.

    Returns:
        Tensor: The output tensor.
    """

    def __init__(self,
                 bias: float = 3.0,
                 divisor: float = 6.0,
                 min_value: float = 0.0,
                 max_value: float = 1.0):
        super().__init__()
        warnings.warn(
            'In MMCV v1.4.4, we modified the default value of args to align '
            'with PyTorch official. Previous Implementation: '
            'Hsigmoid(x) = min(max((x + 1) / 2, 0), 1). '
            'Current Implementation: '
            'Hsigmoid(x) = min(max((x + 3) / 6, 0), 1).')
        self.bias = bias
        self.divisor = divisor
        assert self.divisor != 0
        self.min_value = min_value
        self.max_value = max_value

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = (x + self.bias) / self.divisor
        return x.clamp_(self.min_value, self.max_value)
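
A minimal usage sketch, not part of the module above. It assumes mmengine's MODELS.build registry API and torch.nn.functional.hardsigmoid, both of which exist in current mmengine and PyTorch releases; with the default arguments the two activations should agree.

# Usage sketch (assumption: MODELS.build and F.hardsigmoid behave as in
# current mmengine / PyTorch releases; this is not part of the module above).
import torch
import torch.nn.functional as F
from mmengine.registry import MODELS

from mmcv.cnn.bricks.hsigmoid import HSigmoid

x = torch.linspace(-5.0, 5.0, steps=11)

# Direct instantiation with the PyTorch-aligned defaults:
# Hsigmoid(x) = min(max((x + 3) / 6, 0), 1).
act = HSigmoid()
out = act(x)

# With default arguments the output should match PyTorch's hardsigmoid.
# Note that the in-place clamp_ in forward() acts on an intermediate
# tensor, so the caller's input x is left unmodified.
assert torch.allclose(out, F.hardsigmoid(x))

# Building through the registry, as is typical in MMCV configs.
act_from_cfg = MODELS.build(dict(type='HSigmoid'))
assert torch.allclose(act_from_cfg(x), out)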