You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

conv.py 3.6 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899
  1. from ... import functional as F
  2. from .. import conv as Float
  3. from .module import QATModule
  4. class Conv2d(Float.Conv2d, QATModule):
  5. r"""A :class:`~.QATModule` :class:`~.module.Conv2d` with QAT support.
  6. Could be applied with :class:`~.Observer` and :class:`~.quantization.fake_quant.FakeQuantize`.
  7. """
  8. def calc_conv_qat(self, inp):
  9. w_qat = self.apply_quant_weight(self.weight)
  10. b_qat = self.apply_quant_bias(self.bias, inp, w_qat)
  11. conv = self.calc_conv(inp, w_qat, b_qat)
  12. return conv
  13. @classmethod
  14. def from_float_module(cls, float_module: Float.Conv2d):
  15. r"""
  16. Return a :class:`~.QATModule` instance converted from
  17. a float :class:`~.Module` instance.
  18. """
  19. qat_module = cls(
  20. float_module.in_channels,
  21. float_module.out_channels,
  22. float_module.kernel_size,
  23. float_module.stride,
  24. float_module.padding,
  25. float_module.dilation,
  26. float_module.groups,
  27. float_module.bias is not None,
  28. float_module.conv_mode,
  29. float_module.compute_mode,
  30. float_module.padding_mode,
  31. name=float_module.name,
  32. )
  33. qat_module.weight = float_module.weight
  34. qat_module.bias = float_module.bias
  35. return qat_module
  36. def forward(self, inp):
  37. return self.apply_quant_activation(self.calc_conv_qat(inp))
  38. class ConvRelu2d(Conv2d):
  39. r"""A :class:`~.QATModule` include :class:`~.module.Conv2d` and :func:`~.relu` with QAT support.
  40. Could be applied with :class:`~.Observer` and :class:`~.quantization.fake_quant.FakeQuantize`.
  41. """
  42. def forward(self, inp):
  43. return self.apply_quant_activation(F.relu(self.calc_conv_qat(inp)))
  44. class ConvTranspose2d(Float.ConvTranspose2d, QATModule):
  45. r"""A :class:`~.QATModule` :class:`~.module.ConvTranspose2d` with QAT support.
  46. Could be applied with :class:`~.Observer` and :class:`~.quantization.fake_quant.FakeQuantize`.
  47. """
  48. def calc_conv_transpose2d_qat(self, inp):
  49. w_qat = self.apply_quant_weight(self.weight)
  50. b_qat = self.apply_quant_bias(self.bias, inp, w_qat)
  51. conv_transpose2d = self.calc_conv_transpose2d(inp, w_qat, b_qat)
  52. return conv_transpose2d
  53. @classmethod
  54. def from_float_module(cls, float_module: Float.ConvTranspose2d):
  55. r"""
  56. Return a :class:`~.QATModule` instance converted from
  57. a float :class:`~.Module` instance.
  58. """
  59. qat_module = cls(
  60. float_module.in_channels,
  61. float_module.out_channels,
  62. float_module.kernel_size,
  63. float_module.stride,
  64. float_module.padding,
  65. float_module.output_padding,
  66. float_module.dilation,
  67. float_module.groups,
  68. float_module.bias is not None,
  69. float_module.conv_mode,
  70. float_module.compute_mode,
  71. name=float_module.name,
  72. )
  73. qat_module.weight = float_module.weight
  74. qat_module.bias = float_module.bias
  75. return qat_module
  76. def forward(self, inp):
  77. return self.apply_quant_activation(self.calc_conv_transpose2d_qat(inp))
  78. class ConvTransposeRelu2d(ConvTranspose2d):
  79. r"""A :class:`~.QATModule` include :class:`~.module.ConvTranspose2d` and :func:`~.relu` with QAT support.
  80. Could be applied with :class:`~.Observer` and :class:`~.quantization.fake_quant.FakeQuantize`.
  81. """
  82. def forward(self, inp):
  83. return self.apply_quant_activation(F.relu(self.calc_conv_transpose2d_qat(inp)))