RandomBinaryCriterion.lua

local RandomBinaryCriterion, parent = torch.class('w2nn.RandomBinaryCriterion', 'nn.Criterion')

-- Build a stack of random binary convolution layers.
-- In the first layer, filter j is initialized as a per-channel identity:
-- weight[j][j] has a single 1 at the kernel center and all other entries are 0,
-- so the original channels pass through unchanged alongside the random features.
local function create_filters(ch, n, k, layers)
   local model = nn.Sequential()
   for i = 1, layers do
      local n_input = ch
      if i > 1 then
         n_input = n
      end
      local filter = w2nn.RandomBinaryConvolution(n_input, n, k, k)
      if i == 1 then
         -- channel identity
         for j = 1, ch do
            filter.weight[j]:fill(0)
            filter.weight[j][j][math.floor(k / 2) + 1][math.floor(k / 2) + 1] = 1
         end
      end
      model:add(filter)
      --if layers > 1 and i ~= layers then
      --   model:add(nn.Sigmoid(true))
      --end
   end
   return model
end
function RandomBinaryCriterion:__init(ch, n, k, layers)
   parent.__init(self)
   self.layers = layers or 1
   self.gamma = 0.1      -- Huber loss threshold
   self.n = n or 32      -- number of random binary filters per layer
   self.k = k or 3       -- kernel size
   self.ch = ch          -- number of input channels
   self.filter1 = create_filters(self.ch, self.n, self.k, self.layers)
   self.filter2 = self.filter1:clone()
   self.diff = torch.Tensor()
   self.diff_abs = torch.Tensor()
   self.square_loss_buff = torch.Tensor()
   self.linear_loss_buff = torch.Tensor()
   self.input = torch.Tensor()
   self.target = torch.Tensor()
end
function RandomBinaryCriterion:updateOutput(input, target)
   -- accept flattened (batch, ch * H * W) inputs and reshape to 4D,
   -- assuming square patches
   if input:dim() == 2 then
      local k = math.sqrt(input:size(2) / self.ch)
      input = input:reshape(input:size(1), self.ch, k, k)
   end
   if target:dim() == 2 then
      local k = math.sqrt(target:size(2) / self.ch)
      target = target:reshape(target:size(1), self.ch, k, k)
   end
   self.input:resizeAs(input):copy(input):clamp(0, 1)
   self.target:resizeAs(target):copy(target):clamp(0, 1)
   local lb1 = self.filter1:forward(self.input)
   local lb2 = self.filter2:forward(self.target)
   -- Huber loss between the two sets of binary feature maps
   self.diff:resizeAs(lb1):copy(lb1)
   for i = 1, lb1:size(1) do
      self.diff[i]:add(-1, lb2[i])
   end
   self.diff_abs:resizeAs(self.diff):copy(self.diff):abs()
   local square_targets = self.diff[torch.lt(self.diff_abs, self.gamma)]
   local linear_targets = self.diff[torch.ge(self.diff_abs, self.gamma)]
   local square_loss = self.square_loss_buff:resizeAs(square_targets):copy(square_targets):pow(2.0):mul(0.5):sum()
   local linear_loss = self.linear_loss_buff:resizeAs(linear_targets):copy(linear_targets):abs():add(-0.5 * self.gamma):mul(self.gamma):sum()
   --self.outlier_rate = linear_targets:nElement() / input:nElement()
   self.output = (square_loss + linear_loss) / lb1:nElement()
   return self.output
end
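
-- Illustrative helper (a minimal sketch, not used by the class): the per-element
-- Huber loss that updateOutput above evaluates in vectorized form over the
-- feature-map differences d = lb1 - lb2, averaged over all elements, with
-- threshold gamma = self.gamma. The name huber_element_sketch is arbitrary.
local function huber_element_sketch(d, gamma)
   if math.abs(d) < gamma then
      return 0.5 * d * d                         -- quadratic region (inliers)
   else
      return gamma * (math.abs(d) - 0.5 * gamma) -- linear region (outliers)
   end
end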
function RandomBinaryCriterion:updateGradInput(input, target)
   local d2 = false
   if input:dim() == 2 then
      d2 = true
      local k = math.sqrt(input:size(2) / self.ch)
      input = input:reshape(input:size(1), self.ch, k, k)
   end
   local norm = self.n / self.input:nElement()
   -- Huber gradient: proportional to the difference inside the threshold,
   -- clipped to +/- gamma outside it
   self.gradInput:resizeAs(self.diff):copy(self.diff):mul(norm)
   local outlier = torch.ge(self.diff_abs, self.gamma)
   self.gradInput[outlier] = torch.sign(self.diff[outlier]) * self.gamma * norm
   local grad_input = self.filter1:updateGradInput(input, self.gradInput)
   if d2 then
      -- flatten back to the original 2D shape
      grad_input = grad_input:reshape(grad_input:size(1), grad_input:size(2) * grad_input:size(3) * grad_input:size(4))
   end
   return grad_input
end
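
-- Minimal usage sketch (illustrative only; assumes the `w2nn` package is loaded
-- and relies on the standard nn.Criterion forward/backward API; batch size and
-- patch shape below are arbitrary examples):
--
--   local criterion = w2nn.RandomBinaryCriterion(3)  -- 3 channels; n=32, k=3, layers=1 by default
--   local input  = torch.rand(4, 3, 32, 32)          -- predictions, clamped internally to [0, 1]
--   local target = torch.rand(4, 3, 32, 32)          -- ground-truth patches
--   local loss = criterion:forward(input, target)    -- calls updateOutput
--   local grad = criterion:backward(input, target)   -- calls updateGradInput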