Commit dbfdd4f2 authored by Aurélie saulquin

add quantizer test

parent adc7744b
1 merge request: !3 Dev
@@ -18,6 +18,21 @@ from ..modnef_torch_neuron import ModNEFNeuron, _quantizer
from math import log, ceil
from modnef.quantizer import *
import torch.autograd as autograd
class QuantizeMembrane(autograd.Function):
  """Straight-through quantizer: quantize the membrane in forward, pass the gradient through unchanged in backward."""

  @staticmethod
  def forward(ctx, U, quantizer):
    max_val = U.abs().max().detach()  # detached so taking the max does not block the gradient (currently unused)
    U_quant = quantizer(U, True)
    ctx.save_for_backward(U, quantizer.scale_factor)
    return U_quant

  @staticmethod
  def backward(ctx, grad_output):
    U, scale_factor = ctx.saved_tensors  # saved values are not needed by a straight-through estimator
    return grad_output, None  # identity gradient for U, no gradient for the quantizer
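
For context, a minimal sketch of the straight-through pattern QuantizeMembrane implements, using a hypothetical _ToyQuantizer that only mimics the __call__(tensor, unscale) interface and scale_factor attribute assumed above (the real modnef.quantizer classes are not reproduced here):

import torch
import torch.autograd as autograd

class _ToyQuantizer:
  """Hypothetical stand-in for a modnef quantizer: rounds to a fixed-point grid."""
  def __init__(self, scale=0.125):
    self.scale_factor = torch.tensor(scale)

  def __call__(self, x, unscale=True):
    # Uniform quantization: snap to the nearest multiple of scale_factor.
    return torch.round(x / self.scale_factor) * self.scale_factor

class _STEQuantize(autograd.Function):
  """Same pattern as QuantizeMembrane: quantize in forward, identity gradient in backward."""
  @staticmethod
  def forward(ctx, u, quantizer):
    return quantizer(u, True)

  @staticmethod
  def backward(ctx, grad_output):
    return grad_output, None  # straight-through: rounding is invisible to the gradient

u = torch.randn(4, requires_grad=True)
out = _STEQuantize.apply(u, _ToyQuantizer())
out.sum().backward()
print(u.grad)  # tensor([1., 1., 1., 1.]): gradients flow through the quantization step
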
class RBLIF(ModNEFNeuron):
  """
  ModNEFTorch recurrent BLIF neuron model
@@ -243,10 +258,15 @@ class RBLIF(ModNEFNeuron):
    rec = self.reccurent(self.spk)
    # if self.quantization_flag:
    #   self.mem.data = self.quantizer(self.mem.data, True)
    #   input_.data = self.quantizer(input_.data, True)
    #   rec.data = self.quantizer(rec.data, True)
    if self.quantization_flag:
      self.mem = QuantizeMembrane.apply(self.mem, self.quantizer)
      input_ = QuantizeMembrane.apply(input_, self.quantizer)
      rec = QuantizeMembrane.apply(rec, self.quantizer)
    if self.reset_mechanism == "subtract":
      self.mem = (self.mem+input_+rec)*self.beta-self.reset*self.threshold
...
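
Putting the two changes together, a hedged sketch of what the patched forward pass computes for one time step (reusing _ToyQuantizer and _STEQuantize from the sketch above; beta, threshold, and reset are illustrative placeholders, not the exact RBLIF attributes):

import torch

def quantized_subtract_step(mem, input_, rec, reset, quantizer, beta=0.9, threshold=1.0):
  # Quantize the three membrane contributions through the straight-through Function,
  # then apply the "subtract" reset, mirroring the update shown in the diff.
  mem = _STEQuantize.apply(mem, quantizer)
  input_ = _STEQuantize.apply(input_, quantizer)
  rec = _STEQuantize.apply(rec, quantizer)
  return (mem + input_ + rec) * beta - reset * threshold

mem = torch.zeros(4, requires_grad=True)
input_, rec = torch.randn(4), torch.randn(4)
reset = torch.zeros(4)  # no spikes fired on the previous step
new_mem = quantized_subtract_step(mem, input_, rec, reset, _ToyQuantizer())
new_mem.sum().backward()
print(mem.grad)  # tensor([0.9, 0.9, 0.9, 0.9]): beta flows back through the quantized membrane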