My changes #684

Open · wants to merge 24 commits into base: master

Commits (24) — changes shown from 23 commits
3d85fcd
new model added
salah-daddi-nounou May 25, 2023
b04adc5
ajaccio, plot file
salah-daddi-nounou May 26, 2023
8cb0c38
trained and saved the network and assignments tensor
salah-daddi-nounou May 28, 2023
e947f72
training and testing files
salah-daddi-nounou May 30, 2023
f014e87
first commit in auguest
salah-daddi-nounou Aug 24, 2023
605dc53
created a test file
salah-daddi-nounou Aug 25, 2023
8cafc88
removed test file
salah-daddi-nounou Aug 25, 2023
dd2ff6b
aa
salah-daddi-nounou Aug 28, 2023
61f5774
updated
salah-daddi-nounou Aug 29, 2023
5302f56
changed Modified_PostPre to Bi_sigmoid in all commanding file
salah-daddi-nounou Aug 29, 2023
4c1b7ae
changed Modified_PostPre by Bi_sigmoid everywhere
salah-daddi-nounou Aug 29, 2023
9ae7c12
updated
salah-daddi-nounou Aug 30, 2023
11fef2b
last commit
salah-daddi-nounou Oct 25, 2023
7cbedcf
last commit
salah-daddi-nounou Oct 25, 2023
00bc191
last thing
salah-daddi-nounou Nov 29, 2023
4105060
simulation exp_15 finished
salah-daddi-nounou Jan 15, 2024
1bb3905
added notes.md, and modified eth_mnist.py
salah-daddi-nounou Jan 21, 2024
224985e
changed evaluate and my example, added notes and
salah-daddi-nounou Jan 25, 2024
8e84a06
modified ploting
salah-daddi-nounou Jan 26, 2024
adc3955
finished training
salah-daddi-nounou Feb 2, 2024
157fc25
added sacha file
salah-daddi-nounou Apr 24, 2024
26ac436
cleaned scripts
salah-daddi-nounou May 5, 2024
d5e30a4
clean2
salah-daddi-nounou May 5, 2024
e5b1687
added variability
salah-daddi-nounou Oct 28, 2024
3 changes: 3 additions & 0 deletions .gitignore
@@ -22,3 +22,6 @@ logs/*
.pytest_cache/*
.vscode/*
data/*
/examples/mnist/*.pt
/examples/mnist/draft*

227 changes: 227 additions & 0 deletions bindsnet/analysis/plotting.py
@@ -842,3 +842,230 @@ def plot_voltages(
plt.tight_layout()

return ims, axes

# Added plot_traces, adapted from plot_voltages: the logic is identical, but it plots spike traces instead of voltages.
def plot_traces(
traces: Dict[str, torch.Tensor],
ims: Optional[List[AxesImage]] = None,
axes: Optional[List[Axes]] = None,
time: Optional[Tuple[int, int]] = None,
n_neurons: Optional[Dict[str, Tuple[int, int]]] = None,
cmap: Optional[str] = "jet",
plot_type: str = "color",
thresholds: Optional[Dict[str, torch.Tensor]] = None,
figsize: Tuple[float, float] = (8.0, 4.5),
) -> Tuple[List[AxesImage], List[Axes]]:
# language=rst
"""
Plot traces for any group(s) of neurons.

:param traces: Contains trace data by neuron layers.
:param ims: Used for re-drawing the plots.
:param axes: Used for re-drawing the plots.
:param time: Plot traces of neurons in given time range. Default is entire
simulation time.
:param n_neurons: Plot traces of neurons in given range of neurons. Default is all
neurons.
:param cmap: Matplotlib colormap to use.
:param figsize: Horizontal, vertical figure size in inches.
:param plot_type: How to draw the plot: ``'color'`` for a ``pcolormesh`` color map,
``'line'`` for line plots.
:param thresholds: Thresholds of the neurons in each layer.
:return: ``ims, axes``: Used for re-drawing the plots.
"""
n_subplots = len(traces.keys())

# Flatten each layer's traces to shape (time, n_neurons).
traces = {k: v.view(v.size(0), -1) for (k, v) in traces.items()}

if time is None:
time = (0, next(iter(traces.values())).size(0))

if n_neurons is None:
n_neurons = {}

for key, val in traces.items():
if key not in n_neurons.keys():
n_neurons[key] = (0, val.size(1))

if not ims:
fig, axes = plt.subplots(n_subplots, 1, figsize=figsize)
ims = []
if n_subplots == 1: # Plotting only one image
for v in traces.items():
if plot_type == "line":
ims.append(
axes.plot(
v[1]
.detach()
.clone()
.cpu()
.numpy()[
time[0] : time[1],
n_neurons[v[0]][0] : n_neurons[v[0]][1],
]
)
)

if thresholds is not None and thresholds[v[0]].size() == torch.Size(
[]
):
ims.append(
axes.axhline(
y=thresholds[v[0]].item(), c="r", linestyle="--"
)
)
else:
ims.append(
axes.pcolormesh(
v[1]
.cpu()
.numpy()[
time[0] : time[1],
n_neurons[v[0]][0] : n_neurons[v[0]][1],
]
.T,
cmap=cmap,
)
)

args = (v[0], n_neurons[v[0]][0], n_neurons[v[0]][1], time[0], time[1])
plt.title("%s traces for neurons (%d - %d) from t = %d to %d " % args)
plt.xlabel("Time (ms)")

if plot_type == "line":
plt.ylabel("trace")
else:
plt.ylabel("Neuron index")

axes.set_aspect("auto")

else:  # Plot one layer per subplot
for i, v in enumerate(traces.items()):
if plot_type == "line":
ims.append(
axes[i].plot(
v[1]
.cpu()
.numpy()[
time[0] : time[1],
n_neurons[v[0]][0] : n_neurons[v[0]][1],
]
)
)
if thresholds is not None and thresholds[v[0]].size() == torch.Size(
[]
):
ims.append(
axes[i].axhline(
y=thresholds[v[0]].item(), c="r", linestyle="--"
)
)
else:
ims.append(
axes[i].matshow(
v[1]
.cpu()
.numpy()[
time[0] : time[1],
n_neurons[v[0]][0] : n_neurons[v[0]][1],
]
.T,
cmap=cmap,
)
)
args = (v[0], n_neurons[v[0]][0], n_neurons[v[0]][1], time[0], time[1])
axes[i].set_title(
"%s traces for neurons (%d - %d) from t = %d to %d " % args
)

for ax in axes:
ax.set_aspect("auto")

if plot_type == "color":
plt.setp(axes, xlabel="Simulation time", ylabel="Neuron index")
elif plot_type == "line":
plt.setp(axes, xlabel="Simulation time", ylabel="trace")

plt.tight_layout()

else:
# Plotting figure given
if n_subplots == 1: # Plotting only one image
for v in traces.items():
axes.clear()
if plot_type == "line":
axes.plot(
v[1]
.cpu()
.numpy()[
time[0] : time[1], n_neurons[v[0]][0] : n_neurons[v[0]][1]
]
)
if thresholds is not None and thresholds[v[0]].size() == torch.Size(
[]
):
axes.axhline(y=thresholds[v[0]].item(), c="r", linestyle="--")
else:
axes.matshow(
v[1]
.cpu()
.numpy()[
time[0] : time[1], n_neurons[v[0]][0] : n_neurons[v[0]][1]
]
.T,
cmap=cmap,
)
args = (v[0], n_neurons[v[0]][0], n_neurons[v[0]][1], time[0], time[1])
axes.set_title(
"%s traces for neurons (%d - %d) from t = %d to %d " % args
)
axes.set_aspect("auto")

else:
# Plot one layer per subplot
for i, v in enumerate(traces.items()):
axes[i].clear()
if plot_type == "line":
axes[i].plot(
v[1]
.cpu()
.numpy()[
time[0] : time[1], n_neurons[v[0]][0] : n_neurons[v[0]][1]
]
)
if thresholds is not None and thresholds[v[0]].size() == torch.Size(
[]
):
axes[i].axhline(
y=thresholds[v[0]].item(), c="r", linestyle="--"
)
else:
axes[i].matshow(
v[1]
.cpu()
.numpy()[
time[0] : time[1], n_neurons[v[0]][0] : n_neurons[v[0]][1]
]
.T,
cmap=cmap,
)
args = (v[0], n_neurons[v[0]][0], n_neurons[v[0]][1], time[0], time[1])
axes[i].set_title(
"%s traces for neurons (%d - %d) from t = %d to %d " % args
)

for ax in axes:
ax.set_aspect("auto")

if plot_type == "color":
plt.setp(axes, xlabel="Simulation time", ylabel="Neuron index")
elif plot_type == "line":
plt.setp(axes, xlabel="Simulation time", ylabel="trace")

plt.tight_layout()

return ims, axes
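A minimal usage sketch of the new function (the layer name, shapes, and synthetic data below are illustrative, not part of the PR; in practice the traces would come from a ``Monitor`` recording a layer's trace variable):

import torch
import matplotlib.pyplot as plt

from bindsnet.analysis.plotting import plot_traces

# Synthetic traces: 250 time steps for a layer of 100 neurons.
traces = {"Y": torch.rand(250, 100)}

# Color map of all neurons over the full simulation time.
ims, axes = plot_traces(traces, plot_type="color")

# Line plot of the first ten neurons, with a scalar threshold drawn as a dashed line.
ims, axes = plot_traces(
    traces,
    n_neurons={"Y": (0, 10)},
    plot_type="line",
    thresholds={"Y": torch.tensor(0.8)},
)
plt.show()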
2 changes: 1 addition & 1 deletion bindsnet/evaluation/evaluation.py
@@ -118,7 +118,7 @@ def all_activity(
n_assigns = torch.sum(assignments == i).float()

if n_assigns > 0:
# Get indices of samples with this label.
# Get indices of neurons assigned this label (correction: these are neurons, not samples).
indices = torch.nonzero(assignments == i).view(-1)

# Compute layer-wise firing rate for this label.
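For context, a short sketch of the evaluation workflow around ``assignments`` (the synthetic tensors and sizes are illustrative):

import torch

from bindsnet.evaluation import all_activity, assign_labels

n_samples, time, n_neurons, n_labels = 32, 250, 100, 10
spikes = (torch.rand(n_samples, time, n_neurons) < 0.05).float()  # synthetic spike record
labels = torch.randint(0, n_labels, (n_samples,))

# Each neuron is assigned the label for which it fired most, on average.
assignments, proportions, rates = assign_labels(spikes, labels, n_labels)

# Predict each sample's label from the summed activity of its assigned neurons.
predictions = all_activity(spikes, assignments, n_labels)
accuracy = (predictions == labels).float().mean().item()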
2 changes: 2 additions & 0 deletions bindsnet/learning/__init__.py
@@ -5,6 +5,7 @@
LearningRule,
NoOp,
PostPre,
Bi_sigmoid,
Rmax,
WeightDependentPostPre,
)
@@ -13,6 +14,7 @@
"LearningRule",
"NoOp",
"PostPre",
"Bi_sigmoid",
"WeightDependentPostPre",
"Hebbian",
"MSTDP",
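With the rule exported, a quick smoke test (names as added above):

from bindsnet.learning import Bi_sigmoid, LearningRule

# Bi_sigmoid should resolve and subclass LearningRule, like the other rules.
assert issubclass(Bi_sigmoid, LearningRule)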
79 changes: 79 additions & 0 deletions bindsnet/learning/learning.py
@@ -389,6 +389,11 @@ def _connection_update(self, **kwargs) -> None:
"""
Post-pre learning rule for ``Connection`` subclass of ``AbstractConnection``
class.

[Collaborator comment] This needs to be generalized; MNIST is only an example.

self.source.s : 28*28 array of 0s and 1s; source_s : the same array flattened to a 1-D vector (784*1)
self.target.x : array of 100 values (~1e-5); target_x : array (20*5) (values ~1e-9)
source : first layer; target : second layer.
s : spike occurrences (0 or 1) for each neuron; x : exponentially decaying trace
"""
batch_size = self.source.batch_size

@@ -549,6 +554,80 @@ def _conv3d_connection_update(self, **kwargs) -> None:

super().update()

#===================================================
class Bi_sigmoid(LearningRule):
# language=rst
"""
Bi_sigmoid STDP rule involving only post-synaptic spiking activity. The weight update
quantity is positive if the post-synaptic spike occurs shortly after the pre-synaptic spike,
and negative otherwise.
"""

[Collaborator comment] poisitive --> positive

def __init__(
self,
connection: AbstractConnection,
nu: Optional[Union[float, Sequence[float], Sequence[torch.Tensor]]] = None,
reduction: Optional[callable] = None,
weight_decay: float = 0.0,
**kwargs,
) -> None:
# language=rst
"""
Constructor for ``Bi_sigmoid`` learning rule.

:param connection: An ``AbstractConnection`` object whose weights the
``Bi_sigmoid`` learning rule will modify.
:param nu: Single or pair of learning rates for pre- and post-synaptic events. It also
accepts a pair of tensors to individualize learning rates of each neuron.
In this case, their shape should be the same size as the connection weights.
:param reduction: Method for reducing parameter updates along the batch
dimension.
:param weight_decay: Coefficient controlling rate of decay of the weights each iteration.
"""
super().__init__(
connection=connection,
nu=nu,
reduction=reduction,
weight_decay=weight_decay,
**kwargs,
)

assert (
self.source.traces and self.target.traces
), "Both pre- and post-synaptic nodes must record spike traces."

if isinstance(connection, (Connection, LocalConnection)):  # added: Bi_sigmoid works only for these two connection types
self.update = self._connection_update  # overrides the update method defined in the base class
else:
raise NotImplementedError(
"This learning rule is not supported for this Connection type."
)

def _connection_update(self, **kwargs) -> None:
# language=rst
"""
Bi_sigmoid learning rule for ``Connection`` subclass of ``AbstractConnection``
class.

self.source.s : 28*28 array of 0s and 1s; source_s : the same array flattened to a 1-D vector (784*1)
self.target.x2 : array of 100 values (~1e-5); target_x2 : array (20*5) (values ~1e-9)
source : first layer; target : second layer.
s : spike occurrences (0 or 1) for each neuron; x2 : bi_sigmoid decaying trace
In this rule we use only the spiking of the post-synaptic layer (target_s) and the bi_sigmoid trace of the pre-synaptic layer (source_x2).
"""

[Collaborator comment] Same as before: the description needs to be generalized; MNIST is only one way to use BindsNET.
batch_size = self.source.batch_size

# Post-synaptic update.
if self.nu[1].any():
target_s = (self.target.s.view(batch_size, -1).unsqueeze(1).float() * self.nu[1])  # 100 post-synaptic spike values (0 or 1)
source_x2 = self.source.x2.view(batch_size, -1).unsqueeze(2)  # 784-value 1-D pre-synaptic trace (values between -1 and 1)
self.connection.w += self.reduction(torch.bmm(source_x2, target_s), dim=0)
del source_x2, target_s

super().update()


#===================================================

class WeightDependentPostPre(LearningRule):
# language=rst
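For an end-to-end picture, a minimal network sketch using the new rule. This is an assumption-laden illustration: it uses the standard BindsNET ``Network``/``Connection`` API, the sizes are arbitrary, and it presumes the source layer exposes the custom ``x2`` bi_sigmoid trace that the rule reads (added elsewhere in this PR, not shown in this diff):

import torch

from bindsnet.learning import Bi_sigmoid
from bindsnet.network import Network
from bindsnet.network.nodes import Input, LIFNodes
from bindsnet.network.topology import Connection

network = Network(dt=1.0)
source = Input(n=784, traces=True)   # must also carry the x2 trace for Bi_sigmoid
target = LIFNodes(n=100, traces=True)

connection = Connection(
    source=source,
    target=target,
    w=0.3 * torch.rand(784, 100),
    update_rule=Bi_sigmoid,
    nu=(1e-4, 1e-2),  # only nu[1] (the post-synaptic rate) is used by this rule
)

network.add_layer(source, name="X")
network.add_layer(target, name="Y")
network.add_connection(connection, source="X", target="Y")

# The weight update is a per-batch outer product:
# bmm((B, 784, 1), (B, 1, 100)) -> (B, 784, 100), matching connection.w's shape.
spikes = torch.bernoulli(0.1 * torch.ones(250, 784)).byte()
network.run(inputs={"X": spikes}, time=250)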
2 changes: 2 additions & 0 deletions bindsnet/models/__init__.py
@@ -1,5 +1,6 @@
from bindsnet.models.models import (
DiehlAndCook2015,
Salah_model,
DiehlAndCook2015v2,
IncreasingInhibitionNetwork,
LocallyConnectedNetwork,
@@ -9,6 +10,7 @@
__all__ = [
"TwoLayerNetwork",
"DiehlAndCook2015v2",
"Salah_model",
"DiehlAndCook2015",
"IncreasingInhibitionNetwork",
"LocallyConnectedNetwork",
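The ``Salah_model`` implementation itself is outside this excerpt; assuming its constructor mirrors ``DiehlAndCook2015`` (which it is listed beside), a hypothetical instantiation would be:

from bindsnet.models import Salah_model

# n_inpt/n_neurons are assumptions borrowed from DiehlAndCook2015's signature;
# see bindsnet/models/models.py in this PR for the actual parameters.
net = Salah_model(n_inpt=784, n_neurons=100)
print(net.layers.keys(), net.connections.keys())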