Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor #15

Merged
merged 34 commits into from
May 2, 2023
Merged
Show file tree
Hide file tree
Changes from 24 commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
0859a7c
overall sketch of input and output layers
atenagm1375 Apr 25, 2023
59a8de1
review CorticalColumn.connect and fix proximal dendrites
atenagm1375 Apr 25, 2023
7518873
define neuron groups for all input and output populations
atenagm1375 Apr 25, 2023
1a5700c
add config for user-defined behavior, set dataloader and iteration at…
atenagm1375 Apr 26, 2023
f59a99f
isolate timestamps
saeedark Apr 26, 2023
b9a6f53
config class to make parameters
saeedark Apr 27, 2023
3674414
add deep config update
saeedark Apr 27, 2023
2e9baea
remove user defined module
saeedark Apr 28, 2023
81f7383
Remove extra layers from cortical column
saeedark Apr 28, 2023
b9aeab2
unify population naming scheme
saeedark Apr 28, 2023
40574f2
diverse connect method for cortical column
saeedark Apr 28, 2023
a4f39bd
InputLayer connect method
saeedark Apr 28, 2023
768820f
fix typo, change default value of fire in SpikingNeuronGroup to True
atenagm1375 Apr 28, 2023
ee8dad1
feat: Resolve types and typos
Apr 28, 2023
9d9b98e
feat: layers as network object
saeedark Apr 30, 2023
9a5cb1a
fix: setter for input population
saeedark Apr 30, 2023
d23f71c
add: Example and bug fixes
saeedark Apr 30, 2023
4cf51ba
fix: input2LayerConnection
saeedark Apr 30, 2023
4a6496d
fix typos and separate transforms from encoders
atenagm1375 Apr 30, 2023
3a3ee6d
fix: no decay for dendrite
saeedark Apr 30, 2023
c503647
Merge branch 'refactor' of https://github.com/cnrl/CoNeX into refactor
saeedark Apr 30, 2023
19c8d15
fix type
saeedark May 1, 2023
bee0047
fix imports
saeedark May 1, 2023
de37ca9
fix imports
saeedark May 1, 2023
57493d1
feat: Override dunder call method for better DX
May 1, 2023
7977ad3
Merge remote-tracking branch 'origin/refactor' into refactor
May 1, 2023
4885286
rename:
atenagm1375 May 2, 2023
bab73b3
edit priority values for easier referencing of occupied values
atenagm1375 May 2, 2023
3352139
fix: Priority orders
saeedark May 2, 2023
61b8a33
edit: python 3.9 as requirement
saeedark May 2, 2023
6e48e83
style: consistent delay making
saeedark May 2, 2023
fb7053c
feat: Layer2PopConnectionConfig
saeedark May 2, 2023
6cccf55
style: rename N to ndim
saeedark May 2, 2023
05c9a54
fix: io's first tag
saeedark May 2, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
54 changes: 54 additions & 0 deletions Example/test/configs/l2_3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
from conex.behaviors.neurons.neuron_types.lif_neurons import AELIF, ELIF
from conex.nn.Config.layer_config import LayerConfig
from pymonntorch import *


class l2_3(LayerConfig):
    """Configuration of cortical layer 2/3: AELIF excitatory and ELIF inhibitory populations."""

    # --- excitatory population ---
    exc_size = (10, 20, 20)  # presumably (depth, height, width) — cf. NeuronDimension usage; TODO confirm
    exc_neuron_type = AELIF
    exc_neuron_params = dict(
        R=20,
        tau=25,
        threshold=-37,
        v_reset=-75,
        v_rest=-65,
        alpha=0.3,
        beta=-0.4,
        w_tau=10,
        delta=0.9,
        theta_rh=-42,
    )
    exc_tau_trace = 2.7
    exc_fire = True
    exc_dendrite_params = dict(distal_provocativeness=0.5)

    # --- inhibitory population ---
    inh_size = (5, 20, 20)
    inh_neuron_type = ELIF
    inh_neuron_params = dict(
        R=20,
        tau=25,
        threshold=-37,
        v_reset=-75,
        v_rest=-65,
        delta=0.8,
        theta_rh=-42,
    )
    inh_tau_trace = 2.7
    inh_fire = True
    inh_dendrite_params = dict(distal_provocativeness=0.5)

    # --- intra-layer synapses: uniform weight init, "Simple" structure ---
    exc_exc_weight_init_params = dict(mode="uniform")
    exc_exc_structure = "Simple"
    exc_exc_structure_params = dict(current_coef=3)

    exc_inh_weight_init_params = dict(mode="uniform")
    exc_inh_structure = "Simple"
    exc_inh_structure_params = dict(current_coef=3)

    inh_exc_weight_init_params = dict(mode="uniform")
    inh_exc_structure = "Simple"
    inh_exc_structure_params = dict(current_coef=3)

    inh_inh_weight_init_params = dict(mode="uniform")
    inh_inh_structure = "Simple"
    inh_inh_structure_params = dict(current_coef=3)
9 changes: 9 additions & 0 deletions Example/test/configs/l2_3_l5.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from conex.nn.Config.connection_config import Layer2LayerConnectionConfig


class l2_3_l5(Layer2LayerConnectionConfig):
    """Excitatory-to-excitatory projection from layer 2/3 to layer 5."""

    exc_exc_src_pop = "exc_pop"
    exc_exc_dst_pop = "exc_pop"
    exc_exc_weight_init_params = dict(mode="uniform")
    exc_exc_structure = "Simple"
    exc_exc_structure_params = dict(current_coef=3)
49 changes: 49 additions & 0 deletions Example/test/configs/l4.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from conex.behaviors.neurons.neuron_types.lif_neurons import LIF, ELIF
from conex.nn.Config.layer_config import LayerConfig
from pymonntorch import *


class l4(LayerConfig):
    """Configuration of cortical layer 4: LIF excitatory and ELIF inhibitory populations."""

    # --- excitatory population ---
    exc_size = (4, 25, 25)  # presumably (depth, height, width) — cf. NeuronDimension usage; TODO confirm
    exc_neuron_type = LIF
    exc_neuron_params = dict(
        R=20,
        tau=25,
        threshold=-37,
        v_reset=-75,
        v_rest=-65,
    )
    exc_tau_trace = 2.7
    exc_fire = True
    exc_dendrite_params = dict(distal_provocativeness=0.5)

    # --- inhibitory population ---
    inh_size = (4, 5, 5)
    inh_neuron_type = ELIF
    inh_neuron_params = dict(
        R=20,
        tau=25,
        threshold=-37,
        v_reset=-75,
        v_rest=-65,
        delta=0.8,
        theta_rh=-42,
    )
    inh_tau_trace = 2.7
    inh_fire = True
    inh_dendrite_params = dict(distal_provocativeness=0.5)

    # --- intra-layer synapses: uniform weight init, "Simple" structure ---
    exc_exc_weight_init_params = dict(mode="uniform")
    exc_exc_structure = "Simple"
    exc_exc_structure_params = dict(current_coef=3)

    exc_inh_weight_init_params = dict(mode="uniform")
    exc_inh_structure = "Simple"
    exc_inh_structure_params = dict(current_coef=3)

    inh_exc_weight_init_params = dict(mode="uniform")
    inh_exc_structure = "Simple"
    inh_exc_structure_params = dict(current_coef=3)

    inh_inh_weight_init_params = dict(mode="uniform")
    inh_inh_structure = "Simple"
    inh_inh_structure_params = dict(current_coef=3)
9 changes: 9 additions & 0 deletions Example/test/configs/l4_l2_3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from conex.nn.Config.connection_config import Layer2LayerConnectionConfig


class l4_l2_3(Layer2LayerConnectionConfig):
    """Excitatory-to-excitatory projection from layer 4 to layer 2/3."""

    exc_exc_src_pop = "exc_pop"
    exc_exc_dst_pop = "exc_pop"
    exc_exc_weight_init_params = dict(mode="uniform")
    exc_exc_structure = "Simple"
    exc_exc_structure_params = dict(current_coef=3)
51 changes: 51 additions & 0 deletions Example/test/configs/l5.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
from conex.behaviors.neurons.neuron_types.lif_neurons import ELIF
from conex.nn.Config.layer_config import LayerConfig
from pymonntorch import *


class l5(LayerConfig):
    """Configuration of cortical layer 5: ELIF excitatory and ELIF inhibitory populations."""

    # --- excitatory population ---
    exc_size = (3, 5, 5)  # presumably (depth, height, width) — cf. NeuronDimension usage; TODO confirm
    exc_neuron_type = ELIF
    exc_neuron_params = dict(
        R=20,
        tau=25,
        threshold=-37,
        v_reset=-75,
        v_rest=-65,
        delta=0.8,
        theta_rh=-42,
    )
    exc_tau_trace = 2.7
    exc_fire = True
    exc_dendrite_params = dict(distal_provocativeness=0.5)

    # --- inhibitory population ---
    inh_size = (1, 5, 5)
    inh_neuron_type = ELIF
    inh_neuron_params = dict(
        R=20,
        tau=25,
        threshold=-37,
        v_reset=-75,
        v_rest=-65,
        delta=0.8,
        theta_rh=-42,
    )
    inh_tau_trace = 2.7
    inh_fire = True
    inh_dendrite_params = dict(distal_provocativeness=0.5)

    # --- intra-layer synapses: uniform weight init, "Simple" structure ---
    exc_exc_weight_init_params = dict(mode="uniform")
    exc_exc_structure = "Simple"
    exc_exc_structure_params = dict(current_coef=3)

    exc_inh_weight_init_params = dict(mode="uniform")
    exc_inh_structure = "Simple"
    exc_inh_structure_params = dict(current_coef=3)

    inh_exc_weight_init_params = dict(mode="uniform")
    inh_exc_structure = "Simple"
    inh_exc_structure_params = dict(current_coef=3)

    inh_inh_weight_init_params = dict(mode="uniform")
    inh_inh_structure = "Simple"
    inh_inh_structure_params = dict(current_coef=3)
9 changes: 9 additions & 0 deletions Example/test/configs/l5_l2_3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from conex.nn.Config.connection_config import Layer2LayerConnectionConfig


class l5_l2_3(Layer2LayerConnectionConfig):
    """Excitatory-to-excitatory feedback projection from layer 5 to layer 2/3."""

    exc_exc_src_pop = "exc_pop"
    exc_exc_dst_pop = "exc_pop"
    exc_exc_weight_init_params = dict(mode="uniform")
    exc_exc_structure = "Simple"
    exc_exc_structure_params = dict(current_coef=3)
9 changes: 9 additions & 0 deletions Example/test/configs/sens_l4.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from conex.nn.Config.connection_config import Input2LayerConnectionConfig


class sens_l4(Input2LayerConnectionConfig):
    """Convolutional projection from the sensory input to layer 4's excitatory population."""

    exc_dst_pop = "exc_pop"
    exc_tag = "Proximal"
    exc_structure = "Conv2d"
    exc_structure_params = dict(current_coef=3)
    # weight_shape [4, 1, 4, 4]: presumably (out_channels, in_channels, kH, kW) — TODO confirm
    exc_weight_init_params = dict(mode="uniform", weight_shape=[4, 1, 4, 4])
104 changes: 104 additions & 0 deletions Example/test/mnist.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
"""Example script: stream Poisson-encoded MNIST through a single cortical column."""

from pymonntorch import *

from conex import *
from conex.nn.timestamps import NEURON_TIMESTAMPS

from conex.helpers.encoders import Poisson
from conex.helpers.transforms import *

from torchvision import transforms
from torchvision.datasets import MNIST
from torch.utils.data import DataLoader

import torch

# parameters
POISSON_TIME = 30  # timesteps each image is presented as a spike train
POISSON_RATIO = 5 / 30  # presumably the encoder's target spike ratio — TODO confirm
MNIST_ROOT = "~/Temp/MNIST/"
SENSORY_SIZE_HEIGHT = 28
SENSORY_SIZE_WIDTH = 28  # MNIST's image size
SENSORY_TRACE_TAU_S = 2.7  # trace decay for the sensory population
DEVICE = "cuda"
DTYPE = torch.float16

# layer/connection config classes used below (each defines a *Config subclass)
from configs.l2_3 import *
from configs.l4 import *
from configs.l5 import *
from configs.sens_l4 import *
from configs.l2_3_l5 import *
from configs.l4_l2_3 import *
from configs.l5_l2_3 import *


#############################
# making dataloader
#############################
# ToTensor -> drop channel dim -> expand each image into POISSON_TIME spike frames
transformation = transforms.Compose(
    [
        transforms.ToTensor(),
        SqueezeTransform(dim=0),
        Poisson(timesteps=POISSON_TIME, ratio=POISSON_RATIO),
    ]
)

dataset = MNIST(root=MNIST_ROOT, train=True, download=True, transform=transformation)

dl = DataLoader(dataset, batch_size=16)
#############################


#############################
# initializing neocortex
#############################
net = Neocortex(settings={"device": DEVICE, "dtype": DTYPE})
#############################


#############################
# input layer
#############################
# Sensory-only input (no location population, no labels consumed here).
input_layer = InputLayer(
    net=net,
    input_dataloader=dl,
    sensory_size=NeuronDimension(
        depth=1, height=SENSORY_SIZE_HEIGHT, width=SENSORY_SIZE_WIDTH
    ),
    sensory_trace=SENSORY_TRACE_TAU_S,
)

#############################


#############################
# making cortical column
#############################
# L6 is omitted in this example, so its intra-column synapse config is None too.
cc1 = CorticalColumn(
    net,
    L2_3_config=l2_3().make(),
    L4_config=l4().make(),
    L5_config=l5().make(),
    L6_config=None,
    L6_L4_syn_config=None,
    L4_L2_3_syn_config=l4_l2_3().make(),
    L2_3_L5_syn_config=l2_3_l5().make(),
    L5_L2_3_syn_config=l5_l2_3().make(),
)
#############################


#############################
# connect sensory to column
#############################
# Only sensory->L4 is wired; L6 paths are disabled to match the column above.
input_layer.connect(
    cc1,
    sensory_L4_syn_config=sens_l4().make(),
    sensory_L6_syn_config=None,
    location_L6_syn_config=None,
)
#############################


net.initialize()

net.simulate_iterations(100)
23 changes: 13 additions & 10 deletions conex/__init__.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
from conex.behaviours.network.neuromodulators import *
from conex.behaviours.network.timestep import *
from conex.behaviours.network.reward import *
from conex.behaviors.network.neuromodulators import *
from conex.behaviors.network.timestep import *
from conex.behaviors.network.reward import *

from conex.behaviours.neurons.neuron_types.lif_neurons import *
from conex.behaviours.neurons.sensory.dataset import *
from conex.behaviours.neurons.specs import *
from conex.behaviors.neurons.neuron_types.lif_neurons import *
from conex.behaviors.neurons.setters import *
from conex.behaviors.neurons.specs import *

from conex.behaviours.synapses.dendrites import *
from conex.behaviours.synapses.learning import *
from conex.behaviours.synapses.specs import *
from conex.behaviors.synapses.dendrites import *
from conex.behaviors.synapses.learning import *
from conex.behaviors.synapses.specs import *

from conex.behaviors.layer.dataset import *

from conex.nn.Structure import *
from conex.nn.Modules import *
from conex.nn.Modules import *
from conex.nn.Config import *
File renamed without changes.
62 changes: 62 additions & 0 deletions conex/behaviors/layer/dataset.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""
Behaviors to load datasets
"""

from pymonntorch import Behavior
import torch


class SpikeNdDataset(Behavior):
    """
    Behavior that eases loading a dataset as spikes for an `InputLayer`.

    Each simulation step pops one instance from an internal generator and
    publishes it on the layer as ``layer.x`` (sensory), ``layer.loc``
    (location) and ``layer.y`` (label).

    Parameters (read via ``self.parameter``):
        dataloader: iterable yielding batch tuples; sensory data comes
            first, location data next (if present), and the label is
            always the LAST element. Required.
        N_sensory: number of trailing tensor dims forming one sensory
            instance (default 2).
        N_location: number of trailing tensor dims forming one location
            instance (default 2).
        have_location: batches carry location data (default False).
        have_sensory: batches carry sensory data (default True).
        have_label: batches carry labels (default True).
        loop: restart the dataloader when exhausted (default True).
    """

    def initialize(self, layer):
        self.dataloader = self.parameter("dataloader", None, required=True)
        self.sensory_dimension = self.parameter("N_sensory", 2)
        self.location_dimension = self.parameter("N_location", 2)
        self.have_location = self.parameter("have_location", False)
        self.have_sensory = self.parameter("have_sensory", True)
        self.have_label = self.parameter("have_label", True)
        self.loop = self.parameter("loop", True)

        self.data_generator = self._get_data()
        self.device = layer.device

    def _get_data(self):
        # Generator over single instances; iterates the dataloader forever
        # while self.loop is truthy.
        while self.loop:
            for batch in self.dataloader:
                batch_x = batch[0] if self.have_sensory else None
                # bool-as-index: location sits at index 1 when sensory data
                # precedes it, at index 0 otherwise.
                batch_loc = batch[self.have_sensory] if self.have_location else None
                # The label is always the last item of the batch tuple.
                batch_y = batch[-1] if self.have_label else None

                if batch_x is not None:
                    batch_x = batch_x.to(self.device)
                    # Flatten all leading (batch/time) dims; each row is one
                    # instance of shape batch_x.shape[-N_sensory:].
                    batch_x = batch_x.view(
                        (-1, *batch_x.shape[-self.sensory_dimension :])
                    )
                    num_instance = batch_x.size(0)

                if batch_loc is not None:
                    batch_loc = batch_loc.to(self.device)
                    batch_loc = batch_loc.view(
                        (-1, *batch_loc.shape[-self.location_dimension :])
                    )
                    num_instance = batch_loc.size(0)
                    # Bug fix: `if batch_x:` raises RuntimeError for tensors
                    # with more than one element (ambiguous truth value);
                    # test for presence explicitly instead.
                    if batch_x is not None:
                        assert (
                            batch_x.size(0) == num_instance
                        ), "sensory and location should have same number of instances."

                if batch_y is not None:
                    batch_y = batch_y.to(self.device)
                    # How many consecutive instances share one label
                    # (a batch may expand into multiple timestep instances).
                    each_instance = num_instance // torch.numel(batch_y)

                for i in range(num_instance):
                    x = batch_x[i].view((-1,)) if batch_x is not None else None
                    loc = batch_loc[i].view((-1,)) if batch_loc is not None else None
                    y = batch_y[i // each_instance] if batch_y is not None else None
                    yield x, loc, y

    def forward(self, layer):
        # Publish the next instance onto the layer, one per simulation step.
        layer.x, layer.loc, layer.y = next(self.data_generator)
File renamed without changes.
Loading