
Commit

'Refactored by Sourcery'
Sourcery AI committed Jul 28, 2023
1 parent e4b834c commit 1fd4e39
Showing 8 changed files with 116 additions and 143 deletions.
61 changes: 19 additions & 42 deletions analysis_main.py
@@ -32,13 +32,8 @@ def tri_filter(signal, kernel_delta):
         width of kernel in datapoints
     """
     kernel = np.append(np.arange(kernel_delta / 2), np.arange(kernel_delta / 2, -1, -1))
-    # convolve2d has proven PAINFULLY slow for some reason
-    # signal_conv = convolve2d(signal,kernel,'same')
-    new_signal = []
-    for x in signal:
-        new_signal.append(convolve(x, kernel, "same"))
-    signal_conv = np.array(new_signal)
-    return signal_conv
+    new_signal = [convolve(x, kernel, "same") for x in signal]
+    return np.array(new_signal)
 
 
 def correlate_signals(signal1, signal2):
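The hunk above shows the pattern Sourcery applies throughout this file: an accumulator list built with append inside a for loop becomes a single list comprehension, and a temporary that is only ever returned is inlined into the return statement. A minimal standalone sketch of the equivalence, with made-up sample data (the real signals come from elsewhere in the repository):

    import numpy as np
    from scipy.signal import convolve

    signal = np.random.rand(4, 100)  # four hypothetical 1-D signals
    kernel_delta = 10
    kernel = np.append(np.arange(kernel_delta / 2),
                       np.arange(kernel_delta / 2, -1, -1))

    # Old form: explicit loop with append, then a named temporary.
    old = []
    for x in signal:
        old.append(convolve(x, kernel, "same"))
    old = np.array(old)

    # New form: list comprehension, result returned directly.
    new = np.array([convolve(x, kernel, "same") for x in signal])

    assert np.array_equal(old, new)  # both forms produce the same array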
@@ -66,8 +61,7 @@ def avg_dotprod_signals(signal1, signal2):
     non_silent_sigs.sort()
     product = signal1[non_silent_sigs] * signal2[non_silent_sigs]
     prod_sum = product.sum(axis=1)
-    avg_dot_product = prod_sum.mean()
-    return avg_dot_product
+    return prod_sum.mean()
 
 
 def ndp_signals(signal1, signal2):
@@ -103,14 +97,10 @@ def avg_dotprod_signals_tbinned(signal1, signal2, len_bin=1000):
     signal2 = np.reshape(signal2[:, 0 : int((signal2.shape[1] / len_bin) * len_bin)], (signal2.shape[0], signal2.shape[1] / len_bin, len_bin), len_bin)
     signal2 = signal2[:, 0:5, :]
 
-    sig1 = []
-    for x in signal1:
-        sig1.append(normalize(x, axis=1))
+    sig1 = [normalize(x, axis=1) for x in signal1]
     signal1 = np.array(sig1)
 
-    sig2 = []
-    for x in signal2:
-        sig2.append(normalize(x, axis=1))
+    sig2 = [normalize(x, axis=1) for x in signal2]
     signal2 = np.array(sig2)
 
     product = signal1 * signal2
@@ -120,8 +110,7 @@ def avg_dotprod_signals_tbinned(signal1, signal2, len_bin=1000):
 
     for x in silent_sigs:
         prod_sum[x[0], x[1]] = np.NaN
-    avg_dot_product = np.nanmean(prod_sum, axis=0)
-    return avg_dot_product
+    return np.nanmean(prod_sum, axis=0)
 
 
 def time_stamps_to_signal(time_stamps, dt_signal, t_start, t_stop):
@@ -135,8 +124,7 @@ def time_stamps_to_signal(time_stamps, dt_signal, t_start, t_stop):
     for x in time_stamps:
         curr_idc = []
         if np.any(x):
-            for y in x:
-                curr_idc.append((y - t_start) / dt_signal)
+            curr_idc.extend((y - t_start) / dt_signal for y in x)
         time_idc.append(curr_idc)
 
     # Set the spike indices to 1
@@ -173,11 +161,10 @@ def similarity_measure_leutgeb_BUGGY(signal1, signal2, len_bin):
     signal2 = np.reshape(signal2[:, 0 : int((signal2.shape[1] / len_bin) * len_bin)], (signal2.shape[0], signal2.shape[1] / len_bin, len_bin), len_bin)
     signal2 = signal2.sum(axis=2)
 
-    corr_vector = []
-
-    for x in range(signal1.shape[1]):
-        corr_vector.append(pearsonr(signal1[:, x], signal2[:, x])[0])
-
+    corr_vector = [
+        pearsonr(signal1[:, x], signal2[:, x])[0]
+        for x in range(signal1.shape[1])
+    ]
     return np.array(corr_vector)
 
 
@@ -189,10 +176,10 @@ def similarity_measure_leutgeb(signal1, signal2, len_bin):
     signal2 = np.reshape(signal2[:, 0 : int(len_bin * int(signal2.shape[1] / len_bin))], (signal2.shape[0], int(signal2.shape[1] / len_bin), len_bin))
     signal2 = signal2.sum(axis=2)
     pdb.set_trace()
-    corr_vector = []
-
-    for x in range(signal1.shape[1]):
-        corr_vector.append(pearsonr(signal1[:, x], signal2[:, x])[0])
+    corr_vector = [
+        pearsonr(signal1[:, x], signal2[:, x])[0]
+        for x in range(signal1.shape[1])
+    ]
     pdb.set_trace()
     return np.array(corr_vector)
 
@@ -239,27 +226,17 @@ def sqrt_diff_norm(signal1, signal2, len_bin):
 def inner_pearsonr_BUGGY(signal1, len_bin):
     signal1 = np.reshape(signal1[:, 0 : int((signal1.shape[1] / len_bin) * len_bin)], (signal1.shape[0], signal1.shape[1] / len_bin, len_bin), len_bin)
     return signal1
-    signal1 = signal1.sum(axis=2)
-
-    corr_vector = []
-
-    for x in range(signal1.shape[1]):
-        corr_vector.append(pearsonr(signal1[:, 0], signal1[:, x])[0])
-
-    return corr_vector
 
 
 def inner_pearsonr(signal1, len_bin):
     signal1 = np.reshape(signal1, (signal1.shape[0], signal1.shape[1] / len_bin, len_bin))
 
     signal1 = signal1.sum(axis=2)
 
-    corr_vector = []
-
-    for x in range(signal1.shape[1]):
-        corr_vector.append(pearsonr(signal1[:, 0], signal1[:, x])[0])
-
-    return corr_vector
+    return [
+        pearsonr(signal1[:, 0], signal1[:, x])[0]
+        for x in range(signal1.shape[1])
+    ]
 
 
 if __name__ == "__main__":
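One change in this file goes beyond restyling: in inner_pearsonr_BUGGY, everything after the early return signal1 is unreachable, so Sourcery deletes it outright. A hypothetical minimal example of why removing unreachable code is behavior-preserving:

    def buggy(x):
        return x      # the function always exits here
        x = x + 1     # unreachable: deleting this line changes nothing
        return x

    assert buggy(3) == 3  # same result before and after the cleanup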
35 changes: 15 additions & 20 deletions ouropy/genconnection.py
@@ -23,15 +23,15 @@ def __init__(self):
 
     def get_description(self):
         """Return a descriptive string for the connection"""
-        name = self.pre_pop.name + ' to ' + self.post_pop.name + '\n'
+        name = f'{self.pre_pop.name} to {self.post_pop.name}' + '\n'
         pre_cell_targets = '\n'.join([str(x) for x in self.pre_cell_targets])
         return name + pre_cell_targets
 
     def get_name(self):
         if type(self.pre_pop) == str:
-            return self.pre_pop + ' to ' + str(self.post_pop)
+            return f'{self.pre_pop} to {str(self.post_pop)}'
         else:
-            return str(self.pre_pop) + ' to ' + str(self.post_pop)
+            return f'{str(self.pre_pop)} to {str(self.post_pop)}'
 
     def get_properties(self):
         """Get the and make them suitable for pickling"""
@@ -128,12 +128,12 @@ def __init__(self, pre_pop, post_pop,
 
         for idx, curr_cell_pos in enumerate(pre_pop_pos):
 
-            curr_dist = []
-            for post_cell_pos in post_pop_pos:
-                curr_dist.append(euclidian_dist(curr_cell_pos, post_cell_pos))
-
+            curr_dist = [
+                euclidian_dist(curr_cell_pos, post_cell_pos)
+                for post_cell_pos in post_pop_pos
+            ]
             sort_idc = np.argsort(curr_dist)
-            closest_cells = sort_idc[0:target_pool]
+            closest_cells = sort_idc[:target_pool]
             picked_cells = np.random.choice(closest_cells,
                                             divergence,
                                             replace=False)
@@ -248,20 +248,18 @@ def __init__(self, pre_pop, post_pop,
 
         for idx, curr_cell_pos in enumerate(pre_pop_pos):
 
-            curr_dist = []
-            for post_cell_pos in post_pop_pos:
-                curr_dist.append(euclidian_dist(curr_cell_pos, post_cell_pos))
-
+            curr_dist = [
+                euclidian_dist(curr_cell_pos, post_cell_pos)
+                for post_cell_pos in post_pop_pos
+            ]
             sort_idc = np.argsort(curr_dist)
-            closest_cells = sort_idc[0:target_pool]
+            closest_cells = sort_idc[:target_pool]
             picked_cells = np.random.choice(closest_cells,
                                             divergence,
                                             replace=False)
             pre_cell_target.append(picked_cells)
             for tar_c in picked_cells:
 
-                curr_syns = []
-                curr_netcons = []
                 curr_conductances = []
 
                 curr_syn = h.pyr2pyr(post_pop[tar_c].soma(0.5))
@@ -293,14 +291,11 @@ def __init__(self, pre_pop, post_pop,
                 curr_syn.Cdur_nmda = Cdur_nmda
                 curr_syn.gbar_nmda = gbar_nmda
 
-                curr_syns.append(curr_syn)
+                curr_syns = [curr_syn]
                 curr_netcon = h.NetCon(pre_pop[idx].soma(0.5)._ref_v,
                                        curr_syn, thr, Delay,
                                        weight, sec=pre_pop[idx].soma)
-                #curr_gvec = h.Vector()
-                #curr_gvec.record(curr_syn._ref_g)
-                #curr_conductances.append(curr_gvec)
-                curr_netcons.append(curr_netcon)
+                curr_netcons = [curr_netcon]
                 netcons.append(curr_netcons)
                 synapses.append(curr_syns)
                 conductances.append(curr_conductances)
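The edits in this file combine three small rewrites: string concatenation becomes f-strings, the redundant 0 is dropped from slices, and an empty list that receives exactly one append per loop iteration becomes a one-element list literal. A sketch with stand-in names (the real values are NEURON populations and synapse objects):

    import numpy as np

    # f-string vs. concatenation: identical output.
    pre, post = "GranuleCellPop", "BasketCellPop"  # hypothetical names
    assert pre + ' to ' + post == f'{pre} to {post}'

    # s[0:n] and s[:n] are the same slice.
    sort_idc = np.argsort([0.3, 0.1, 0.2])
    target_pool = 2
    assert np.array_equal(sort_idc[0:target_pool], sort_idc[:target_pool])

    # Building a one-element list directly; valid here only because the
    # loop body appends exactly once before the list is stored.
    item = object()
    old_style = []
    old_style.append(item)
    assert old_style == [item]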
