Continuous-attractor Neural Network
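
This notebook builds a one-dimensional continuous-attractor neural network (CANN) and uses BrainPy's SlowPointFinder to locate its attractor states, verify them by simulation, and assess their linear stability.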

[1]:
import matplotlib.pyplot as plt
import numpy as np
from sklearn.decomposition import PCA
[2]:
import brainpy as bp
import brainpy.math as bm

bm.set_platform('cpu')

Model
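
For orientation: the `derivative` method below implements a discretized divisive-normalization CANN. Writing $u_i$ for the synaptic input to the neuron encoding feature $x_i$, the dynamics coded there are

$$\tau \frac{du_i}{dt} = -u_i + \sum_j J_{ij}\, r_j + I^{\text{ext}}_i, \qquad r_j = \frac{u_j^2}{1 + k \sum_l u_l^2}, \qquad J_{ij} = \frac{J_0}{\sqrt{2\pi}\, a} \exp\!\left(-\frac{d(x_i, x_j)^2}{2 a^2}\right),$$

where $d(\cdot)$ is the circular distance on $[-\pi, \pi)$ computed by `dist`.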

[3]:
class CANN1D(bp.dyn.NeuGroup):
  def __init__(self, num, tau=1., k=8.1, a=0.5, A=10., J0=4.,
               z_min=-bm.pi, z_max=bm.pi, **kwargs):
    super(CANN1D, self).__init__(size=num, **kwargs)

    # parameters
    self.tau = tau  # The synaptic time constant
    self.k = k  # Degree of the rescaled inhibition
    self.a = a  # Half-width of the range of excitatory connections
    self.A = A  # Magnitude of the external input
    self.J0 = J0  # maximum connection value

    # feature space
    self.z_min = z_min
    self.z_max = z_max
    self.z_range = z_max - z_min
    self.x = bm.linspace(z_min, z_max, num)  # The encoded feature values
    self.rho = num / self.z_range  # The neural density
    self.dx = self.z_range / num  # The spacing between encoded feature values

    # variables
    self.u = bm.Variable(bm.zeros(num))
    self.input = bm.Variable(bm.zeros(num))

    # The connection matrix
    self.conn_mat = self.make_conn(self.x)

    # function
    self.integral = bp.odeint(self.derivative)

  def derivative(self, u, t, Iext):
    r1 = bm.square(u)
    r2 = 1.0 + self.k * bm.sum(r1)
    r = r1 / r2
    Irec = bm.dot(self.conn_mat, r)
    du = (-u + Irec + Iext) / self.tau
    return du

  def dist(self, d):
    d = bm.remainder(d, self.z_range)
    d = bm.where(d > 0.5 * self.z_range, d - self.z_range, d)
    return d

  def make_conn(self, x):
    assert bm.ndim(x) == 1
    x_left = bm.reshape(x, (-1, 1))
    x_right = bm.repeat(x.reshape((1, -1)), len(x), axis=0)
    d = self.dist(x_left - x_right)
    Jxx = self.J0 * bm.exp(-0.5 * bm.square(d / self.a)) / (bm.sqrt(2 * bm.pi) * self.a)
    return Jxx

  def get_stimulus_by_pos(self, pos):
    return self.A * bm.exp(-0.25 * bm.square(self.dist(self.x - pos) / self.a))

  def update(self, _t, _dt):
    self.u[:] = self.integral(self.u, _t, self.input)
    self.input[:] = 0.  # clear the input; it must be re-supplied at every step

  def cell(self, u):
    # The autonomous vector field (t = 0, Iext = 0); this is the function
    # the SlowPointFinder optimizes below.
    return self.derivative(u, 0., 0.)
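
A quick smoke test (not part of the original notebook) can confirm the model behaves as a CANN before any fixed-point analysis: drive the network with a static bump of input and watch a bump of activity form. The `inputs=('input', Iext)` specification follows BrainPy's runner convention of adding the given value to the named variable at every step; treat the exact input specification as an assumption to check against your BrainPy version.

cann = CANN1D(num=512, k=0.1, A=30., a=0.5)
Iext = cann.get_stimulus_by_pos(0.)  # bump-shaped input centered at pos = 0

# Re-supply the input on every step, since `update` clears it.
runner = bp.StructRunner(cann, inputs=('input', Iext),
                         monitors=['u'], dyn_vars=cann.vars())
runner(10.)

plt.plot(cann.x, runner.mon.u[-1])  # activity profile at the end of the run
plt.xlabel('x')
plt.ylabel('u')
plt.show()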

Helper functions

[4]:
def find_fixed_points(cann, do_pca=False, do_animation=False, tolerance=1e-8):
  # Candidate states: bumps of activity centered at positions tiling the feature space.
  candidates = cann.get_stimulus_by_pos(bm.arange(-bm.pi, bm.pi, 0.005).reshape((-1, 1)))

  finder = bp.analysis.SlowPointFinder(f_cell=cann.cell)
  finder.find_fps_with_gd_method(
    candidates=candidates,
    tolerance=1e-6, num_batch=200,
    optimizer=bp.optim.Adam(lr=bp.optim.ExponentialDecay(0.1, 2, 0.999)),
  )
  finder.filter_loss(tolerance)
  finder.keep_unique()
  # finder.exclude_outliers(tolerance=1e1)

  print('Losses of fixed points:')
  print(finder.losses)

  if do_pca:
    pca = PCA(2)
    fp_pcs = pca.fit_transform(finder.fixed_points)
    plt.plot(fp_pcs[:, 0], fp_pcs[:, 1], 'x', label='fixed points')
    plt.xlabel('PC 1')
    plt.ylabel('PC 2')
    plt.title('Fixed points PCA')
    plt.legend()
    plt.show()

  if do_animation:
    bp.visualize.animate_1D(
      dynamical_vars={'ys': finder.fixed_points, 'xs': cann.x, 'legend': 'fixed point'},
      frame_step=1, frame_delay=100, show=True,
    )

  return finder.fixed_points
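
Under the hood, SlowPointFinder casts fixed-point search as optimization, in the spirit of Sussillo and Barak's fixed-point analysis: starting from the bump-shaped candidates, Adam performs gradient descent on the squared speed of each state,

$$q(u) = \frac{1}{N}\,\lVert F(u) \rVert^2, \qquad F(u) = \left.\frac{du}{dt}\right|_{I^{\text{ext}}=0},$$

up to the exact normalization BrainPy uses. `filter_loss` then discards candidates whose final loss exceeds the tolerance, and `keep_unique` removes near-duplicate solutions.
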
[5]:
def verify_fp_through_simulation(cann, fixed_points, num=3):
  for i in range(num):
    cann.u[:] = fixed_points[i]
    runner = bp.StructRunner(cann, monitors=['u'], dyn_vars=cann.vars())
    runner(100.)
    plt.plot(runner.mon.ts, runner.mon.u.max(axis=1))
    plt.ylim(0, runner.mon.u.max() + 1)
    plt.show()
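
If the recovered states are genuine fixed points, each of these traces should be flat: the peak activity stays at its initial value for the full 100 ms of simulation.
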
[6]:
def verify_fixed_point_stability(cann, fixed_points, num=3):
  finder = bp.analysis.SlowPointFinder(f_cell=cann.cell)
  J = finder.compute_jacobians(fixed_points[:num])

  for i in range(num):
    eigval, eigvec = np.linalg.eig(np.asarray(J[i]))
    plt.figure()
    plt.scatter(np.real(eigval), np.imag(eigval))
    plt.plot([0, 0], [-1, 1], '--')  # stability boundary: Re = 0
    plt.xlabel('Real')
    plt.ylabel('Imaginary')
    plt.show()
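
Reading these spectra: a fixed point is linearly stable when every eigenvalue of the Jacobian lies to the left of the dashed Re = 0 line. For a continuous attractor we expect one eigenvalue per point to sit close to zero, the neutral direction along which the bump can slide, with all remaining eigenvalues in the left half-plane.
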
[7]:
def visualize_fixed_points(fps, plot_ids=(0,), xs=None):
  for i in plot_ids:
    if xs is None:
      plt.plot(fps[i], label=f'FP-{i}')
    else:
      plt.plot(xs, fps[i], label=f'FP-{i}')
  plt.legend()
  plt.show()

Find fixed points

[8]:
model = CANN1D(num=512, k=0.1, A=30, a=0.5)
[9]:
fps = find_fixed_points(model, do_pca=True, do_animation=False)
Optimizing with <brainpy.optimizers.optimizer.Adam object at 0x000002CF196E3EE0> to find fixed points:
    Batches 1-200 in 2.41 sec, Training loss 0.0062773521
    Batches 201-400 in 2.34 sec, Training loss 0.0002697103
    Batches 401-600 in 2.28 sec, Training loss 0.0002632072
    Batches 601-800 in 2.22 sec, Training loss 0.0002627503
    Batches 801-1000 in 2.13 sec, Training loss 0.0002622550
    Batches 1001-1200 in 2.18 sec, Training loss 0.0002617309
    Batches 1201-1400 in 2.12 sec, Training loss 0.0002611840
    Batches 1401-1600 in 2.11 sec, Training loss 0.0002606197
    Batches 1601-1800 in 2.07 sec, Training loss 0.0002600420
    Batches 1801-2000 in 2.09 sec, Training loss 0.0002594532
    Batches 2001-2200 in 2.17 sec, Training loss 0.0002588564
    Batches 2201-2400 in 2.11 sec, Training loss 0.0002582530
    Batches 2401-2600 in 2.08 sec, Training loss 0.0002576449
    Batches 2601-2800 in 2.03 sec, Training loss 0.0002570331
    Batches 2801-3000 in 2.12 sec, Training loss 0.0002564183
    Batches 3001-3200 in 2.17 sec, Training loss 0.0002558025
    Batches 3201-3400 in 2.06 sec, Training loss 0.0002551852
    Batches 3401-3600 in 2.03 sec, Training loss 0.0002545676
    Batches 3601-3800 in 2.05 sec, Training loss 0.0002539508
    Batches 3801-4000 in 2.05 sec, Training loss 0.0002533341
    Batches 4001-4200 in 2.02 sec, Training loss 0.0002527188
    Batches 4201-4400 in 2.37 sec, Training loss 0.0002521052
    Batches 4401-4600 in 2.11 sec, Training loss 0.0002514938
    Batches 4601-4800 in 2.28 sec, Training loss 0.0002508847
    Batches 4801-5000 in 2.17 sec, Training loss 0.0002502785
    Batches 5001-5200 in 2.13 sec, Training loss 0.0002496749
    Batches 5201-5400 in 2.01 sec, Training loss 0.0002490755
    Batches 5401-5600 in 1.95 sec, Training loss 0.0002484802
    Batches 5601-5800 in 1.98 sec, Training loss 0.0002478891
    Batches 5801-6000 in 2.08 sec, Training loss 0.0002473027
    Batches 6001-6200 in 2.45 sec, Training loss 0.0002467218
    Batches 6201-6400 in 2.39 sec, Training loss 0.0002461464
    Batches 6401-6600 in 2.17 sec, Training loss 0.0002455772
    Batches 6601-6800 in 2.08 sec, Training loss 0.0002450153
    Batches 6801-7000 in 2.06 sec, Training loss 0.0002444602
    Batches 7001-7200 in 2.10 sec, Training loss 0.0002439133
    Batches 7201-7400 in 2.56 sec, Training loss 0.0002433742
    Batches 7401-7600 in 2.63 sec, Training loss 0.0002428445
    Batches 7601-7800 in 1.97 sec, Training loss 0.0002423242
    Batches 7801-8000 in 1.97 sec, Training loss 0.0002418143
    Batches 8001-8200 in 2.01 sec, Training loss 0.0002413151
    Batches 8201-8400 in 2.06 sec, Training loss 0.0002408274
    Batches 8401-8600 in 2.09 sec, Training loss 0.0002403518
    Batches 8601-8800 in 2.17 sec, Training loss 0.0002398884
    Batches 8801-9000 in 2.11 sec, Training loss 0.0002394382
    Batches 9001-9200 in 2.06 sec, Training loss 0.0002390021
    Batches 9201-9400 in 2.33 sec, Training loss 0.0002385799
    Batches 9401-9600 in 2.10 sec, Training loss 0.0002381730
    Batches 9601-9800 in 2.02 sec, Training loss 0.0002377807
    Batches 9801-10000 in 1.98 sec, Training loss 0.0002374041
Excluding fixed points with squared speed above tolerance 1e-08:
    Kept 608/1257 fixed points with tolerance under 1e-08.
Excluding non-unique fixed points:
    Kept 608/608 unique fixed points with uniqueness tolerance 0.025.
Losses of fixed points:
[2.5202692e-11 9.5351966e-09 8.8179188e-09 8.0261913e-09 7.2972712e-09
 6.6469883e-09 6.1136287e-09 5.4932938e-09 5.0863092e-09 4.6066515e-09
 4.1213419e-09 3.8034615e-09 3.4753251e-09 3.1730227e-09 2.8094294e-09
 2.6039815e-09 2.3805196e-09 2.1641933e-09 1.9399859e-09 1.7780868e-09
 1.6223194e-09 1.4460682e-09 1.3056771e-09 1.2155330e-09 1.1081478e-09
 9.9578201e-10 9.0343133e-10 8.1385287e-10 7.4060891e-10 6.7842276e-10
 6.2531158e-10 5.5074079e-10 5.0701632e-10 4.5823220e-10 4.1524695e-10
 3.9754361e-10 3.4897296e-10 3.0363362e-10 2.8299674e-10 2.7161090e-10
 2.4815128e-10 2.2151457e-10 1.9812157e-10 1.8106427e-10 1.6455276e-10
 1.5704052e-10 1.4543881e-10 1.3476309e-10 1.1033387e-10 1.0581422e-10
 9.8265111e-11 9.7957656e-11 8.9307790e-11 7.9625681e-11 7.1661704e-11
 6.7705910e-11 6.0097143e-11 6.6098106e-11 5.4725127e-11 5.6159816e-11
 5.3330181e-11 4.7114215e-11 4.7004373e-11 4.6857414e-11 4.4180319e-11
 4.3787085e-11 4.0279474e-11 3.6764619e-11 3.9901582e-11 3.7942965e-11
 3.5530918e-11 3.3638856e-11 3.5328115e-11 2.9510290e-11 3.1630372e-11
 3.2722190e-11 3.3904164e-11 3.0302913e-11 3.0901615e-11 3.4898660e-11
 2.9507057e-11 2.9207844e-11 3.1621650e-11 2.5904959e-11 2.4849064e-11
 2.6362534e-11 3.0122939e-11 2.7426289e-11 2.0955140e-11 3.2287041e-11
 2.9774842e-11 2.4897921e-11 2.7857298e-11 3.1706533e-11 2.6690903e-11
 2.6383952e-11 2.6378856e-11 2.9874159e-11 2.6762936e-11 2.9801279e-11
 2.4437594e-11 3.0827622e-11 2.9580907e-11 2.5317057e-11 2.5304751e-11
 2.5004812e-11 2.9130337e-11 2.4375092e-11 2.7472144e-11 2.2587779e-11
 2.6568518e-11 2.7676320e-11 2.9154096e-11 3.2487173e-11 2.9050061e-11
 2.6097142e-11 2.9513433e-11 2.2570643e-11 2.2878997e-11 2.9414613e-11
 3.0917373e-11 2.4040863e-11 2.1812115e-11 2.5652199e-11 2.4116126e-11
 3.0703867e-11 2.3849363e-11 2.9166107e-11 3.1755560e-11 2.5595462e-11
 2.6496324e-11 2.6363953e-11 2.4626735e-11 3.0212083e-11 3.1602512e-11
 3.1520935e-11 2.8427989e-11 2.3342755e-11 2.8223398e-11 2.4480929e-11
 2.8892087e-11 2.8937391e-11 2.7716999e-11 2.6319034e-11 3.1089707e-11
 2.6438990e-11 2.5194253e-11 2.8238075e-11 2.4925461e-11 2.1592539e-11
 2.8169821e-11 2.9219745e-11 2.6559484e-11 2.8081384e-11 2.6430306e-11
 2.7447995e-11 2.6299326e-11 2.9562856e-11 2.8381105e-11 2.4850606e-11
 3.1267471e-11 2.8501715e-11 2.8321378e-11 2.7770859e-11 2.0790054e-11
 2.7086828e-11 2.4930533e-11 2.5705662e-11 3.1449301e-11 2.4143555e-11
 2.7448014e-11 2.6937199e-11 2.7201741e-11 2.5434356e-11 2.5796216e-11
 2.8090237e-11 2.5655172e-11 2.6314902e-11 2.6630876e-11 3.1934271e-11
 2.3869802e-11 2.6720378e-11 3.0288994e-11 2.7958502e-11 2.6654361e-11
 2.8912655e-11 2.4292620e-11 2.4029037e-11 2.9006682e-11 3.3015753e-11
 2.7371997e-11 2.5640592e-11 2.9604902e-11 2.4946795e-11 2.7662553e-11
 2.4970770e-11 2.5602302e-11 2.2465172e-11 2.9513069e-11 2.5518141e-11
 2.5593271e-11 2.7511295e-11 2.5453490e-11 2.5772127e-11 3.1847584e-11
 2.7725319e-11 3.1251061e-11 2.1820715e-11 2.6975073e-11 2.4324827e-11
 2.8196639e-11 2.8062604e-11 2.4699649e-11 3.0750263e-11 3.1828033e-11
 2.7056963e-11 2.5564703e-11 3.3168843e-11 3.0148710e-11 2.5574865e-11
 2.6180075e-11 2.6590577e-11 2.9262169e-11 2.3625725e-11 3.2058415e-11
 2.5271866e-11 2.6414029e-11 2.6837216e-11 2.8464160e-11 2.7033136e-11
 2.4675359e-11 2.8153296e-11 2.8649281e-11 2.9466551e-11 2.8355065e-11
 2.7559462e-11 2.7919821e-11 2.7374960e-11 2.8848361e-11 2.6674196e-11
 2.6445944e-11 2.7074384e-11 3.0270553e-11 3.2027936e-11 2.7808880e-11
 2.8408973e-11 3.0374286e-11 2.5254406e-11 2.9934045e-11 2.9030781e-11
 2.6586705e-11 2.7060484e-11 2.6558880e-11 2.9112893e-11 2.8625394e-11
 3.1299519e-11 2.9341363e-11 3.4791767e-11 2.8110592e-11 3.0043652e-11
 2.8807814e-11 2.8004540e-11 2.8687674e-11 2.7675426e-11 2.5894892e-11
 3.0002997e-11 3.0872777e-11 2.8465230e-11 2.9196409e-11 2.5729422e-11
 2.9492929e-11 3.0758569e-11 2.7149182e-11 2.5998072e-11 2.7875226e-11
 3.3257494e-11 2.9300583e-11 2.5252810e-11 3.1578972e-11 3.1046141e-11
 3.1138571e-11 3.0466102e-11 3.0357657e-11 2.8802738e-11 3.3707884e-11
 3.1112331e-11 2.9731519e-11 3.0388213e-11 2.8528388e-11 2.9894906e-11
 2.8350154e-11 2.7897927e-11 3.1081610e-11 2.7907652e-11 2.7014443e-11
 2.9294261e-11 2.9399250e-11 3.1327468e-11 3.1747785e-11 3.1053680e-11
 2.9463869e-11 3.0217138e-11 2.6207817e-11 3.2128113e-11 2.8835993e-11
 2.9510817e-11 2.7003113e-11 2.3308695e-11 2.5072272e-11 2.7949868e-11
 2.9606106e-11 2.8345666e-11 3.1931721e-11 3.2075741e-11 2.9904718e-11
 3.0718379e-11 2.6953877e-11 3.1171288e-11 2.6382275e-11 2.9230247e-11
 2.9408719e-11 2.6289949e-11 2.7345098e-11 2.9861728e-11 2.7692296e-11
 2.2347905e-11 2.6272318e-11 2.8692465e-11 2.7744827e-11 3.0848157e-11
 2.3802606e-11 3.0111663e-11 3.2219106e-11 2.3343480e-11 2.9060910e-11
 2.5251547e-11 2.5013965e-11 2.5902443e-11 2.6232560e-11 2.7725476e-11
 2.5920304e-11 3.1445953e-11 2.7798729e-11 2.5696750e-11 2.7851405e-11
 2.5534600e-11 3.0064118e-11 2.9414599e-11 2.7819844e-11 2.7003727e-11
 3.0180081e-11 2.5896497e-11 2.6162284e-11 2.2465004e-11 2.6537514e-11
 2.5755138e-11 2.8583925e-11 2.9107640e-11 2.6865610e-11 2.8545926e-11
 3.1446908e-11 2.6056759e-11 2.6608988e-11 2.9972920e-11 2.6376398e-11
 2.0580260e-11 2.5318825e-11 2.7289849e-11 2.9083024e-11 3.1100233e-11
 2.6093067e-11 2.6029571e-11 2.5085099e-11 3.2420389e-11 2.9953554e-11
 2.8702707e-11 2.5459242e-11 3.0779289e-11 3.0392890e-11 2.6778708e-11
 3.3588111e-11 2.3502134e-11 2.6849904e-11 3.1076693e-11 2.5856072e-11
 2.5160915e-11 2.5142643e-11 2.4617577e-11 2.7099425e-11 3.0782314e-11
 2.9295052e-11 2.9621774e-11 2.9919477e-11 2.8734824e-11 2.6813073e-11
 2.6614849e-11 2.8596611e-11 2.8685944e-11 2.9214488e-11 2.9213958e-11
 2.9082649e-11 2.2565795e-11 2.8337008e-11 2.8578445e-11 2.7586655e-11
 2.3291126e-11 2.2848499e-11 3.0012253e-11 2.5423996e-11 2.9325598e-11
 2.8671288e-11 2.8833899e-11 2.5931715e-11 3.1588707e-11 2.3373480e-11
 2.6902366e-11 3.0393701e-11 2.8928832e-11 2.8716012e-11 2.1516174e-11
 3.2301131e-11 2.6358787e-11 3.0054591e-11 2.2165193e-11 2.5038627e-11
 2.3734875e-11 2.5991981e-11 2.4602848e-11 2.5788122e-11 2.8156902e-11
 2.7041236e-11 2.6783421e-11 3.1183608e-11 2.2326606e-11 2.9502234e-11
 3.0395436e-11 2.5399773e-11 3.0773446e-11 2.5563673e-11 2.1608472e-11
 2.4997209e-11 2.2991119e-11 2.6058367e-11 3.1303279e-11 2.2905372e-11
 2.3537705e-11 2.5785572e-11 2.7157171e-11 2.3571230e-11 2.4687922e-11
 2.2683140e-11 2.4582081e-11 2.1990989e-11 2.6294848e-11 2.7789633e-11
 2.2420203e-11 3.0093351e-11 2.9471203e-11 2.4475942e-11 2.6862040e-11
 2.4639927e-11 2.6052376e-11 2.4346612e-11 2.5321082e-11 2.4468852e-11
 2.6594763e-11 2.5452701e-11 2.5337846e-11 2.4843768e-11 3.0881325e-11
 2.7558929e-11 3.0169651e-11 2.6884110e-11 3.2055542e-11 2.3420901e-11
 2.3823506e-11 2.5314240e-11 3.1156449e-11 3.1678837e-11 2.5822185e-11
 2.5573188e-11 2.4721799e-11 2.2196953e-11 2.1565406e-11 2.6256899e-11
 2.9406731e-11 2.7446642e-11 2.5661417e-11 2.4179984e-11 3.0161873e-11
 2.4016188e-11 3.0244557e-11 2.3641242e-11 2.8525288e-11 2.4277131e-11
 2.3720706e-11 2.5200939e-11 2.7232071e-11 2.4993889e-11 3.0771264e-11
 2.6116626e-11 2.9001482e-11 3.0336747e-11 2.2969984e-11 2.3838910e-11
 2.0510586e-11 2.4462175e-11 3.0999567e-11 3.0235536e-11 3.2627259e-11
 2.1332262e-11 2.7817750e-11 2.5345698e-11 2.8626629e-11 3.0220288e-11
 2.9500048e-11 3.1821622e-11 2.5768176e-11 2.7103798e-11 2.8605868e-11
 2.9048446e-11 2.5339361e-11 3.2297675e-11 3.1859047e-11 2.9852981e-11
 2.8917322e-11 2.9375884e-11 3.2419199e-11 2.9542251e-11 3.0560429e-11
 2.8175955e-11 3.2822044e-11 3.4066919e-11 3.3679698e-11 3.7679262e-11
 3.2402407e-11 3.6387095e-11 3.4425685e-11 2.8246752e-11 3.9328322e-11
 4.2903049e-11 3.9144632e-11 4.2205028e-11 4.1965733e-11 4.6802520e-11
 5.2118906e-11 4.7793207e-11 5.3592630e-11 6.0983205e-11 5.8811164e-11
 6.6659706e-11 6.9809172e-11 7.3565577e-11 7.7668968e-11 8.8554268e-11
 8.5232703e-11 9.0153288e-11 9.7517147e-11 1.1255535e-10 1.1682405e-10
 1.2979959e-10 1.4231068e-10 1.5066651e-10 1.7444643e-10 1.7392024e-10
 1.9887686e-10 2.0551505e-10 2.2475224e-10 2.4729635e-10 2.7055327e-10
 3.0000014e-10 3.3089523e-10 3.5326203e-10 3.9176934e-10 4.2622952e-10
 4.9317705e-10 5.0188931e-10 5.5457194e-10 6.1260474e-10 6.7792560e-10
 7.4061240e-10 8.0361295e-10 9.2207475e-10 9.9801900e-10 1.0749297e-09
 1.1988759e-09 1.2985547e-09 1.4425843e-09 1.5607674e-09 1.7202466e-09
 1.9031621e-09 2.0792381e-09 2.2862503e-09 2.5148605e-09 2.7800615e-09
 3.0483420e-09 3.4056027e-09 3.6474197e-09 3.9884847e-09 4.4313877e-09
 4.8423923e-09 5.2885492e-09 5.7993157e-09 6.3342416e-09 7.0303896e-09
 7.6500619e-09 8.3907690e-09 9.0991854e-09]
[Figure: fixed points projected onto the first two principal components ("Fixed points PCA")]
[10]:
# verify_fp_through_simulation(model, fps)
[13]:
visualize_fixed_points(fps, plot_ids=(10, 20, 30, 40, 50, 60, 70), xs=model.x)
[Figure: activity profiles u(x) of the selected fixed points]
[12]:
verify_fixed_point_stability(model, fps, num=6)
[Figures: eigenvalue spectra of the Jacobian at the first six fixed points (real vs. imaginary parts)]