Continuous-attractor Neural Network

[1]:
import matplotlib.pyplot as plt
import numpy as np
from sklearn.decomposition import PCA
[2]:
import brainpy as bp
import brainpy.math as bm

bm.set_platform('cpu')
[3]:
bp.__version__
[3]:
'2.4.3'

Model
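
The class below implements the following 1-D CANN rate dynamics (the integrals over the feature space are discretized by the `num` neurons, so the code realizes these equations up to that discretization):

$$\tau \frac{\partial u(x,t)}{\partial t} = -u(x,t) + \rho \int J(x,x')\, r(x',t)\, dx' + I_{\mathrm{ext}}(x,t),$$

$$r(x,t) = \frac{u^2(x,t)}{1 + k\rho \int u^2(x',t)\, dx'}, \qquad J(x,x') = \frac{J_0}{\sqrt{2\pi}\, a} \exp\!\left(-\frac{(x-x')^2}{2a^2}\right),$$

$$I_{\mathrm{ext}}(x,t) = A \exp\!\left(-\frac{(x-z(t))^2}{4a^2}\right),$$

where $\tau$ is the synaptic time constant, $k$ sets the strength of the global divisive inhibition, $a$ is the width of the excitatory connections and of the input bump, $A$ is the input magnitude, and $z(t)$ is the stimulus position on the ring $[-\pi, \pi)$.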

[4]:
class CANN1D(bp.dyn.NeuDyn):
  def __init__(self, num, tau=1., k=8.1, a=0.5, A=10., J0=4.,
               z_min=-bm.pi, z_max=bm.pi, **kwargs):
    super().__init__(size=num, **kwargs)

    # parameters
    self.tau = tau  # The synaptic time constant
    self.k = k  # Degree of the rescaled inhibition
    self.a = a  # Half-width of the range of excitatory connections
    self.A = A  # Magnitude of the external input
    self.J0 = J0  # maximum connection value

    # feature space
    self.z_min = z_min
    self.z_max = z_max
    self.z_range = z_max - z_min
    self.x = bm.linspace(z_min, z_max, num)  # The encoded feature values
    self.rho = num / self.z_range  # The neural density
    self.dx = self.z_range / num  # The spacing between neighbouring encoded feature values

    # variables
    self.u = bm.Variable(bm.zeros(num))
    self.input = bm.Variable(bm.zeros(num))

    # The connection matrix
    self.conn_mat = self.make_conn(self.x)

    # function
    self.integral = bp.odeint(self.derivative)

  def derivative(self, u, t, Iext):
    r1 = bm.square(u)
    r2 = 1.0 + self.k * bm.sum(r1)
    r = r1 / r2
    Irec = bm.dot(self.conn_mat, r)
    du = (-u + Irec + Iext) / self.tau
    return du

  def dist(self, d):
    # Wrap a feature-space difference onto the ring so that it lies in (-z_range/2, z_range/2]
    d = bm.remainder(d, self.z_range)
    d = bm.where(d > 0.5 * self.z_range, d - self.z_range, d)
    return d

  def make_conn(self, x):
    assert bm.ndim(x) == 1
    x_left = bm.reshape(x, (-1, 1))
    x_right = bm.repeat(x.reshape((1, -1)), len(x), axis=0)
    d = self.dist(x_left - x_right)
    Jxx = self.J0 * bm.exp(-0.5 * bm.square(d / self.a)) / (bm.sqrt(2 * bm.pi) * self.a)
    return Jxx

  def get_stimulus_by_pos(self, pos):
    # A Gaussian bump of external input centred at the feature value `pos`
    return self.A * bm.exp(-0.25 * bm.square(self.dist(self.x - pos) / self.a))

  def update(self):
    self.u.value = self.integral(self.u, bp.share['t'], self.input, bp.share['dt'])
    self.input[:] = 0.  # clear the external input after each integration step

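Before searching for fixed points it is instructive to see a bump attractor form. The following is a minimal sketch (not part of the original notebook; it assumes bp.DSRunner accepts the ('input', value) input specification used in other BrainPy tutorials): drive the network with a static bump of input and plot the settled membrane potential.

cann = CANN1D(num=512, k=0.1, A=30, a=0.5)   # same parameters as in the analysis below
I_ext = cann.get_stimulus_by_pos(0.)         # bump-shaped input centred at x = 0
runner = bp.DSRunner(cann, inputs=('input', I_ext), monitors=['u'])
runner.run(20.)                              # 20 time units; dt defaults to 0.1
plt.plot(cann.x, runner.mon['u'][-1])        # the steady state is a bump of activity
plt.xlabel('x')
plt.ylabel('u')
plt.show()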
Find fixed points
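
The slow-point search below starts from bump-shaped candidate states placed at many positions on the ring and optimizes each candidate by gradient descent to minimize its "speed" under the network update. As a hedged sketch of the objective (the exact normalization is internal to BrainPy's SlowPointFinder):

$$L(u) = \lVert F(u) - u \rVert^2,$$

where $F$ is the single-step update map of the model. `filter_loss` then keeps only candidates whose final loss is below 1e-7, and `keep_unique` removes near-duplicate solutions.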

[5]:
model = CANN1D(num=512, k=0.1, A=30, a=0.5)
[6]:
candidates = model.get_stimulus_by_pos(bm.arange(-bm.pi, bm.pi, 0.005).reshape((-1, 1)))

finder = bp.analysis.SlowPointFinder(f_cell=model, target_vars={'u': model.u})
finder.find_fps_with_gd_method(
  candidates={'u': candidates},
  tolerance=1e-6,
  num_batch=200,
  optimizer=bp.optim.Adam(lr=bp.optim.ExponentialDecay(0.1, 2, 0.999)),
)
finder.filter_loss(1e-7)
finder.keep_unique()
# finder.exclude_outliers(tolerance=1e1)

print('Losses of fixed points:')
print(finder.losses)
Optimizing with Adam(lr=ExponentialDecay(0.1, decay_steps=2, decay_rate=0.999), last_call=-1), beta1=0.9, beta2=0.999, eps=1e-08) to find fixed points:
    Batches 1-200 in 1.05 sec, Training loss 0.0003816649
    Batches 201-400 in 1.00 sec, Training loss 0.0000117727
    Batches 401-600 in 1.07 sec, Training loss 0.0000027707
    Batches 601-800 in 1.03 sec, Training loss 0.0000026347
    Batches 801-1000 in 0.95 sec, Training loss 0.0000026309
    Batches 1001-1200 in 1.01 sec, Training loss 0.0000026283
    Batches 1201-1400 in 1.00 sec, Training loss 0.0000026258
    Batches 1401-1600 in 1.00 sec, Training loss 0.0000026233
    Batches 1601-1800 in 1.04 sec, Training loss 0.0000026209
    Batches 1801-2000 in 0.97 sec, Training loss 0.0000026185
    Batches 2001-2200 in 1.02 sec, Training loss 0.0000026162
    Batches 2201-2400 in 0.98 sec, Training loss 0.0000026141
    Batches 2401-2600 in 1.01 sec, Training loss 0.0000026120
    Batches 2601-2800 in 1.05 sec, Training loss 0.0000026100
    Batches 2801-3000 in 1.03 sec, Training loss 0.0000026081
    Batches 3001-3200 in 0.97 sec, Training loss 0.0000026063
    Batches 3201-3400 in 1.10 sec, Training loss 0.0000026047
    Batches 3401-3600 in 1.06 sec, Training loss 0.0000026031
    Batches 3601-3800 in 1.05 sec, Training loss 0.0000026016
    Batches 3801-4000 in 1.13 sec, Training loss 0.0000026003
    Batches 4001-4200 in 1.10 sec, Training loss 0.0000025990
    Batches 4201-4400 in 1.04 sec, Training loss 0.0000025979
    Batches 4401-4600 in 1.06 sec, Training loss 0.0000025968
    Batches 4601-4800 in 1.06 sec, Training loss 0.0000025958
    Batches 4801-5000 in 1.08 sec, Training loss 0.0000025948
    Batches 5001-5200 in 1.16 sec, Training loss 0.0000025940
    Batches 5201-5400 in 1.06 sec, Training loss 0.0000025932
    Batches 5401-5600 in 1.09 sec, Training loss 0.0000025925
    Batches 5601-5800 in 1.10 sec, Training loss 0.0000025919
    Batches 5801-6000 in 1.00 sec, Training loss 0.0000025913
    Batches 6001-6200 in 1.08 sec, Training loss 0.0000025907
    Batches 6201-6400 in 1.05 sec, Training loss 0.0000025902
    Batches 6401-6600 in 1.05 sec, Training loss 0.0000025898
    Batches 6601-6800 in 1.06 sec, Training loss 0.0000025894
    Batches 6801-7000 in 1.04 sec, Training loss 0.0000025890
    Batches 7001-7200 in 1.08 sec, Training loss 0.0000025887
    Batches 7201-7400 in 1.06 sec, Training loss 0.0000025884
    Batches 7401-7600 in 1.09 sec, Training loss 0.0000025881
    Batches 7601-7800 in 1.10 sec, Training loss 0.0000025878
    Batches 7801-8000 in 1.00 sec, Training loss 0.0000025876
    Batches 8001-8200 in 1.05 sec, Training loss 0.0000025874
    Batches 8201-8400 in 1.10 sec, Training loss 0.0000025872
    Batches 8401-8600 in 1.01 sec, Training loss 0.0000025871
    Batches 8601-8800 in 1.10 sec, Training loss 0.0000025869
    Batches 8801-9000 in 1.08 sec, Training loss 0.0000025868
    Batches 9001-9200 in 1.09 sec, Training loss 0.0000025867
    Batches 9201-9400 in 1.08 sec, Training loss 0.0000025866
    Batches 9401-9600 in 1.06 sec, Training loss 0.0000025865
    Batches 9601-9800 in 1.06 sec, Training loss 0.0000025864
    Batches 9801-10000 in 1.06 sec, Training loss 0.0000025864
Excluding fixed points with squared speed above tolerance 1e-07:
    Kept 796/1257 fixed points with tolerance under 1e-07.
Excluding non-unique fixed points:
    Kept 796/796 unique fixed points with uniqueness tolerance 0.025.
Losses of fixed points:
[6.65999265e-13 6.06011330e-09 2.38085285e-08 5.33473568e-08
 9.45478433e-08 9.76091741e-08 9.16885483e-08 8.61575415e-08
 8.09044991e-08 7.59645360e-08 7.12713586e-08 6.68581208e-08
 6.26903471e-08 5.88003104e-08 5.50905099e-08 5.16032301e-08
 4.83198406e-08 4.52541258e-08 4.23480842e-08 3.96223072e-08
 3.70487712e-08 3.46303040e-08 3.23487406e-08 3.02574215e-08
 2.82472659e-08 2.63589506e-08 2.46044749e-08 2.29731363e-08
 2.14184297e-08 1.99687626e-08 1.86055527e-08 1.73432770e-08
 1.61649201e-08 1.50516115e-08 1.39988554e-08 1.30286271e-08
 1.21405392e-08 1.12824772e-08 1.04854365e-08 9.73788694e-09
 9.05894026e-09 8.41076098e-09 7.80433673e-09 7.24325000e-09
 6.71769840e-09 6.23869001e-09 5.78714587e-09 5.36283240e-09
 4.96798558e-09 4.60364724e-09 4.26138635e-09 3.94648136e-09
 3.65370711e-09 3.37913297e-09 3.12092130e-09 2.88610558e-09
 2.66823008e-09 2.46409915e-09 2.28175923e-09 2.10462936e-09
 1.93792205e-09 1.79577042e-09 1.65449099e-09 1.52192681e-09
 1.40910150e-09 1.29147426e-09 1.19493970e-09 1.09396570e-09
 1.01234499e-09 9.35288291e-10 8.56214044e-10 7.89207921e-10
 7.28913485e-10 6.69647671e-10 6.10061723e-10 5.61333868e-10
 5.16059973e-10 4.73754591e-10 4.36389869e-10 3.99210220e-10
 3.67217146e-10 3.34785144e-10 3.07399106e-10 2.83443435e-10
 2.59649496e-10 2.38027875e-10 2.17810200e-10 1.98874819e-10
 1.81018270e-10 1.67016345e-10 1.51877580e-10 1.39265835e-10
 1.27471325e-10 1.17848092e-10 1.06978613e-10 9.66372538e-11
 8.97846728e-11 8.12829318e-11 7.50615126e-11 6.72356892e-11
 6.24652205e-11 5.62219646e-11 5.22908453e-11 4.70223305e-11
 4.30399986e-11 3.91286899e-11 3.50259058e-11 3.25584594e-11
 3.00176099e-11 2.76452854e-11 2.42934301e-11 2.24256152e-11
 2.08410719e-11 1.81344766e-11 1.68645965e-11 1.53752931e-11
 1.47273513e-11 1.32494779e-11 1.14755011e-11 1.06543047e-11
 9.91087247e-12 8.97903626e-12 8.52114386e-12 6.92675344e-12
 6.59329882e-12 6.03664341e-12 5.53571902e-12 5.49512172e-12
 4.99953551e-12 4.72949978e-12 4.16093332e-12 3.76199072e-12
 3.23529007e-12 3.02890091e-12 2.89850218e-12 2.55219110e-12
 2.51337579e-12 2.44126854e-12 2.36818638e-12 2.23640442e-12
 2.12707434e-12 2.07834292e-12 1.79407606e-12 1.77597260e-12
 1.69694900e-12 1.50974827e-12 1.61576915e-12 9.61236407e-13
 9.45610127e-13 9.31541302e-13 7.85112481e-13 7.84306105e-13
 8.17820367e-13 8.18782650e-13 8.45323811e-13 8.26449748e-13
 8.26894163e-13 8.39223276e-13 8.33111249e-13 8.44102457e-13
 8.44635288e-13 8.40655562e-13 8.31335055e-13 8.26000618e-13
 8.49426866e-13 8.20591208e-13 8.05958327e-13 8.25581682e-13
 8.03848795e-13 8.22364095e-13 8.16704831e-13 8.09510445e-13
 8.11398149e-13 8.02072493e-13 8.02413041e-13 8.02161126e-13
 7.92166137e-13 8.01655834e-13 7.64215731e-13 7.71105890e-13
 7.85981415e-13 7.68436150e-13 7.89083100e-13 7.63772184e-13
 7.66247634e-13 7.60997114e-13 7.73206802e-13 7.55203083e-13
 7.45925240e-13 7.35161986e-13 7.39542867e-13 7.32881529e-13
 7.53341888e-13 6.77314921e-13 6.59250866e-13 7.43982458e-13
 7.39151850e-13 7.48206401e-13 6.76980499e-13 6.76563406e-13
 6.79082983e-13 6.72961144e-13 6.78207002e-13 6.66544402e-13
 6.60853750e-13 6.86604744e-13 6.72628077e-13 7.46208000e-13
 6.79728301e-13 6.87082715e-13 6.61104309e-13 6.86748726e-13
 6.64101966e-13 6.64768100e-13 6.75564205e-13 6.78312766e-13
 6.85728655e-13 6.64101911e-13 6.62436577e-13 6.85665934e-13
 6.79978155e-13 6.77873502e-13 6.72955940e-13 6.80082184e-13
 6.77873881e-13 6.80061367e-13 6.69124586e-13 6.71873147e-13
 6.74287882e-13 6.60914899e-13 6.79644709e-13 6.84084788e-13
 6.65433854e-13 6.71212218e-13 6.78754795e-13 6.72955615e-13
 6.85417055e-13 6.62436252e-13 6.63995822e-13 6.64439912e-13
 6.89206559e-13 6.92938870e-13 6.85834202e-13 6.78206731e-13
 6.80414980e-13 6.72955615e-13 6.95603406e-13 6.86833403e-13
 6.77873502e-13 6.62436252e-13 6.89289771e-13 6.87081143e-13
 6.86833403e-13 6.85415754e-13 6.81747193e-13 6.80081859e-13
 6.86748293e-13 6.69347336e-13 6.94271030e-13 6.72622494e-13
 6.74287828e-13 6.87081414e-13 6.87166415e-13 6.68042011e-13
 6.81747139e-13 6.72877498e-13 6.74626099e-13 6.90205651e-13
 6.77895240e-13 6.80414872e-13 6.85749146e-13 6.81752398e-13
 6.72955615e-13 6.77895240e-13 6.95957994e-13 6.86082213e-13
 6.80414872e-13 6.89289717e-13 6.66770892e-13 6.85501081e-13
 6.78206189e-13 6.80303849e-13 6.82184290e-13 6.78206189e-13
 6.95937123e-13 6.85748767e-13 6.53915236e-13 6.81414072e-13
 6.92937840e-13 6.95937123e-13 6.76562972e-13 6.81747139e-13
 6.80087009e-13 6.85665825e-13 6.94604855e-13 6.76895985e-13
 6.80414817e-13 6.72960711e-13 6.64106736e-13 6.88956650e-13
 6.68375078e-13 6.86625127e-13 6.76624501e-13 6.72206106e-13
 6.71290172e-13 6.59604316e-13 6.90164018e-13 6.77873393e-13
 6.60104675e-13 6.90538664e-13 6.75208533e-13 6.78312333e-13
 6.78206189e-13 6.81747085e-13 6.72955506e-13 6.76561997e-13
 6.80060934e-13 6.83751666e-13 6.78416470e-13 6.95479210e-13
 6.94270162e-13 6.79644654e-13 6.76562972e-13 6.65433745e-13
 6.71628443e-13 6.71565993e-13 6.80394001e-13 6.58938128e-13
 6.64018211e-13 6.62691148e-13 6.89274050e-13 6.72955506e-13
 6.68125223e-13 6.79644654e-13 6.94270108e-13 6.73205307e-13
 6.90538664e-13 6.87082281e-13 6.78645399e-13 6.76873921e-13
 6.80414872e-13 6.71623293e-13 6.78206189e-13 6.95957940e-13
 6.92521073e-13 6.62769210e-13 7.28411147e-13 6.76874247e-13
 6.80310734e-13 6.76646239e-13 6.71873039e-13 6.71290172e-13
 6.81002943e-13 6.62436197e-13 7.36903485e-13 6.78749483e-13
 6.74292978e-13 6.62774414e-13 7.36959864e-13 6.69790558e-13
 6.64101478e-13 6.75292179e-13 6.71227722e-13 6.95146089e-13
 6.75230650e-13 6.76647052e-13 6.76541126e-13 6.71893856e-13
 7.01404700e-13 6.66687625e-13 6.75647797e-13 6.86081834e-13
 6.80081750e-13 7.26218456e-13 6.76894684e-13 6.76646998e-13
 6.76542156e-13 6.71540026e-13 7.06733771e-13 6.67770472e-13
 6.78312333e-13 6.84417800e-13 6.78749537e-13 6.62358081e-13
 6.72877498e-13 6.80394055e-13 6.68375023e-13 6.86604311e-13
 6.78733870e-13 6.78726931e-13 6.62352877e-13 6.68458290e-13
 6.78312333e-13 6.78206569e-13 6.80414817e-13 6.62685998e-13
 6.78228307e-13 6.76980119e-13 7.38658158e-13 6.85605110e-13
 6.72955561e-13 6.85415700e-13 6.79977667e-13 7.23392266e-13
 6.71873039e-13 6.78395599e-13 6.85749092e-13 6.87166361e-13
 6.83751612e-13 6.71873039e-13 6.64018211e-13 6.68436606e-13
 6.72955561e-13 7.29020956e-13 6.80414817e-13 6.63024215e-13
 6.71899060e-13 6.74287774e-13 6.93937963e-13 6.76980119e-13
 6.86748347e-13 6.62441347e-13 6.62685943e-13 6.76895985e-13
 6.65433745e-13 6.82397661e-13 7.47672811e-13 6.53915182e-13
 7.42210438e-13 6.78312333e-13 6.84728695e-13 6.61436997e-13
 6.72955561e-13 6.67770093e-13 6.82087091e-13 6.76873921e-13
 7.67185415e-13 7.24358832e-13 6.68103539e-13 6.69458304e-13
 6.66709689e-13 6.61658987e-13 6.62691148e-13 6.59166244e-13
 7.33351476e-13 6.78561320e-13 6.74287774e-13 7.17369632e-13
 7.51203570e-13 6.55247449e-13 6.67126077e-13 6.68459103e-13
 6.76874247e-13 6.79165816e-13 7.19085273e-13 6.68125277e-13
 7.48894544e-13 6.85749092e-13 6.74620841e-13 6.72955506e-13
 6.86997768e-13 6.77646253e-13 6.68352960e-13 6.78499737e-13
 6.76411130e-13 6.78206460e-13 7.42233315e-13 6.68375023e-13
 7.47451363e-13 6.64018211e-13 6.78206514e-13 6.69458304e-13
 6.77895240e-13 6.72872294e-13 6.53920440e-13 6.71566047e-13
 7.14089269e-13 6.87082335e-13 6.69874638e-13 6.62358081e-13
 6.62774414e-13 6.64434545e-13 6.68042011e-13 6.85521843e-13
 6.69352486e-13 6.79082550e-13 6.90538664e-13 7.35016160e-13
 6.69458304e-13 7.42127171e-13 7.50782032e-13 6.72622494e-13
 6.85748767e-13 6.87499428e-13 6.87081034e-13 6.81747085e-13
 6.63435344e-13 6.94270054e-13 7.35433361e-13 6.76562972e-13
 6.80414817e-13 6.74287774e-13 6.81086155e-13 6.76980065e-13
 6.76562972e-13 6.72955561e-13 6.62691202e-13 6.64106682e-13
 6.64018211e-13 6.87082281e-13 6.79644654e-13 6.86748347e-13
 6.81752289e-13 6.83079353e-13 6.75563772e-13 6.78645454e-13
 6.92604828e-13 6.64101532e-13 6.80414872e-13 6.87081034e-13
 6.79977721e-13 6.78206189e-13 6.70540826e-13 6.81414072e-13
 6.76873921e-13 6.86750136e-13 7.58670579e-13 6.70207759e-13
 6.72955561e-13 6.69435861e-13 6.70124492e-13 6.78561374e-13
 6.80414872e-13 7.49371593e-13 6.76874301e-13 6.70540826e-13
 6.78228307e-13 6.71290226e-13 6.72960765e-13 6.55252708e-13
 6.80414872e-13 6.84084733e-13 6.78645508e-13 6.76874409e-13
 6.74287882e-13 7.09759508e-13 6.78228361e-13 6.79644709e-13
 6.85749200e-13 6.81747193e-13 6.72955615e-13 6.94519637e-13
 6.87166524e-13 6.78206298e-13 6.77750445e-13 6.85542823e-13
 6.78206623e-13 6.79977830e-13 6.70456854e-13 6.62658296e-13
 6.65766921e-13 6.85749255e-13 6.80081859e-13 6.78228361e-13
 6.62769319e-13 6.72877552e-13 6.72544485e-13 6.72955615e-13
 6.84084788e-13 6.61437051e-13 6.78206623e-13 6.70296609e-13
 6.81747519e-13 6.77895619e-13 6.78645833e-13 6.76874735e-13
 6.72955994e-13 6.80082238e-13 6.77873556e-13 6.78312766e-13
 7.43987337e-13 7.60108176e-13 6.80415251e-13 6.91522360e-13
 6.78312820e-13 7.54756392e-13 6.71290606e-13 6.81747519e-13
 6.85749580e-13 7.29577314e-13 7.10258675e-13 6.56913217e-13
 7.47540268e-13 7.54339733e-13 7.44398792e-13 7.53584261e-13
 6.81747519e-13 7.40518215e-13 6.72961144e-13 6.68875870e-13
 6.75231084e-13 7.58775963e-13 7.62333230e-13 7.62329002e-13
 7.47563957e-13 7.64241860e-13 7.68880239e-13 7.58670850e-13
 7.66436610e-13 7.76870322e-13 7.97190439e-13 7.77563669e-13
 8.07823318e-13 7.70543948e-13 7.81200246e-13 7.93420397e-13
 8.03848795e-13 8.04319881e-13 7.78006023e-13 8.04292451e-13
 8.09515541e-13 8.07931196e-13 8.13174398e-13 8.05957731e-13
 8.13152388e-13 8.18372497e-13 8.02072438e-13 8.11396306e-13
 8.20587738e-13 8.31334892e-13 8.38773061e-13 8.29996445e-13
 8.34909941e-13 8.40988629e-13 8.39217476e-13 8.38768779e-13
 8.39322318e-13 8.45987397e-13 8.37552033e-13 8.33891495e-13
 8.40993507e-13 8.34414027e-13 8.28647264e-13 8.13457483e-13
 8.06407946e-13 7.95583001e-13 9.41124132e-13 9.79323827e-13
 1.04366017e-12 1.01703628e-12 1.55415351e-12 1.55973064e-12
 1.75677268e-12 1.97825675e-12 2.10312323e-12 2.08514369e-12
 2.22232389e-12 2.38751120e-12 2.47662741e-12 2.52247355e-12
 2.54453078e-12 2.65023550e-12 2.83581665e-12 2.90924641e-12
 3.10262102e-12 4.07886658e-12 4.07832362e-12 4.95156520e-12
 5.22558212e-12 5.52420349e-12 5.55981303e-12 5.85516271e-12
 6.68958205e-12 6.90074517e-12 8.48250983e-12 9.09801921e-12
 9.81412868e-12 1.01604645e-11 1.13349807e-11 1.33576761e-11
 1.39126011e-11 1.52543360e-11 1.61737915e-11 1.76799790e-11
 2.04291237e-11 2.17683579e-11 2.36094096e-11 2.59116773e-11
 2.88016989e-11 3.12964966e-11 3.37667741e-11 3.76206635e-11
 4.08933720e-11 4.50029319e-11 5.01595189e-11 5.36318490e-11
 6.04010175e-11 6.47649157e-11 7.07944259e-11 7.83746332e-11
 8.46290538e-11 9.34271965e-11 1.02205598e-10 1.11236756e-10
 1.20586402e-10 1.32406017e-10 1.45912754e-10 1.57893060e-10
 1.73145345e-10 1.90823704e-10 2.07872386e-10 2.26513391e-10
 2.46359877e-10 2.69641032e-10 2.95570513e-10 3.20299870e-10
 3.51708329e-10 3.82457233e-10 4.14832335e-10 4.53349358e-10
 4.92907659e-10 5.39130851e-10 5.84944204e-10 6.35859587e-10
 6.94486135e-10 7.50536078e-10 8.18828783e-10 8.88272678e-10
 9.66443037e-10 1.04974163e-09 1.14148735e-09 1.23731025e-09
 1.33976252e-09 1.45741863e-09 1.57475388e-09 1.71376779e-09
 1.85829607e-09 2.00666173e-09 2.17927409e-09 2.35506326e-09
 2.55003352e-09 2.75939627e-09 2.98689229e-09 3.22779359e-09
 3.49273721e-09 3.77717413e-09 4.06882350e-09 4.41011672e-09
 4.74947770e-09 5.12270759e-09 5.53060442e-09 5.96623018e-09
 6.42973896e-09 6.93385704e-09 7.47874296e-09 8.05466449e-09
 8.67119088e-09 9.32358901e-09 1.00421147e-08 1.08030491e-08
 1.16187726e-08 1.24903092e-08 1.34355682e-08 1.44321213e-08
 1.54916400e-08 1.66318657e-08 1.78525674e-08 1.91507539e-08
 2.05502513e-08 2.20345182e-08 2.36279476e-08 2.53021000e-08
 2.71166076e-08 2.90485005e-08 3.10840207e-08 3.32737500e-08
 3.55938887e-08 3.80514464e-08 4.06955749e-08 4.34892549e-08
 4.64846650e-08 4.96325185e-08 5.29930091e-08 5.65101779e-08
 6.03326100e-08 6.43396731e-08 6.86328434e-08 7.31181160e-08
 7.79278864e-08 8.29958395e-08 8.83774334e-08 9.40493692e-08
 7.70019071e-08 4.03541378e-08 1.54598254e-08 2.29774755e-09]

Visualize fixed points

[7]:
pca = PCA(2)
fp_pcs = pca.fit_transform(finder.fixed_points['u'])
plt.plot(fp_pcs[:, 0], fp_pcs[:, 1], 'x', label='fixed points')
plt.xlabel('PC 1')
plt.ylabel('PC 2')
plt.title('Fixed points PCA')
plt.legend()
plt.show()
../_images/dynamics_analysis_highdim_CANN_10_0.png
[8]:
fps = finder.fixed_points['u']
plot_ids = (10, 100, 200, 300,)
plot_ids = np.asarray(plot_ids)

for i in plot_ids:
  plt.plot(model.x, fps[i], label=f'FP-{i}')
plt.legend()
plt.show()
../_images/dynamics_analysis_highdim_CANN_11_0.png

Verify the stability of the fixed points

[9]:
from jax.tree_util import tree_map

_ = finder.compute_jacobians(
  tree_map(lambda x: x[plot_ids], finder.fixed_points),
  plot=True
)
../_images/dynamics_analysis_highdim_CANN_13_1.png
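
As a complementary, simulation-based check (a hypothetical sketch, not part of the original notebook): perturb one of the recovered fixed points slightly, reset the network state to it, and simulate without external input. For a continuous attractor the state should relax back onto (a possibly shifted point of) the bump manifold rather than diverge.

model.u[:] = fps[100] + 0.01 * np.random.randn(len(model.x))  # perturbed fixed point
runner = bp.DSRunner(model, monitors=['u'])
runner.run(50.)
plt.plot(model.x, fps[100], label='fixed point')
plt.plot(model.x, runner.mon['u'][-1], '--', label='after relaxation')
plt.legend()
plt.show()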