In [15]:
%matplotlib inline
import numpy as np
import numpy.random as rnd
import matplotlib.pyplot as plt #, mpld3
plt.rcParams['figure.figsize'] = 12, 8
#mpld3.enable_notebook()
In [2]:
class Ising2D:
    """Monte Carlo simulation of the 2D Ising model on an Lx-by-Ly periodic
    lattice with anisotropic couplings (Jx, Jy) at inverse temperature beta.

    Two update schemes are supported:
      * local=True  -- single-spin-flip Metropolis sweeps (local_step)
      * local=False -- Wolff cluster updates (cluster_step)

    Spins are stored as +/-1 in ``self.config`` with layout config[y, x].
    """

    def __init__(self, size, couplings, beta, local=False):
        """size = (Lx, Ly), couplings = (Jx, Jy).

        The tuples are unpacked in the body rather than in the parameter list:
        tuple parameter unpacking is Python-2-only syntax (removed by PEP 3113),
        and unpacking here keeps existing call sites unchanged while remaining
        valid on both Python 2 and Python 3.
        """
        Lx, Ly = size
        Jx, Jy = couplings
        self.Lx = Lx
        self.Ly = Ly
        self.Jx = Jx
        self.Jy = Jy
        self.beta = beta
        self.local = local
        # Random initial configuration: randint(2) gives {0, 1} -> map to {-1, +1}.
        self.config = 2*rnd.randint(2, size=(Ly, Lx)) - 1

    def run(self, num_thermal_steps, num_sample_steps):
        """Thermalize, then sample m_abs / m_sqr after every step.

        Returns the mean of step()'s per-step diagnostic: the acceptance
        ratio for local updates, or the cluster size for cluster updates.
        """
        self.m_abs = np.zeros(num_sample_steps)
        self.m_sqr = np.zeros(num_sample_steps)
        diagnostic_total = 0.0
        for _ in range(num_thermal_steps):
            self.step()
        for idx in range(num_sample_steps):
            diagnostic_total += self.step()
            self.sample(idx)
        return diagnostic_total / float(num_sample_steps)

    def step(self):
        """Dispatch one Monte Carlo step to the configured update scheme."""
        if self.local:
            return self.local_step()
        return self.cluster_step()

    def local_step(self):
        """One Metropolis sweep: Lx*Ly single-spin-flip attempts at random sites.

        Returns the acceptance ratio of the sweep.  A flip is accepted with
        probability min(1, exp(-beta*delta_E)); for delta_E <= 0 the
        exponential is >= 1, so the move is always accepted.
        """
        accepted = 0
        for _ in range(self.Lx*self.Ly):
            x, y = self.randsite()
            old = self.config[y, x]
            # Periodic-boundary neighbors of (x, y).
            left = self.config[y, (x-1) % self.Lx]
            right = self.config[y, (x+1) % self.Lx]
            up = self.config[(y-1) % self.Ly, x]
            down = self.config[(y+1) % self.Ly, x]
            # Energy change for flipping the spin at (x, y).
            delta_E = 2 * (self.Jx*left + self.Jx*right + self.Jy*up + self.Jy*down) * old
            if rnd.random() <= np.exp(-self.beta*delta_E):
                accepted += 1
                self.config[y, x] = -self.config[y, x]
        return accepted / float(self.Lx * self.Ly)

    def cluster_step(self):
        """One Wolff cluster update; returns the size of the flipped cluster.

        Neighbors with the seed's spin join the cluster with the Wolff bond
        probability 1 - exp(-2*beta*J); cluster members are flipped as soon
        as they are added.
        """
        x, y = self.randsite()
        s = self.config[y, x]
        # Use a set for O(1) membership tests (the original list made the
        # "already in cluster" check an O(cluster size) scan).
        cluster = {(x, y)}
        self.config[y, x] = -s
        todo = self.neighbors((x, y))
        while todo:
            J, (x, y) = todo.pop()
            if self.config[y, x] != s:
                continue  # opposite spin, or already flipped into the cluster
            if (x, y) in cluster:
                continue
            # Accept the bond with probability 1 - exp(-2*beta*J).
            if rnd.random() >= np.exp(-2*self.beta*J):
                self.config[y, x] = -s
                cluster.add((x, y))
                todo += self.neighbors((x, y))
        return len(cluster)

    def randsite(self):
        """Uniformly random lattice site as (x, y)."""
        return rnd.randint(self.Lx), rnd.randint(self.Ly)

    def neighbors(self, site):
        """Bonds of ``site`` = (x, y): list of (coupling, (nx, ny)) with
        periodic boundary conditions.  (Tuple parameter unpacked in the body
        for Python 3 compatibility; callers are unaffected.)
        """
        x, y = site
        return [
            (self.Jx, ((x-1) % self.Lx, y)),
            (self.Jx, ((x+1) % self.Lx, y)),
            (self.Jy, (x, (y-1) % self.Ly)),
            (self.Jy, (x, (y+1) % self.Ly))
        ]

    def sample(self, idx):
        """Record |m| and m^2 of the current configuration at sample index idx."""
        M = np.sum(self.config)
        m = M / float(self.Lx * self.Ly)
        self.m_abs[idx] = np.fabs(m)
        self.m_sqr[idx] = m*m

Test with known data for 1D Ising model: $J = 1, \beta = 0.3$ should give $\langle\lvert m\rvert\rangle \approx 0.24$

In [3]:
# Sanity check against the known 1D result (Jx=0 decouples the columns, so a
# (1, 20) lattice with Jy=1 is a 20-site 1D chain).  The Python-2-only
# `print expr` statements are rewritten as single-argument calls, which behave
# identically on Python 2 and Python 3.
ising = Ising2D((1, 20), (0, 1), 0.3, local=True)
print('LOCAL:')
print('  acceptance ratio = %s' % ising.run(1000, 4096))
print('  <|m|> = %s' % np.average(ising.m_abs))

ising = Ising2D((1, 20), (0, 1), 0.3, local=False)
print('CLUSTER:')
print('  avg. cluster size = %s' % ising.run(1000, 4096))
print('  <|m|> = %s' % np.average(ising.m_abs))
LOCAL:
  acceptance ratio = 0.708312988281
  <|m|> = 0.243090820313
CLUSTER:
  avg. cluster size = 1.82006835938
  <|m|> = 0.241870117188
In [4]:
def errors_binning(samples, min_bins=128):
    """Binning analysis: naive standard error of the mean at each bin level.

    Level l averages 2**l consecutive samples.  The sample count must be
    exactly min_bins * 2**l_max (enforced by the assert); the returned array
    has one entry per level, coarsest last.
    """
    l_max = int(round(np.log2(len(samples) / min_bins)))
    assert min_bins * 2**l_max == len(samples)
    errors = np.empty(l_max + 1)
    for level in range(l_max + 1):
        errors[level] = np.std(samples, ddof=1) / np.sqrt(len(samples))
        # Merge neighboring samples pairwise for the next, coarser level.
        samples = 0.5 * (samples[::2] + samples[1::2])
    return errors

def tau_int(errors):
    """Integrated autocorrelation time estimated from binning errors:
    0.5 * ((err_coarsest / err_level0)^2 - 1)."""
    ratio_sqr = errors[-1]**2 / errors[0]**2
    return 0.5 * (ratio_sqr - 1)

def avg(samples):
    """returns triple (mean, error, autocorrelation time)"""
    errs = errors_binning(samples)
    # Coarsest binning level gives the (autocorrelation-corrected) error.
    return np.average(samples), errs[-1], tau_int(errs)

avg(ising.m_abs)
Out[4]:
(0.24187011718750373, 0.0056166229046184556, 1.3885544223184922)

Classical 2D Ising

In [18]:
%%time
Js = np.linspace(0.1, 1, 10)
m_sqr_local = np.zeros((len(Js), 3))
m_sqr_cluster = np.zeros((len(Js), 3))
cluster_sizes = []
L = 10
for i, J in enumerate(Js):
    print J
    
    ising = Ising2D((L, L), (J, J), beta=1, local=True)
    print 'avg acceptance =', ising.run(1024, 4096)
    m_sqr_local[i, :] = avg(ising.m_sqr)

    ising = Ising2D((L, L), (J, J), beta=1, local=False)
    cs = ising.run(1024, 4096)
    print 'avg cluster size =', cs
    m_sqr_cluster[i, :] = avg(ising.m_sqr)
    cluster_sizes.append(cs)
0.1
avg acceptance = 0.848618164063
avg cluster size = 1.55590820312
0.2
avg acceptance = 0.691508789063
avg cluster size = 2.8486328125
0.3
avg acceptance = 0.524836425781
avg cluster size = 6.9638671875
0.4
avg acceptance = 0.303642578125
avg cluster size = 35.1789550781
0.5
avg acceptance = 0.0784130859375
avg cluster size = 84.4919433594
0.6
avg acceptance = 0.0262451171875
avg cluster size = 95.2746582031
0.7
avg acceptance = 0.009853515625
avg cluster size = 97.9597167969
0.8
avg acceptance = 0.0039453125
avg cluster size = 99.2927246094
0.9
avg acceptance = 0.0015771484375
avg cluster size = 99.7456054688
1.0
avg acceptance = 0.000673828125
avg cluster size = 99.8806152344
Wall time: 2min 1s
In [19]:
# <m^2> from both update schemes on the left axis; average Wolff cluster
# size on a twin right axis.
fig, ax_m = plt.subplots()
ax_size = ax_m.twinx()
ax_m.set_title('Local vs. Cluster Updates')
ax_m.set_xlabel(r'$\beta J$')
ax_m.set_ylabel(r'$\langle m^2 \rangle$')
ax_m.errorbar(Js, m_sqr_local[:, 0], fmt='g.-', yerr=m_sqr_local[:, 1], label='local')
ax_m.errorbar(Js, m_sqr_cluster[:, 0], fmt='b.-', yerr=m_sqr_cluster[:, 1], label='cluster')
# Alternative right-axis diagnostic: autocorrelation times.
# ax_size.set_ylabel(r'autocorrelation time $\tau$')
# ax_size.plot(Js, m_sqr_local[:, 2], 'g:')
# ax_size.plot(Js, m_sqr_cluster[:, 2], 'b:')
ax_size.set_ylabel(r'avg. cluster size')
ax_size.plot(Js, cluster_sizes, 'r')
ax_m.legend()
plt.show()

Quantum $\mapsto$ Classical

In [7]:
%%time
def quantum_1d_ising(L, M, J_over_Gamma, Delta):
    J, Gamma = J_over_Gamma, 1.0
    Jx = Delta * J
    Jy = -0.5*np.log(Delta * Gamma)
    return Ising2D((L, M), (Jx, Jy), beta=1)

Js = np.linspace(0.5, 2, 5)
Delta = 0.05
colors = ['r', 'g', 'b', 'y']
LMs = [8, 16, 24]
betas = []
m_abses = []
for L in LMs:
    M = L
    print 'L = M =', L
    betas.append(M * Delta)
    m_abs = np.zeros((len(Js), 3))    
    for i, J in enumerate(Js):
        print '  J =', J
        ising = quantum_1d_ising(L, M, J, Delta)
        print '  -> avg. cluster size =', ising.run(1024, 4096)
        m_abs[i, :] = avg(ising.m_abs)
    m_abses.append(m_abs)
L = M = 8
  J = 0.5
  -> avg. cluster size = 11.0197753906
  J = 0.875
  -> avg. cluster size = 14.7421875
  J = 1.25
  -> avg. cluster size = 19.94921875
  J = 1.625
  -> avg. cluster size = 26.572265625
  J = 2.0
  -> avg. cluster size = 33.7612304688
L = M = 16
  J = 0.5
  -> avg. cluster size = 26.9194335938
  J = 0.875
  -> avg. cluster size = 45.6176757812
  J = 1.25
  -> avg. cluster size = 79.9128417969
  J = 1.625
  -> avg. cluster size = 133.574707031
  J = 2.0
  -> avg. cluster size = 189.163818359
L = M = 24
  J = 0.5
  -> avg. cluster size = 39.5415039062
  J = 0.875
  -> avg. cluster size = 85.7145996094
  J = 1.25
  -> avg. cluster size = 193.372558594
  J = 1.625
  -> avg. cluster size = 387.088378906
  J = 2.0
  -> avg. cluster size = 500.184082031
Wall time: 3min 41s
In [17]:
# <|m|> per effective temperature on the left axis; the corresponding
# autocorrelation times (dotted, same color) on a twin right axis.
fig, ax_m = plt.subplots()
ax_m.set_title('$|m|$')
ax_tau = ax_m.twinx()
ax_m.set_xlabel(r'$\beta J$')
ax_m.set_ylabel(r'$\langle |m| \rangle$')
ax_tau.set_ylabel(r'autocorrelation time $\tau$')
for color, beta, m_abs in zip(colors, betas, m_abses):
    ax_m.errorbar(Js, m_abs[:, 0], fmt=color + '.-', yerr=m_abs[:, 1], label=r'$\beta = %f$' % beta)
    ax_tau.plot(Js, m_abs[:, 2], color + ':')
ax_m.legend()
plt.show()