%matplotlib inline
%load_ext autoreload
%autoreload 2

import numpy as np
import matplotlib.pyplot as plt
from pymks import MKSRegressionModel
from pymks import FastMKSRegressionModel
from pymks import FiPyCHModel
from sklearn import metrics

mse = metrics.mean_squared_error

# Connect to the IPython parallel cluster and take a direct view of all engines.
from IPython.parallel import Client
rc = Client()
dview = rc[:]

# Generate 32 batches of random initial microstructures, 100 samples each, on an 11 x 11 grid.
np.random.seed(101)
X = [np.random.random((100, 11, 11)) for x in range(32)]

# Wrap the FiPy Cahn-Hilliard solver in a function so it can be shipped to the engines;
# each engine imports pymks and builds its own FiPyCHModel.
def fipymodel(X):
    from pymks import FiPyCHModel
    dx = 0.25
    dt = 1e-3
    fipymodel = FiPyCHModel(dx=dx, dy=dx, dt=dt, a=1.)
    return fipymodel.predict(X)

dview['fipymodel'] = fipymodel

# Evaluate the FiPy responses in parallel, one batch per engine, then stack the
# batches back into single arrays.
y = dview.map_sync(lambda x: fipymodel(x), X)
y = np.concatenate(y, axis=0)
X = np.concatenate(X, axis=0)
print y.shape

# Calibrate the MKS influence coefficients against the FiPy responses.
model = MKSRegressionModel(Nbin=5)
model.fit(X, y)

# View the first influence coefficient in real space, rolled so the origin sits
# at the center of the domain.
coeff = np.fft.ifftn(model.Fcoeff, axes=(0, 1)).real
Nroll = coeff.shape[0] / 2
coeff_rolled = np.roll(np.roll(coeff[:, :, 0], Nroll, axis=0), Nroll, axis=1)
plt.contourf(coeff_rolled, 250)
plt.colorbar()

# Split the data and grid-search over the number of bins, scoring with negative MSE.
from sklearn.cross_validation import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=3)

from sklearn.grid_search import GridSearchCV
tuned_parameters = [{'Nbin': np.arange(2, 20)}]

def neg_mse(a, b):
    return -mse(a, b)

gridSearch = GridSearchCV(MKSRegressionModel(Nbin=10), tuned_parameters, cv=5, score_func=neg_mse)
gridSearch.fit(X_train, y_train)
print(gridSearch.best_estimator_)
for params, mean_score, scores in gridSearch.grid_scores_:
    print("%0.5f (+/-%0.5f) for %r" % (mean_score, scores.std() / 2, params))

y_true, y_pred = y_test, gridSearch.predict(X_test)
print(mse(y_true, y_pred))
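# Optional aside (not part of the original notebook): visualize the held-out fit
# as a parity plot, reusing only names defined above (y_true, y_pred, plt).
plt.plot(y_true.flatten(), y_pred.flatten(), 'k.', alpha=0.1)
plt.plot([y_true.min(), y_true.max()], [y_true.min(), y_true.max()], 'r-')
plt.xlabel('FiPy response (held out)')
plt.ylabel('MKS prediction')
plt.show()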
# Set up a single 21 x 21 initial condition and a FiPy model for explicit time stepping.
np.random.seed(11)
X0 = np.random.random((1, 21, 21))
dt = 1e-2
dx = 0.25
fipymodel = FiPyCHModel(dx=dx, dy=dx, dt=dt, a=1.)

# scratch check of clipping behavior
np.maximum(np.array((0, 1, 2)), 1)

import numexpr as ne

# Step the FiPy model forward with forward Euler, clipping the concentration to
# [0, 1] and saving every 10th frame.
X1 = X0.copy()
data = []
for step in range(701):
    y1 = fipymodel.predict(X1)
    X1 = ne.evaluate('X1 + dt * y1')
    X1 = np.minimum(X1, 1)
    X1 = np.maximum(X1, 0)
    if step % 10 == 0:
        data.append(X1[0])
        print step,

# JSAnimation import available at https://github.com/jakevdp/JSAnimation
from JSAnimation import IPython_display
from matplotlib import animation

# create a simple animation of the saved frames
fig = plt.figure()
ax = plt.axes()
im = plt.imshow(X0[0], vmin=0.3, vmax=0.7)
plt.colorbar()

def init():
    im.set_array(X0[0])
    return (im,)

def animate(i):
    im.set_array(data[i])
    return (im,)

anim = animation.FuncAnimation(fig, animate, init_func=init, frames=len(data), interval=1, blit=True)

from IPython.display import HTML
plt.close(anim._fig)
HTML(IPython_display.anim_to_html(anim))

2**8

# Repeat the evolution with the calibrated FastMKSRegressionModel on a much larger
# 256 x 256 grid; the influence coefficients are resized to the new domain.
from pymks import FastMKSRegressionModel
import numexpr

np.random.seed(11)
X0 = np.random.random((1, 256, 256))
dt = 1e-3
model = FastMKSRegressionModel(Nbin=6, threads=8)
print X.shape
print y.shape
model.fit(X, y)
model.resize_coeff(X0.shape[1:])

data = []
X1 = X0.copy()
for step in range(15001):
    y1 = model.predict(X1)
    X1 = numexpr.evaluate('X1 + dt * y1')
    X1 = np.minimum(X1, 1)
    X1 = np.maximum(X1, 0)
    if step % 100 == 0:
        data.append(X1[0])
        print step,

# Quick checks that the saved frames evolve and stay in [0, 1].
print len(data)
print data[0][0, 0]
print data[1][0, 0]
print data[150][0, 0]
for d in data[::10]:
    print max(d.flatten())
    print min(d.flatten())

# Crude in-notebook animation by redrawing the figure frame by frame.
from IPython.display import display, clear_output
import time

fig = plt.figure()
ax = plt.axes()
a = plt.imshow(X0[0], vmin=0.3, vmax=0.7)
plt.colorbar()
for d in data:
    a.set_array(d)
    plt.show()
    time.sleep(1)
    clear_output()
    display(fig)

# JSAnimation import available at https://github.com/jakevdp/JSAnimation
from JSAnimation import IPython_display
from matplotlib import animation

# create a simple animation of the 256 x 256 evolution
fig = plt.figure()
ax = plt.axes()
im = plt.imshow(X0[0], vmin=0.0, vmax=1.0)
plt.colorbar()

def init():
    im.set_array(X0[0])
    return (im,)

def animate(i):
    im.set_array(data[i])
    return (im,)

anim = animation.FuncAnimation(fig, animate, init_func=init, frames=len(data), interval=1, blit=True)

from IPython.display import HTML
plt.close(anim._fig)
HTML(IPython_display.anim_to_html(anim))

# scratch check of attribute access
class A:
    def __init__(self):
        self.a = 1

b = A()
getattr(b, 'a')

# Profile the fast and slow MKS implementations on a 1024 x 1024 grid.
# pip install line_profiler
%load_ext line_profiler

from pymks import FastMKSRegressionModel
np.random.seed(11)
X0 = np.random.random((1, 1024, 1024))
fast_model = FastMKSRegressionModel(Nbin=5, threads=8)
slow_model = MKSRegressionModel(Nbin=5)
fast_model.fit(X, y)
fast_model.resize_coeff(X0.shape[1:])
slow_model.fit(X, y)
slow_model.resize_coeff(X0.shape[1:])

X1 = X0.copy()
%timeit fast_model.predict(X1.copy())
%timeit slow_model.predict(X1.copy())
%lprun -f slow_model.predict slow_model.predict(X1.copy())
%lprun -f fast_model._calcfft fast_model.predict(X1.copy())

# Compare numpy's FFT against the parallel FFT helper.
from parallelfft import _parallelFFT
import time

a = np.random.random((1024, 1024))

b = a.copy()
t0 = time.time()
Fa = np.fft.fft(b)
print time.time() - t0

b = a.copy()
t0 = time.time()
Fa_ = _parallelFFT(b)
print time.time() - t0

print np.allclose(Fa, Fa_)

# Profile the parallel FFT on a much taller array.
a = np.random.random((1024 * 2**6, 1024))
b = a.copy()
%lprun -f _parallelFFT _parallelFFT(b)
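# Optional aside (not in the original notebook): pyfftw also exposes a drop-in,
# numpy-style interface that plans and caches FFTW transforms behind the call.
# This is a minimal sketch assuming pyfftw is installed; the threads argument and
# the pyfftw.interfaces.cache module come from the pyfftw interfaces API.
import pyfftw
import pyfftw.interfaces.numpy_fft as fftw_fft
import pyfftw.interfaces.cache

pyfftw.interfaces.cache.enable()  # keep FFTW plans alive between calls
arr_check = np.random.random((1024, 1024))
F_np = np.fft.fftn(arr_check, axes=(0, 1))
F_fftw = fftw_fft.fftn(arr_check, axes=(0, 1), threads=4)
print np.allclose(F_np, F_fftw)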
# Benchmark pyfftw's planned FFTW transforms against numpy for a range of thread counts.
import pyfftw

N = 1024
arr = np.random.random((N, N))
print arr.dtype
a = pyfftw.n_byte_align_empty((N, N), 16, 'complex128')
b = pyfftw.n_byte_align_empty((N, N), 16, 'complex128')

for threads in (1, 2, 3, 4, 5, 6, 7, 8):
    # Plan a 2D FFT over both axes; planning may overwrite a and b, so the input
    # is refilled afterwards.
    fft_object = pyfftw.FFTW(a, b, threads=threads, axes=(0, 1))
    a[:] = arr
    print 'number of threads:', threads
    %timeit fft_object(a)
    print 'numpy'
    a[:] = arr
    %timeit np.fft.fftn(a, axes=(0, 1))
    # %timeit does not keep assignments, so run each transform once more to check
    # that the results agree.
    a[:] = arr
    b0 = fft_object(a).copy()
    a[:] = arr
    b1 = np.fft.fftn(a, axes=(0, 1))
    print np.allclose(b0, b1)
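# Optional variant (not in the original notebook): collect the same thread-scaling
# measurement with the stdlib timeit module so the timings land in a plain Python
# list that can be plotted, instead of being printed by %timeit. It reuses `arr`
# from above; time_fftw, n_repeat and thread_counts are names introduced here
# purely for illustration.
import timeit

def time_fftw(arr, threads, n_repeat=10):
    """Return the best wall-clock time (s) for one planned 2D FFT of arr."""
    a = pyfftw.n_byte_align_empty(arr.shape, 16, 'complex128')
    b = pyfftw.n_byte_align_empty(arr.shape, 16, 'complex128')
    fft_object = pyfftw.FFTW(a, b, threads=threads, axes=(0, 1))
    a[:] = arr  # planning may scribble on the arrays, so refill the input
    return min(timeit.repeat(lambda: fft_object(a), number=1, repeat=n_repeat))

thread_counts = (1, 2, 4, 8)
times = [time_fftw(arr, t) for t in thread_counts]
plt.plot(thread_counts, times, 'o-')
plt.xlabel('threads')
plt.ylabel('time per FFT (s)')
plt.show()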