%pylab inline

# Using FRED requires a personal key.
# To avoid exposing my key, I have it in a separate file.
from keys import fred_key

# Fetch metadata for the real GNP series (GNPCA) from the FRED API
series_id = 'GNPCA'
request_url = ('http://api.stlouisfed.org/fred/series?series_id=' + series_id +
               '&api_key=' + fred_key + '&file_type=json')

import urllib2
f = urllib2.urlopen(request_url)
data = f.read()
data

# Parse the JSON response and inspect its fields
import json
json_data = json.loads(data)
type(json_data)
json_data.keys()
json_data[u'realtime_start'], json_data[u'realtime_end']
json_data[u'seriess']

# Now fetch the observations themselves for GNPCA
request_url = ('http://api.stlouisfed.org/fred/series/observations?series_id=' +
               series_id + '&api_key=' + fred_key + '&file_type=json')
import urllib2
f = urllib2.urlopen(request_url)
data = f.read()
json_data = json.loads(data)
json_data.keys()
json_data[u'count'], json_data[u'order_by'], json_data[u'observation_start'], json_data[u'file_type'], json_data[u'observation_end']
json_data[u'realtime_start'], json_data[u'realtime_end'], json_data[u'sort_order'], json_data[u'limit'], json_data[u'offset']
json_data[u'output_type'], json_data[u'units']

# Pull out the numeric values, plot them, and save them to a text file
values = []
for o in json_data['observations']:
    values.append(float(o['value']))
plot(values)

f = open('../shared/gnpca.txt', 'w')
for v in values:
    f.write(str(v) + '\n')
f.close()

# Repeat for the unemployment rate (UNRATE)
series_id = 'UNRATE'
request_url = ('http://api.stlouisfed.org/fred/series/observations?series_id=' +
               series_id + '&api_key=' + fred_key + '&file_type=json')
f = urllib2.urlopen(request_url)
data = f.read()
json_data = json.loads(data)
values = []
for o in json_data['observations']:
    values.append(float(o['value']))
plot(values)

f = open('../shared/unrate.txt', 'w')
for v in values:
    f.write(str(v) + '\n')
f.close()

# Repeat for the 10-year Treasury rate (GS10)
series_id = 'GS10'
request_url = ('http://api.stlouisfed.org/fred/series/observations?series_id=' +
               series_id + '&api_key=' + fred_key + '&file_type=json')
f = urllib2.urlopen(request_url)
data = f.read()
json_data = json.loads(data)
values = []
for o in json_data['observations']:
    values.append(float(o['value']))
plot(values)

f = open('../shared/gs10.txt', 'w')
for v in values:
    f.write(str(v) + '\n')
f.close()

# Download historical S&P 500 quotes (^GSPC) as CSV from Yahoo Finance
request_url = 'http://ichart.finance.yahoo.com/table.csv?s=^GSPC&ignore=.csv'
f = urllib2.urlopen(request_url)
data = f.read()

import csv
parsed_csv = csv.reader(data.split('\n'))
type(parsed_csv)
# Inspect the header row and the first couple of data rows
parsed_csv.next()
parsed_csv.next()
parsed_csv.next()

# Column index 2 is the daily high
highs = []
for row in parsed_csv:
    if len(row) > 0:
        highs.append(float(row[2]))
plot(highs)
# The rows come newest-first, so reverse to plot in chronological order
plot(highs[::-1])

# Download the corn NIR spectra dataset (a MATLAB .mat file) and load it with scipy
import urllib
urllib.urlretrieve('http://www.eigenvector.com/data/Corn/corn.mat', 'corn.mat')

from scipy.io import loadmat
corn = loadmat('corn.mat')
type(corn)
corn.keys()
corn['mp6spec'][0][0][7]
plot(corn['mp6spec'][0][0][7][0,:])

# Packing and unpacking binary data with struct
fib = [1, 1, 2, 3, 5, 8, 13]

import struct
enc = struct.pack('i', 10)   # a single 4-byte integer
enc
hex(10)
chr(10)
len(enc)
ord('\n'), ord('a')

# Pack the whole list of integers into one byte string
'i' * len(fib)
bytes = struct.pack('i' * len(fib), *fib)   # note: this shadows the built-in name bytes
bytes

# Unpack them back as integers...
fib = struct.unpack('iiiiiii', bytes)
fib
# ...and reinterpret the same raw bytes as floats (yields meaningless tiny values)
fib_f = struct.unpack('fffffff', bytes)
fib_f

# Packing as floats and then unpacking as floats recovers the values
bytes = struct.pack('f' * len(fib), *fib)
bytes
fib_f = struct.unpack('fffffff', bytes)
fib_f
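
# --- Added sketch, not part of the original session ---
# A minimal round trip for the packed integers above: write the byte string to
# a binary file and read it back with struct.unpack. The filename 'fib.bin'
# and the explicit '<' (little-endian, standard-size) prefix are assumptions;
# the session above used the platform's native byte order.
import struct

packed = struct.pack('<' + 'i' * 7, 1, 1, 2, 3, 5, 8, 13)
out = open('fib.bin', 'wb')
out.write(packed)
out.close()

raw = open('fib.bin', 'rb').read()
n = len(raw) // struct.calcsize('<i')   # number of 4-byte integers in the file
struct.unpack('<' + 'i' * n, raw)       # -> (1, 1, 2, 3, 5, 8, 13)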
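
# --- Added sketch, not part of the original session ---
# The GNPCA/UNRATE/GS10 blocks above repeat the same fetch/parse/save steps.
# One possible refactoring, assuming the same FRED observations endpoint, the
# same fred_key imported from keys, and the same '../shared/' output files;
# the helper name fetch_and_save_series is made up for this sketch.
import json
import urllib2
from keys import fred_key

def fetch_and_save_series(series_id, out_path):
    url = ('http://api.stlouisfed.org/fred/series/observations?series_id=' +
           series_id + '&api_key=' + fred_key + '&file_type=json')
    observations = json.loads(urllib2.urlopen(url).read())['observations']
    values = [float(o['value']) for o in observations]
    out = open(out_path, 'w')
    for v in values:
        out.write(str(v) + '\n')
    out.close()
    return values

# Equivalent to the UNRATE block above:
# plot(fetch_and_save_series('UNRATE', '../shared/unrate.txt'))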