# Imports first: the original called json.loads() one line BEFORE
# `import json`, which raises NameError on a clean run.
import json

import pandas as pd
import requests

# Pull the full list of regions from the UK Food Hygiene Ratings API.
# NOTE: header values must be strings — modern `requests` rejects the
# bare int 2 that was here originally.
r = requests.get('http://api.ratings.food.gov.uk/Regions/1/9999999',
                 headers={"x-api-version": "2"})
j = json.loads(r.content)
df = pd.DataFrame.from_dict(j['regions'])
df[:3]
code | id | links | name | nameKey | |
---|---|---|---|---|---|
0 | E | 1 | [{u'href': u'/regions/1', u'rel': u'self'}] | East Counties | East Counties |
1 | EM | 2 | [{u'href': u'/regions/2', u'rel': u'self'}] | East Midlands | East Midlands |
2 | LDN | 3 | [{u'href': u'/regions/3', u'rel': u'self'}] | London | London |
# Each row's `links` cell is a one-element list of link dicts;
# keep only the bare href string of that single self-link.
def _first_href(links):
    return links[0]['href']

df['links'] = df['links'].apply(_first_href)
df
code | id | links | name | nameKey | |
---|---|---|---|---|---|
0 | E | 1 | /regions/1 | East Counties | East Counties |
1 | EM | 2 | /regions/2 | East Midlands | East Midlands |
2 | LDN | 3 | /regions/3 | London | London |
3 | NE | 4 | /regions/4 | North East | North East |
4 | NW | 5 | /regions/5 | North West | North West |
5 | SE | 6 | /regions/6 | South East | South East |
6 | SW | 7 | /regions/7 | South West | South West |
7 | WM | 8 | /regions/8 | West Midlands | West Midlands |
8 | YH | 9 | /regions/9 | Yorkshire and Humberside | Yorkshire and Humberside |
9 | NI | 10 | /regions/10 | Northern Ireland | Northern Ireland |
10 | SC | 11 | /regions/11 | Scotland | Scotland |
11 | WA | 12 | /regions/12 | Wales | Wales |
# Crowd-sourced restaurant recommendations, read straight from a Google
# Sheet's CSV export; columns: Name, Postcode, Person recommended,
# Type of food. NOTE(review): the sheet is on a private domain — the URL
# presumably needs the right Google account / sharing settings to work.
url='https://docs.google.com/a/okfn.org/spreadsheets/d/1M14S4hqG4F5P8H78VdOMMeoITOPBpVZEGoiCvXEFBQg/export?gid=0&format=csv'
df=pd.read_csv(url)
df
Name | Postcode | Person recommended | Type of food | |
---|---|---|---|---|
0 | Sacro Cuore | NW10 3NB | Marcus | Pizza |
1 | Casse-Croûte | SE1 3XB | Timur | Bistro |
2 | Donna Margherita | SW11 5TE | Laia | Italian |
3 | Le Mercury | N1 1QY | Julia | French |
4 | Mondello | W1T 2QN | Danni | Italian |
5 | McDonald's | SW12 9AU | Andy | American |
6 | Silk Road | SE5 8TR | Steph | Chinese |
7 | Pedlar | SE15 4JR | Sam | Anything seasonal |
8 | Polpo Covent Garden | WC2E 7NA | Silvia | Venetian |
9 | McDonald's | WC1V 2JS | Tara | Fast food |
10 | Il Convivio | SW1W 9QN | Francesca | Italian |
11 | Al Maeda | E26DG | Ashraf | Turkish |
12 | Climpson's Arch | E8 3SB | Joe | Thai |
13 | Pizza Express | SE1 9QQ | Adrian | Pizza |
14 | Janetira | W1F 0SR | Madeleine | Thai |
15 | Pizza Express | SW17 7HR | Hina | Pizza |
16 | Bodean's | SW4 7SS | Jon | BBQ |
#http://api.ratings.food.gov.uk/help
#http://docs.python-requests.org/en/latest/user/quickstart/
# Look up a single establishment by name and postcode.
params = {'name': "McDonald's", 'address': 'SW12 9AU'}
# Header values must be strings — an int here raises in modern requests.
r = requests.get('http://api.ratings.food.gov.uk/Establishments',
                 headers={"x-api-version": "2"},
                 params=params)
j = json.loads(r.content)
j
{u'establishments': [{u'AddressLine1': u'', u'AddressLine2': u'159 Balham High Road', u'AddressLine3': u'London', u'AddressLine4': u'', u'BusinessName': u'McDonalds Restaurants Ltd', u'BusinessType': u'Restaurant/Cafe/Canteen', u'BusinessTypeID': 1, u'Distance': None, u'FHRSID': 296343, u'LocalAuthorityBusinessID': u'10251', u'LocalAuthorityCode': u'532', u'LocalAuthorityEmailAddress': u'foodsafety@wandsworth.gov.uk', u'LocalAuthorityName': u'Wandsworth', u'LocalAuthorityWebSite': u'http://www.wandsworth.gov.uk', u'Phone': u'', u'PostCode': u'SW12 9AU', u'RatingDate': u'2014-08-28T00:00:00', u'RatingKey': u'fhrs_5_en-GB', u'RatingValue': u'5', u'RightToReply': u'', u'SchemeType': u'FHRS', u'geocode': {u'latitude': u'51.444321', u'longitude': u'-0.151817'}, u'links': [], u'scores': {u'ConfidenceInManagement': 5, u'Hygiene': 0, u'Structural': 5}}], u'links': [], u'meta': {u'dataSource': u'Lucene', u'extractDate': u'0001-01-01T00:00:00', u'itemCount': 0, u'pageNumber': 1, u'pageSize': 5000, u'returncode': u'OK', u'totalCount': 1, u'totalPages': 1}}
def getFoodRatingData(name, address):
    """Query the FSA Establishments endpoint for one place.

    Parameters: name (business name), address (postcode or address
    fragment). Returns the raw ``requests.Response``; the caller is
    responsible for decoding the JSON body.
    """
    params = {'name': name, 'address': address}
    # Header values must be strings; the original passed the int 2,
    # which modern `requests` rejects.
    r = requests.get('http://api.ratings.food.gov.uk/Establishments',
                     headers={"x-api-version": "2"},
                     params=params)
    return r
def parseFoodRatingData(jdata):
    """Flatten an FSA Establishments JSON payload into a DataFrame.

    For each establishment keeps a fixed set of top-level fields and
    merges in the nested ``geocode`` and ``scores`` dicts, giving one
    row per establishment. Returns an empty DataFrame when the payload
    contains no establishments.
    """
    rows = []
    for establishment in jdata['establishments']:
        info = {item: establishment[item]
                for item in ['BusinessName', 'FHRSID', 'PostCode',
                             'RatingValue', 'RatingDate']}
        info.update(establishment['geocode'])
        info.update(establishment['scores'])
        rows.append(info)
    # DataFrame.append (used originally) was removed in pandas 2.0 and
    # was quadratic anyway — build all rows at once instead.
    return pd.DataFrame(rows)
def getAndParseFoodRatingData(name, address):
    """Fetch and flatten the rating data for one name/postcode query."""
    response = getFoodRatingData(name, address)
    payload = json.loads(response.content)
    return parseFoodRatingData(payload)
# `getinfo` is a stale name from an earlier draft and raises NameError;
# the parser defined above is parseFoodRatingData (its output matches
# the table shown below).
parseFoodRatingData(j)
BusinessName | ConfidenceInManagement | FHRSID | Hygiene | PostCode | RatingDate | RatingValue | Structural | latitude | longitude | |
---|---|---|---|---|---|---|---|---|---|---|
0 | McDonalds Restaurants Ltd | 5 | 296343 | 0 | SW12 9AU | 2014-08-28T00:00:00 | 5 | 5 | 51.444321 | -0.151817 |
# Smoke-test the full fetch-and-parse pipeline on one known restaurant.
getAndParseFoodRatingData('Sacro Cuore','NW10 3NB')
BusinessName | ConfidenceInManagement | FHRSID | Hygiene | PostCode | RatingDate | RatingValue | Structural | latitude | longitude | |
---|---|---|---|---|---|---|---|---|---|---|
0 | Sacro Cuore | 10 | 533105 | 5 | NW10 3NB | 2012-12-10T00:00:00 | 4 | 0 | 51.531985 | -0.217244 |
# Look up every recommended place and stack the results into one frame.
# DataFrame.append (used originally) was removed in pandas 2.0 —
# collect the per-place frames and concatenate once. The original
# repeated index (all zeros) is preserved by not resetting the index.
frames = [getAndParseFoodRatingData(place['Name'], place['Postcode'])
          for _, place in df.iterrows()]
adf = pd.concat(frames) if frames else pd.DataFrame()
adf
BusinessName | ConfidenceInManagement | FHRSID | Hygiene | PostCode | RatingDate | RatingValue | Structural | latitude | longitude | |
---|---|---|---|---|---|---|---|---|---|---|
0 | Sacro Cuore | 10 | 533105 | 5 | NW10 3NB | 2012-12-10T00:00:00 | 4 | 0 | 51.531985 | -0.217244 |
0 | Donna Margherita | 5 | 297821 | 5 | SW11 5TE | 2015-02-18T00:00:00 | 4 | 10 | 51.464613 | -0.159563 |
0 | Le Mercury | 5 | 416632 | 0 | N1 1QY | 2014-09-26T00:00:00 | 5 | 5 | 51.539829 | -0.102785 |
0 | Trattoria Mondello | 5 | 423681 | 0 | W1T 2QN | 2010-07-28T00:00:00 | 5 | 0 | 51.519705 | -0.135347 |
0 | McDonalds Restaurants Ltd | 5 | 296343 | 0 | SW12 9AU | 2014-08-28T00:00:00 | 5 | 5 | 51.444321 | -0.151817 |
0 | Pedler peckham rye | 10 | 739786 | 5 | SE15 4JR | 2015-02-11T00:00:00 | 4 | 5 | 51.465565 | -0.066722 |
0 | IL Convivio | 10 | 412112 | 15 | SW1W 9QN | 2014-02-05T00:00:00 | 2 | 15 | 51.492904 | -0.150458 |
0 | Pizza express | 5 | 364447 | 5 | SE1 9QQ | 2013-10-15T00:00:00 | 5 | 5 | 51.506383 | -0.088713 |
0 | Janetira Eat Thai | 20 | 413307 | 10 | W1F 0SR | 2014-10-28T00:00:00 | 1 | 15 | 51.512203 | -0.134645 |
0 | Pizza Express | 0 | 297020 | 0 | SW17 7HR | 2014-01-14T00:00:00 | 5 | 5 | 51.442492 | -0.166482 |
from IPython.display import HTML
import folium
def inline_map(map):
    """
    Embeds the HTML source of the map directly into the IPython notebook.

    This method will not work if the map depends on any files (json data). Also this uses
    the HTML5 srcdoc attribute, which may not be supported in all browsers.
    """
    map._build_map()
    # Escape double quotes so the whole page source can sit inside the
    # srcdoc="..." attribute. The original replace('"', '"') was a no-op
    # (the &quot; entity was lost in transcription) and produced broken
    # HTML for any map source containing quotes.
    srcdoc = map.HTML.replace('"', '&quot;')
    return HTML('<iframe srcdoc="{srcdoc}" style="width: 100%; height: 510px; border: none"></iframe>'.format(srcdoc=srcdoc))
def embed_map(map, path="map.html"):
    """
    Embeds a linked iframe to the map into the IPython notebook.

    Note: this method will not capture the source of the map into the notebook.
    This method should work for all maps (as long as they use relative urls).
    """
    # Write the map out to disk, then point an iframe at the saved file.
    map.create_map(path=path)
    iframe = ('<iframe src="files/{path}" '
              'style="width: 100%; height: 510px; border: none"></iframe>')
    return HTML(iframe.format(path=path))
# Clustered folium map of every rated establishment, centred on London.
fmap = folium.Map(location=[51.5, 0], zoom_start=9)
for _, place in adf.iterrows():
    popup_text = 'Name: {name}<br/>Score: {score}'.format(
        name=place['BusinessName'], score=place['RatingValue'])
    fmap.simple_marker([place['latitude'], place['longitude']],
                       clustered_marker=True, popup=popup_text)
inline_map(fmap)
# pandas.io.json.json_normalize was deprecated in pandas 1.0 and removed
# in 2.0; the supported location is the top-level pandas namespace.
from pandas import json_normalize