# Load the IPython rmagic extension so %%R cells can execute R code
%load_ext rmagic
# Small demo dataset (numpy assumed imported earlier in the notebook as np)
X = np.array([0,1,2,3,4])
Y = np.array([3,5,4,6,7])
# Push X and Y into R (-i), fit a linear model, export the coefficients (-o)
%%R -i X,Y -o XYcoef
XYlm = lm(Y~X)
XYcoef = coef(XYlm)
print(summary(XYlm))
par(mfrow=c(2,2))
plot(XYlm)
Call: lm(formula = Y ~ X) Residuals: 1 2 3 4 5 -0.2 0.9 -1.0 0.1 0.2 Coefficients: Estimate Std. Error t value Pr(>|t|) (Intercept) 3.2000 0.6164 5.191 0.0139 * X 0.9000 0.2517 3.576 0.0374 * --- Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1 Residual standard error: 0.7958 on 3 degrees of freedom Multiple R-squared: 0.81, Adjusted R-squared: 0.7467 F-statistic: 12.79 on 1 and 3 DF, p-value: 0.03739
# Display the coefficients exported back to Python from the R cell above
XYcoef
array([ 3.2, 0.9])
# Query parameters for the NOAA CO-OPS OPeNDAP server, passed into R below
station = "8518750"
startdate = "20121030"
stopdate = "20121031"
%%R -i station,startdate,stopdate -o urltotal
# Pieces of the OPeNDAP query URL; assembled into one string at the bottom
url1 = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"
url2 = "SixMin_Verified_Water_Level.ascii?"
url3 = "WATERLEVEL_6MIN_VFD_PX._STATION_ID," #return stationId
url4 = "WATERLEVEL_6MIN_VFD_PX._DATUM," #return datum
url5 = "WATERLEVEL_6MIN_VFD_PX.DATE_TIME," #return record date-time
url6 = "WATERLEVEL_6MIN_VFD_PX.WL_VALUE," #return water level value
url7 = "WATERLEVEL_6MIN_VFD_PX.I," #return quality flag
url8 = "WATERLEVEL_6MIN_VFD_PX.F," #return quality flag
url9 = "WATERLEVEL_6MIN_VFD_PX.R," #return quality flag
url10 = "WATERLEVEL_6MIN_VFD_PX.T" #return quality flag
url11 = "&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22" # station ID goes here
url12 = "%22"
url13 = "&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22"#we want MLLW as the datum
url14 = "&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%22" # start date gets put in here
url15 = "%22"
url16 = "&WATERLEVEL_6MIN_VFD_PX._END_DATE=%22" # end date gets put in here
url17 = "%22"
# BUG FIX: paste()'s default sep is a single space, which injected literal
# spaces into the assembled URL (visible in the output below). Use sep=""
# everywhere so the URL is well-formed, matching the standalone script
# version of this code later in the notebook.
urla = paste(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,sep="")
urlb = paste(station,url12,url13,url14,startdate,url15,url16,stopdate,url17,sep="")
urltotal = paste(urla,urlb,sep="")
print urltotal[0]
http://opendap.co-ops.nos.noaa.gov/dods/IOOS/ SixMin_Verified_Water_Level.ascii? WATERLEVEL_6MIN_VFD_PX._STATION_ID, WATERLEVEL_6MIN_VFD_PX._DATUM, WATERLEVEL_6MIN_VFD_PX.DATE_TIME, WATERLEVEL_6MIN_VFD_PX.WL_VALUE, WATERLEVEL_6MIN_VFD_PX.I, WATERLEVEL_6MIN_VFD_PX.F, WATERLEVEL_6MIN_VFD_PX.R, WATERLEVEL_6MIN_VFD_PX.T &WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22 8518750%22&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%2220121030%22&WATERLEVEL_6MIN_VFD_PX._END_DATE=%2220121031%22
#download the data
cat("Contacting server...\n"); flush.console()
dat = getURL(urltotal) #use RCurl to retrieve text into a vector 'dat'
cat("Data returned...\n"); flush.console()
Sys.sleep(2) #If you access data in a loop, be courteous and give the server
#a short break between requests
#cleanup
rm(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,url12,url13,url14)
rm(url15,url16,url17)
con = textConnection(dat) #create text Connection to dat vector
all.lines = readLines(con) #read lines of text into separate slots in a vector
close(con) #close connection to dat vector
if (length(grep('^Error',all.lines))>0) { #check for error in retrieval
cat("There was an error...\n")
cat(dat,"\n") #print contents of dat to show error
flush.console()
} else { #retrieval was successful, parse the text
#The column headers are typically preceded by a line of dashes
headerlines = grep("^--------",all.lines) #find index of headers (-1)
#read column header names into a vector
con = textConnection(dat)
headers = scan(con, skip = headerlines, nlines = 1, sep = ",",
what = "character", strip.white = TRUE)
close(con)
#read rest of the data into a data frame 'df'
con = textConnection(dat)
df = read.table(con, skip = headerlines+1, sep = ",", header = FALSE,
quote = "\"", col.names = headers, strip.white = TRUE,
stringsAsFactors = FALSE)
close(con)
###########################################################################
#The following operations will need to be altered if you change the
#fields or data type being returned by the OPeNDAP server
#Convert the time column to POSIX time (seconds since 1970-01-01 00:00:00)
df[,3] = as.POSIXct(strptime(df[,3],format = "%b %d %Y %I:%M%p",
tz = "GMT"))
#Give the columns shorter names
names(df) = c("stationId","datum","TimeUTC","TideHT","Flag.Inferred",
"Flag.Flat.Tol","Flag.Rate.Tol","Flag.Temp.Tol")
#Plot the data (comment these two lines out if no plot is wanted)
plot(df$TimeUTC, df$TideHT, type = "l",
xlab = "Date",ylab = "Tide Height, meters")
#Save data automatically to a .csv file.
#BUG FIX: this cell's inputs are startdate/stopdate (see the -i list above);
#the original referenced an undefined 'enddate' here and in the rm() below.
filename = paste("Station_",station,"_",startdate,"-",stopdate,
".csv",sep = "") #make file name
write.csv(df,filename,row.names = FALSE, quote = FALSE) #write file to disk
cat("Saved to ",getwd(),"/",filename,"\n",sep = "")
#Alternate file save method lets user specify file name at run time
#Uncomment this if you wish to use it instead of the automated file
#output above
# write.csv(df,file.choose(),row.names = FALSE, quote = FALSE)
#cleanup
rm(dat,con,all.lines,startdate,stopdate,filename,headerlines, headers)
} #end of if-else statement
# coops_tide_ht_retrieval.R
# This script will download a set of verified tide height data from a NOAA
# CO-OPS DODS/OPeNDAP server and parse it into a data frame to be saved to disk
#
# Station list: http://opendap.co-ops.nos.noaa.gov/stations/index.jsp
# OPeNDAP server gateway: http://opendap.co-ops.nos.noaa.gov/dods/
# The gateway has links to several other data types available including
# water temperature, air temperature, wind etc.
# See http://docs.opendap.org/index.php/UserGuideOPeNDAPMessages for info on
# structuring OPeNDAP queries.
#
# Six-minute water level data can only be retrieved 1 month at a time. Other
# fields such as water temperature could return up to 1 year at a time. This
# script only deals with 6-minute tide height data, but could serve as a guide
# for accessing other data types from NOAA CO-OPS.
# The tide height is reported in meters.
# Author: Luke Miller Feb 2011
###############################################################################
#library() stops with an error if RCurl is missing; require() (used before)
#only returns FALSE with a warning, letting the script fail later at getURL()
library(RCurl)
#Prompt the user interactively for the station and date range
noquote(print("Enter NOAA tide station number (i.e. Monterey = 9413450): "))
station = scan("",what = character(),nlines = 1)
noquote(print("Enter start date (format: 20080123 = Jan 23, 2008): "))
startdate = scan("",what = character(),nlines = 1) #get one line of values
noquote(print("Enter end date (format: 20081231 = Dec 31, 2008): "))
enddate = scan("",what = character(),nlines = 1)
#OPeNDAP query for 6-minute verified water level looks like this (on 1 line):
#http://opendap.co-ops.nos.noaa.gov/dods/IOOS/
#SixMin_Verified_Water_Level.ascii?
#WATERLEVEL_6MIN_VFD_PX._STATION_ID,
#WATERLEVEL_6MIN_VFD_PX._DATUM,
#WATERLEVEL_6MIN_VFD_PX.DATE_TIME,
#WATERLEVEL_6MIN_VFD_PX.WL_VALUE,
#WATERLEVEL_6MIN_VFD_PX.I,
#WATERLEVEL_6MIN_VFD_PX.F,
#WATERLEVEL_6MIN_VFD_PX.R,
#WATERLEVEL_6MIN_VFD_PX.T
#&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%229449880%22
#&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22
#&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%2220080801%22
#&WATERLEVEL_6MIN_VFD_PX._END_DATE=%2220080808%22
#The parts of the url to be assembled:
url1 = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"
url2 = "SixMin_Verified_Water_Level.ascii?"
url3 = "WATERLEVEL_6MIN_VFD_PX._STATION_ID," #return stationId
url4 = "WATERLEVEL_6MIN_VFD_PX._DATUM," #return datum
url5 = "WATERLEVEL_6MIN_VFD_PX.DATE_TIME," #return record date-time
url6 = "WATERLEVEL_6MIN_VFD_PX.WL_VALUE," #return water level value
url7 = "WATERLEVEL_6MIN_VFD_PX.I," #return quality flag
url8 = "WATERLEVEL_6MIN_VFD_PX.F," #return quality flag
url9 = "WATERLEVEL_6MIN_VFD_PX.R," #return quality flag
url10 = "WATERLEVEL_6MIN_VFD_PX.T" #return quality flag
#The remaining parts of the url specify how to filter the data on the server
#to only retrieve the desired station and date range. Values must be enclosed
#in ascii double-quotes, which are represented by the code %22
url11 = "&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22" #...insert stationId here...
url12 = "%22"
url13 = "&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22"#we want MLLW as the datum
url14 = "&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%22" #...insert start date here...
url15 = "%22"
url16 = "&WATERLEVEL_6MIN_VFD_PX._END_DATE=%22" #...insert end date here...
url17 = "%22"
#Assemble the URL
#sep = "" is essential: paste()'s default sep " " would put spaces in the URL
urltotal = paste(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,
station,url12,url13,url14,startdate,url15,url16,enddate,url17,sep ="")
#Download the data
cat("Contacting server...\n"); flush.console()
dat = getURL(urltotal) #use RCurl to retrieve text into a vector 'dat'
cat("Data returned...\n"); flush.console()
Sys.sleep(2) #If you access data in a loop, be courteous and give the server
#a short break between requests
#cleanup
rm(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,url12,url13,url14)
rm(url15,url16,url17)
con = textConnection(dat) #create text Connection to dat vector
all.lines = readLines(con) #read lines of text into separate slots in a vector
close(con) #close connection to dat vector
if (length(grep('^Error',all.lines))>0) { #check for error in retrieval
cat("There was an error...\n")
cat(dat,"\n") #print contents of dat to show error
flush.console()
} else { #retrieval was successful, parse the text
#The column headers are typically preceded by a line of dashes
#NOTE(review): grep() returns every matching line; this assumes exactly one
#dashed separator line in the server response -- confirm for other data types
headerlines = grep("^--------",all.lines) #find index of headers (-1)
#read column header names into a vector
#(a fresh textConnection is opened each time because scan/read.table consume
#the connection from the start and skip down to the lines they need)
con = textConnection(dat)
headers = scan(con, skip = headerlines, nlines = 1, sep = ",",
what = "character", strip.white = TRUE)
close(con)
#read rest of the data into a data frame 'df'
con = textConnection(dat)
df = read.table(con, skip = headerlines+1, sep = ",", header = FALSE,
quote = "\"", col.names = headers, strip.white = TRUE,
stringsAsFactors = FALSE)
close(con)
###########################################################################
#The following operations will need to be altered if you change the
#fields or data type being returned by the OPeNDAP server
#Convert the time column to POSIX time (seconds since 1970-01-01 00:00:00)
#Times are parsed as GMT/UTC, e.g. "Oct 30 2012 12:06AM"
df[,3] = as.POSIXct(strptime(df[,3],format = "%b %d %Y %I:%M%p",
tz = "GMT"))
#Give the columns shorter names
names(df) = c("stationId","datum","TimeUTC","TideHT","Flag.Inferred",
"Flag.Flat.Tol","Flag.Rate.Tol","Flag.Temp.Tol")
#Uncomment this if you want to plot the data
# plot(df$TimeUTC, df$TideHT, type = "l",
# xlab = "Date",ylab = "Tide Height, meters")
#Save data automatically to a .csv file.
filename = paste("Station_",station,"_",startdate,"-",enddate,
".csv",sep = "") #make file name
write.csv(df,filename,row.names = FALSE, quote = FALSE) #write file to disk
cat("Saved to ",getwd(),"/",filename,"\n",sep = "")
flush.console()
#Alternate file save method lets user specify file name at run time
#Uncomment this if you wish to use it instead of the automated file
#output above
# write.csv(df,file.choose(),row.names = FALSE, quote = FALSE)
#cleanup
rm(dat,con,all.lines,startdate,enddate,filename,headerlines, headers)
} #end of if-else statement
%%R
# Load RCurl into the embedded R session (its message is echoed below)
require(RCurl)
Loading required package: RCurl
%%R
# Hard-coded query parameters for this example run (Monterey, one day)
station <- "9413450"
startdate <- "20080123"
enddate <- "20080124"
%%R
# First 13 URL fragments only -- this cell stops at url13; url14-url17 and
# the final assembly appear in the complete cell that follows
url1 = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"
url2 = "SixMin_Verified_Water_Level.ascii?"
url3 = "WATERLEVEL_6MIN_VFD_PX._STATION_ID,"
url4 = "WATERLEVEL_6MIN_VFD_PX._DATUM,"
url5 = "WATERLEVEL_6MIN_VFD_PX.DATE_TIME,"
url6 = "WATERLEVEL_6MIN_VFD_PX.WL_VALUE,"
url7 = "WATERLEVEL_6MIN_VFD_PX.I,"
url8 = "WATERLEVEL_6MIN_VFD_PX.F,"
url9 = "WATERLEVEL_6MIN_VFD_PX.R,"
url10 = "WATERLEVEL_6MIN_VFD_PX.T"
url11 = "&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22"
url12 = "%22"
url13 = "&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22"
%%R
station <- c("9413450")
startdate <- c("20080123")
# BUG FIX: original line read `enddate = <- c("20080124")` -- the stray `=`
# before `<-` is an R parse error and made this entire cell fail (see the
# RInterpreterError traceback below: "Error while parsing the string.")
enddate <- c("20080124")
url1 = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"
url2 = "SixMin_Verified_Water_Level.ascii?"
url3 = "WATERLEVEL_6MIN_VFD_PX._STATION_ID,"
url4 = "WATERLEVEL_6MIN_VFD_PX._DATUM,"
url5 = "WATERLEVEL_6MIN_VFD_PX.DATE_TIME,"
url6 = "WATERLEVEL_6MIN_VFD_PX.WL_VALUE,"
url7 = "WATERLEVEL_6MIN_VFD_PX.I,"
url8 = "WATERLEVEL_6MIN_VFD_PX.F,"
url9 = "WATERLEVEL_6MIN_VFD_PX.R,"
url10 = "WATERLEVEL_6MIN_VFD_PX.T"
url11 = "&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22"
url12 = "%22"
url13 = "&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22"
url14 = "&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%22" # start date gets put in here
url15 = "%22"
url16 = "&WATERLEVEL_6MIN_VFD_PX._END_DATE=%22" # end date gets put in here
url17 = "%22"
####### DON'T CHANGE ANYTHING ABOVE THIS LINE ############
##############################################
#Assemble the URL
urltotal = paste(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,
station,url12,url13,url14,startdate,url15,url16,enddate,url17,sep ="")
#Download the data
#cat("Contacting server...\n"); flush.console()
dat = getURL(urltotal) #use RCurl to retrieve text into a vector 'dat'
#cat("Data returned...\n"); flush.console()
Sys.sleep(2) #If you access data in a loop, be courteous and give the server
#a short break between requests
#cleanup
rm(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,url12,url13,url14)
rm(url15,url16,url17)
con = textConnection(dat) #create text Connection to dat vector
all.lines = readLines(con) #read lines of text into separate slots in a vector
close(con) #close connection to dat vector
--------------------------------------------------------------------------- RInterpreterError Traceback (most recent call last) <ipython-input-17-9de0d21a54a2> in <module>() ----> 1 get_ipython()._run_cached_cell_magic(u'R', u'') /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in _run_cached_cell_magic(self, magic_name, line) 2552 cell = self._current_cell_magic_body 2553 self._current_cell_magic_body = None -> 2554 return self.run_cell_magic(magic_name, line, cell) 2555 2556 def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True): /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in run_cell_magic(self, magic_name, line, cell) 2132 magic_arg_s = self.var_expand(line, stack_depth) 2133 with self.builtin_trap: -> 2134 result = fn(magic_arg_s, cell) 2135 return result 2136 /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/extensions/rmagic.py in R(self, line, cell, local_ns) /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/core/magic.pyc in <lambda>(f, *a, **k) 189 # but it's overkill for just that one bit of state. 
190 def magic_deco(arg): --> 191 call = lambda f, *a, **k: f(*a, **k) 192 193 if callable(arg): /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/extensions/rmagic.py in R(self, line, cell, local_ns) 550 return_output = False 551 else: --> 552 text_result, result = self.eval(code) 553 text_output += text_result 554 /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/extensions/rmagic.py in eval(self, line) 159 except (ri.RRuntimeError, ValueError) as exception: 160 warning_or_other_msg = self.flush() # otherwise next return seems to have copy of error --> 161 raise RInterpreterError(line, str_to_unicode(str(exception)), warning_or_other_msg) 162 text_output = self.flush() 163 ri.set_writeconsole(old_writeconsole) RInterpreterError: Failed to parse and evaluate line u'station <- c("9413450")\nstartdate <- c("20080123")\nenddate = <- c("20080124")\n\n\nurl1 = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"\nurl2 = "SixMin_Verified_Water_Level.ascii?"\nurl3 = "WATERLEVEL_6MIN_VFD_PX._STATION_ID," \nurl4 = "WATERLEVEL_6MIN_VFD_PX._DATUM," \nurl5 = "WATERLEVEL_6MIN_VFD_PX.DATE_TIME," \nurl6 = "WATERLEVEL_6MIN_VFD_PX.WL_VALUE,"\nurl7 = "WATERLEVEL_6MIN_VFD_PX.I," \nurl8 = "WATERLEVEL_6MIN_VFD_PX.F," \nurl9 = "WATERLEVEL_6MIN_VFD_PX.R," \nurl10 = "WATERLEVEL_6MIN_VFD_PX.T" \nurl11 = "&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22" \nurl12 = "%22"\nurl13 = "&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22"\nurl14 = "&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%22" # start date gets put in here\nurl15 = "%22"\nurl16 = "&WATERLEVEL_6MIN_VFD_PX._END_DATE=%22" # end date gets put in here\nurl17 = "%22"\n####### DON\'T CHANGE ANYTHING ABOVE THIS LINE ############\n##############################################\n\n#Assemble the URL\nurltotal = paste(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,\n\t\tstation,url12,url13,url14,startdate,url15,url16,enddate,url17,sep ="")\n\n#Download the data\n#cat("Contacting server...\\n"); flush.console()\ndat = getURL(urltotal) #use 
RCurl to retrieve text into a vector \'dat\'\n#cat("Data returned...\\n"); flush.console()\nSys.sleep(2) #If you access data in a loop, be courteous and give the server\n#a short break between requests\n#cleanup\nrm(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,url12,url13,url14)\nrm(url15,url16,url17)\n\ncon = textConnection(dat) #create text Connection to dat vector\nall.lines = readLines(con) #read lines of text into separate slots in a vector\nclose(con) #close connection to dat vector'. R error message: u'Error while parsing the string.'
%%R
station <- c("9413450")
startdate <- c("20080123")
# BUG FIX: original line read `enddate = <- c("20080124")` -- the stray `=`
# before `<-` is an R parse error; it is what triggered the RInterpreterError
# traceback below ("Error while parsing the string.")
enddate <- c("20080124")
url1 = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"
url2 = "SixMin_Verified_Water_Level.ascii?"
url3 = "WATERLEVEL_6MIN_VFD_PX._STATION_ID,"
url4 = "WATERLEVEL_6MIN_VFD_PX._DATUM,"
url5 = "WATERLEVEL_6MIN_VFD_PX.DATE_TIME,"
url6 = "WATERLEVEL_6MIN_VFD_PX.WL_VALUE,"
url7 = "WATERLEVEL_6MIN_VFD_PX.I,"
url8 = "WATERLEVEL_6MIN_VFD_PX.F,"
url9 = "WATERLEVEL_6MIN_VFD_PX.R,"
url10 = "WATERLEVEL_6MIN_VFD_PX.T"
url11 = "&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22"
url12 = "%22"
url13 = "&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22"
url14 = "&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%22" # start date gets put in here
url15 = "%22"
url16 = "&WATERLEVEL_6MIN_VFD_PX._END_DATE=%22" # end date gets put in here
url17 = "%22"
####### DON'T CHANGE ANYTHING ABOVE THIS LINE ############
##############################################
#Assemble the URL
urltotal = paste(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,
station,url12,url13,url14,startdate,url15,url16,enddate,url17,sep ="")
#Download the data
#cat("Contacting server...\n"); flush.console()
dat = getURL(urltotal) #use RCurl to retrieve text into a vector 'dat'
#cat("Data returned...\n"); flush.console()
Sys.sleep(2) #If you access data in a loop, be courteous and give the server
#a short break between requests
#cleanup
rm(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,url12,url13,url14)
rm(url15,url16,url17)
con = textConnection(dat) #create text Connection to dat vector
all.lines = readLines(con) #read lines of text into separate slots in a vector
close(con) #close connection to dat vector
if (length(grep('^Error',all.lines))>0) { #check for error in retrieval
cat("There was an error...\n")
cat(dat,"\n") #print contents of dat to show error
flush.console()
} else { #retrieval was successful, parse the text
#The column headers are typically preceded by a line of dashes
headerlines = grep("^--------",all.lines) #find index of headers (-1)
#read column header names into a vector
con = textConnection(dat)
headers = scan(con, skip = headerlines, nlines = 1, sep = ",",
what = "character", strip.white = TRUE)
close(con)
#read rest of the data into a data frame 'df'
con = textConnection(dat)
df = read.table(con, skip = headerlines+1, sep = ",", header = FALSE,
quote = "\"", col.names = headers, strip.white = TRUE,
stringsAsFactors = FALSE)
close(con)
###########################################################################
#The following operations will need to be altered if you change the
#fields or data type being returned by the OPeNDAP server
#Convert the time column to POSIX time (seconds since 1970-01-01 00:00:00)
df[,3] = as.POSIXct(strptime(df[,3],format = "%b %d %Y %I:%M%p",
tz = "GMT"))
#Give the columns shorter names
names(df) = c("stationId","datum","TimeUTC","TideHT","Flag.Inferred",
"Flag.Flat.Tol","Flag.Rate.Tol","Flag.Temp.Tol")
#Uncomment this if you want to plot the data
# plot(df$TimeUTC, df$TideHT, type = "l",
# xlab = "Date",ylab = "Tide Height, meters")
#Save data automatically to a .csv file.
filename = paste("Station_",station,"_",startdate,"-",enddate,
".csv",sep = "") #make file name
write.csv(df,filename,row.names = FALSE, quote = FALSE) #write file to disk
cat("Saved to ",getwd(),"/",filename,"\n",sep = "")
# flush.console()
#Alternate file save method lets user specify file name at run time
#Uncomment this if you wish to use it instead of the automated file
#output above
# write.csv(df,file.choose(),row.names = FALSE, quote = FALSE)
#cleanup
rm(dat,con,all.lines,startdate,enddate,filename,headerlines, headers)
} #end of if-else statement
--------------------------------------------------------------------------- RInterpreterError Traceback (most recent call last) <ipython-input-9-9de0d21a54a2> in <module>() ----> 1 get_ipython()._run_cached_cell_magic(u'R', u'') /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in _run_cached_cell_magic(self, magic_name, line) 2552 cell = self._current_cell_magic_body 2553 self._current_cell_magic_body = None -> 2554 return self.run_cell_magic(magic_name, line, cell) 2555 2556 def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True): /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/core/interactiveshell.pyc in run_cell_magic(self, magic_name, line, cell) 2132 magic_arg_s = self.var_expand(line, stack_depth) 2133 with self.builtin_trap: -> 2134 result = fn(magic_arg_s, cell) 2135 return result 2136 /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/extensions/rmagic.py in R(self, line, cell, local_ns) /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/core/magic.pyc in <lambda>(f, *a, **k) 189 # but it's overkill for just that one bit of state. 
190 def magic_deco(arg): --> 191 call = lambda f, *a, **k: f(*a, **k) 192 193 if callable(arg): /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/extensions/rmagic.py in R(self, line, cell, local_ns) 550 return_output = False 551 else: --> 552 text_result, result = self.eval(code) 553 text_output += text_result 554 /home/rsignell/epd-7.2-1/lib/python2.7/site-packages/IPython/extensions/rmagic.py in eval(self, line) 159 except (ri.RRuntimeError, ValueError) as exception: 160 warning_or_other_msg = self.flush() # otherwise next return seems to have copy of error --> 161 raise RInterpreterError(line, str_to_unicode(str(exception)), warning_or_other_msg) 162 text_output = self.flush() 163 ri.set_writeconsole(old_writeconsole) RInterpreterError: Failed to parse and evaluate line u'station <- c("9413450")\nstartdate <- c("20080123")\nenddate = <- c("20080124")\n\n\nurl1 = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"\nurl2 = "SixMin_Verified_Water_Level.ascii?"\nurl3 = "WATERLEVEL_6MIN_VFD_PX._STATION_ID," \nurl4 = "WATERLEVEL_6MIN_VFD_PX._DATUM," \nurl5 = "WATERLEVEL_6MIN_VFD_PX.DATE_TIME," \nurl6 = "WATERLEVEL_6MIN_VFD_PX.WL_VALUE,"\nurl7 = "WATERLEVEL_6MIN_VFD_PX.I," \nurl8 = "WATERLEVEL_6MIN_VFD_PX.F," \nurl9 = "WATERLEVEL_6MIN_VFD_PX.R," \nurl10 = "WATERLEVEL_6MIN_VFD_PX.T" \nurl11 = "&WATERLEVEL_6MIN_VFD_PX._STATION_ID=%22" \nurl12 = "%22"\nurl13 = "&WATERLEVEL_6MIN_VFD_PX._DATUM=%22MLLW%22"\nurl14 = "&WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE=%22" # start date gets put in here\nurl15 = "%22"\nurl16 = "&WATERLEVEL_6MIN_VFD_PX._END_DATE=%22" # end date gets put in here\nurl17 = "%22"\n####### DON\'T CHANGE ANYTHING ABOVE THIS LINE ############\n##############################################\n\n#Assemble the URL\nurltotal = paste(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,\n\t\tstation,url12,url13,url14,startdate,url15,url16,enddate,url17,sep ="")\n\n#Download the data\n#cat("Contacting server...\\n"); flush.console()\ndat = getURL(urltotal) #use 
RCurl to retrieve text into a vector \'dat\'\n#cat("Data returned...\\n"); flush.console()\nSys.sleep(2) #If you access data in a loop, be courteous and give the server\n#a short break between requests\n#cleanup\nrm(url1,url2,url3,url4,url5,url6,url7,url8,url9,url10,url11,url12,url13,url14)\nrm(url15,url16,url17)\n\ncon = textConnection(dat) #create text Connection to dat vector\nall.lines = readLines(con) #read lines of text into separate slots in a vector\nclose(con) #close connection to dat vector\n\nif (length(grep(\'^Error\',all.lines))>0) { #check for error in retrieval\n\tcat("There was an error...\\n")\n\tcat(dat,"\\n") #print contents of dat to show error\n\tflush.console()\n} else { #retrieval was successful, parse the text\n\n\t#The column headers are typically preceded by a line of dashes\n\theaderlines = grep("^--------",all.lines) #find index of headers (-1)\n\n\t#read column header names into a vector\n\tcon = textConnection(dat)\n\theaders = scan(con, skip = headerlines, nlines = 1, sep = ",",\n\t\t\twhat = "character", strip.white = TRUE)\n\tclose(con)\n\n\t#read rest of the data into a data frame \'df\'\n\tcon = textConnection(dat)\n\tdf = read.table(con, skip = headerlines+1, sep = ",", header = FALSE,\n\t\t\tquote = "\\"", col.names = headers, strip.white = TRUE,\n\t\t\tstringsAsFactors = FALSE)\n\tclose(con)\n\n\t###########################################################################\n\t#The following operations will need to be altered if you change the \n\t#fields or data type being returned by the OPeNDAP server\n\n\t#Convert the time column to POSIX time (seconds since 1970-01-01 00:00:00)\n\tdf[,3] = as.POSIXct(strptime(df[,3],format = "%b %d %Y %I:%M%p",\n\t\t\t\t\ttz = "GMT"))\n\n\t#Give the columns shorter names\n\tnames(df) = c("stationId","datum","TimeUTC","TideHT","Flag.Inferred",\n\t\t\t"Flag.Flat.Tol","Flag.Rate.Tol","Flag.Temp.Tol")\n\n\t#Uncomment this if you want to plot the data\n#\tplot(df$TimeUTC, df$TideHT, type = 
"l",\n#\t\t\txlab = "Date",ylab = "Tide Height, meters")\n\n\t#Save data automatically to a .csv file. \n\tfilename = paste("Station_",station,"_",startdate,"-",enddate,\n\t\t\t".csv",sep = "") #make file name\n\twrite.csv(df,filename,row.names = FALSE, quote = FALSE) #write file to disk\n\tcat("Saved to ",getwd(),"/",filename,"\\n",sep = "")\n#\tflush.console()\n\n\t#Alternate file save method lets user specify file name at run time\n\t#Uncomment this if you wish to use it instead of the automated file \n\t#output above\n#\twrite.csv(df,file.choose(),row.names = FALSE, quote = FALSE)\n\n\t#cleanup\n\trm(dat,con,all.lines,startdate,enddate,filename,headerlines, headers)\n\n} #end of if-else statement\n'. R error message: u'Error while parsing the string.'