You can turn all the tables into a wide data frame with list operations:
# Scrape the four hourly API tables for one day and merge them into one
# wide data frame (one column per hour).
library(rvest)
library(magrittr)
library(dplyr)

date <- 20130701
rng <- 1:4  # the site splits one day across 4 pages: hourly1.php .. hourly4.php

my_tabs <- lapply(rng, function(i) {
  url <- sprintf("http://apims.doe.gov.my/apims/hourly%d.php?date=%s", i, date)
  # read_html() replaces rvest::html(), which is deprecated/defunct in
  # current rvest releases
  pg <- read_html(url)
  # the first <table> on each page holds the readings
  pg %>% html_nodes("table") %>% extract2(1) %>% html_table(header = TRUE)
})

# join all four hourly tables on the first two columns (state, area)
glimpse(plyr::join_all(my_tabs, by = colnames(my_tabs[[1]][1:2])))
## Observations: 52
## Variables:
## $ NEGERI / STATE (chr) "Johor", "Johor", "Johor", "Johor", "Kedah...
## $ KAWASAN/AREA (chr) "Kota Tinggi", "Larkin Lama", "Muar", "Pas...
## $ MASA/TIME12:00AM (chr) "63*", "53*", "51*", "55*", "37*", "48*", ...
## $ MASA/TIME01:00AM (chr) "62*", "52*", "52*", "55*", "36*", "48*", ...
## $ MASA/TIME02:00AM (chr) "61*", "51*", "53*", "55*", "35*", "48*", ...
## $ MASA/TIME03:00AM (chr) "60*", "50*", "54*", "55*", "35*", "48*", ...
## $ MASA/TIME04:00AM (chr) "59*", "49*", "54*", "54*", "34*", "47*", ...
## $ MASA/TIME05:00AM (chr) "58*", "48*", "54*", "54*", "34*", "45*", ...
## $ MASA/TIME06:00AM (chr) "57*", "47*", "53*", "53*", "33*", "45*", ...
## $ MASA/TIME07:00AM (chr) "57*", "46*", "52*", "53*", "32*", "45*", ...
## $ MASA/TIME08:00AM (chr) "56*", "45*", "52*", "52*", "32*", "44*", ...
## ...
I rarely load/use plyr anymore because of its naming collisions with dplyr, but `plyr::join_all` is perfect for this situation.
It’s also likely you’ll need this data in long format:
# Reshape wide -> long (one row per state/area/hour) and clean up values.
plyr::join_all(my_tabs, by = colnames(my_tabs[[1]][1:2])) %>%
  tidyr::gather(masa, nilai, -1, -2) %>%
  # better column names
  # NOTE(review): "nigeri" is a typo for "negeri", kept for compatibility
  # with the output shown below
  rename(nigeri = `NEGERI / STATE`, kawasan = `KAWASAN/AREA`) %>%
  # cleanup & convert time (using local timezone)
  # make readings numeric; NA will sub for #
  mutate(masa = gsub("MASA/TIME", "", masa),
         # BUG FIX: the original "%H:%M%p" uses the 24-hour %H, which makes
         # the %p AM/PM marker ineffective, so "12:00AM" parsed as noon.
         # %I (12-hour clock) + %p parses it correctly as midnight.
         masa = as.POSIXct(sprintf("%s %s", date, masa),
                           format = "%Y%m%d %I:%M%p",
                           tz = "Asia/Kuala_Lumpur"),
         # strip the trailing "*"/"#" quality flags before conversion
         nilai = as.numeric(gsub("[[:punct:]]+", "", nilai))) -> pollut
head(pollut)
## nigeri kawasan masa nilai
## 1 Johor Kota Tinggi 2013-07-01 12:00:00 63
## 2 Johor Larkin Lama 2013-07-01 12:00:00 53
## 3 Johor Muar 2013-07-01 12:00:00 51
## 4 Johor Pasir Gudang 2013-07-01 12:00:00 55
## 5 Kedah Alor Setar 2013-07-01 12:00:00 37
## 6 Kedah Bakar Arang, Sg. Petani 2013-07-01 12:00:00 48
This solves scraping data off a site using 4 URLs for one day in R.