MetricT

Unemployment with "Doppler map"

Jan 9th, 2020
### This script maps trend unemployment by state, with a "Doppler map" that
### shows whether unemployment is falling or rising.  In both cases, it
### uses a 6-month average of the data.
###
### 1.  Fetch unemployment data for each state from FRED
### 2.  Use mstl() to decompose the data into trend/seasonal/remainder
### 3.  Measure the 6-month average of trend unemployment and d(trend)/dt
### 4.  Turn the data into a pretty map

### You need a FRED API key in order to pull data from FRED.
### You may request an API key at:
### https://research.stlouisfed.org/useraccount/apikeys
api_key_fred   <- "INSERT_YOUR_FRED_API_KEY_HERE"

### We need the following packages for this example.
packages <- c("tidyverse", "lubridate", "fredr", "ggplot2", "maps", "sf",
              "ggthemes", "tsibble", "dplyr", "broom", "ggfortify",
              "usmap", "forecast", "gridExtra", "cartogram", "RColorBrewer")

### Install packages if needed, then load them quietly
new_packages <- packages[!(packages %in% installed.packages()[, "Package"])]
if (length(new_packages)) install.packages(new_packages, quiet = TRUE)
invisible(lapply(packages, "library",
                 quietly = TRUE,
                 character.only = TRUE,
                 warn.conflicts = FALSE))

### Now set the FRED API key
fredr_set_key(api_key_fred)

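### Not part of the original paste: if you would rather not hard-code the key
### in the script, a sketch of reading it from an environment variable instead
### (fredr can also pick the key up from FRED_API_KEY on its own; the variable
### name here is an assumption you can change):
key_from_env <- Sys.getenv("FRED_API_KEY")
if (nzchar(key_from_env)) fredr_set_key(key_from_env)
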
### We need several years' worth of data in order to filter out
### the seasonal component.  Ten years is a nice round number...
date_start <- as.Date("2010-01-01")
date_end   <- as.Date(now())

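### Not in the original paste: if you prefer a rolling ten-year window that
### always ends today rather than a fixed 2010 start date, lubridate can
### compute it.  Shown as an illustrative, unused variable so it does not
### change the results below:
date_start_rolling <- as.Date(now()) - years(10)
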
### Create an empty tibble to hold our data
trend_unemployment <- tibble(state        = character(),
                             unemployment = numeric(),
                             velocity     = numeric())

for (state in state.abb) {

  # Pull data from FRED (series IDs follow the pattern "XXURN", e.g. "CAURN")
  data <- fredr(series_id = paste(state, "URN", sep = ""),
                observation_start = date_start,
                observation_end   = date_end,
                frequency = "m")

  date   <- data %>% pull("date")
  values <- data %>% pull("value")

  # Decompose the data and pluck out the trend component
  trend <- values %>%
           ts(frequency = 12, start = c(year(min(date)), month(min(date)))) %>%
           mstl() %>%
           trendcycle()

  # Compute the 6 month average of unemployment
  unemployment <- trend %>% tail(n = 6) %>% mean()

  # Compute the 6 month average of unemployment velocity
  velocity <- trend %>% diff() %>% tail(n = 6) %>% mean()

  # Append result to our tibble
  trend_unemployment <- trend_unemployment %>%
                        add_row(state        = state,
                                unemployment = unemployment,
                                velocity     = velocity)

}

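### Not part of the original paste: if you want to eyeball the decomposition
### for a single state before trusting the loop above, forecast's mstl objects
### can be plotted directly.  A minimal sketch (Texas chosen here purely as an
### example series):
tx <- fredr(series_id = "TXURN",
            observation_start = date_start,
            observation_end   = date_end,
            frequency = "m")
tx_ts <- ts(tx$value, frequency = 12,
            start = c(year(min(tx$date)), month(min(tx$date))))
autoplot(mstl(tx_ts))   # trend, seasonal, and remainder panels
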
### Use the "state.name" and "state.abb" databases to convert two-letter state
### codes to lowercase names (tennessee, california, etc.).  Add the resulting
### lowercase name to the tibble
match <- match(trend_unemployment$state, state.abb)
trend_unemployment$state_name <- state.name[match] %>% tolower()

### Load a map of states in the USA.  The map carries a list of lowercase
### state names, which we will use to join against trend_unemployment below
us_map <- maps::map("state", plot = FALSE, fill = TRUE)

### Change the latitude/longitude data to a simple feature object
us_map <- sf::st_as_sf(us_map)

### Change the name of the "ID" column to "state_name"
us_map <- us_map %>% rename(state_name = ID)

### Remove the District of Columbia from our map
us_map <- us_map %>% filter(state_name != "district of columbia")

### Add our unemployment and velocity data to the map data
us_map <- us_map %>% left_join(trend_unemployment, by = "state_name")

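### Not in the original paste: a quick sanity check that every state in the
### map picked up an unemployment value from the join (i.e. no NA fills):
stopifnot(!any(is.na(us_map$unemployment)))
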
### Map the unemployment data
p1 <- ggplot(us_map, aes(fill = unemployment)) +

      theme_void() +
      theme(legend.position = "bottom") +

      # Title of our graph
      labs(title = "Trend Unemployment Rate by State, 6 month average") +

      # Draw our map with black state borders
      geom_sf(color = "black") +

      # Change the map projection to make it look nicer
      coord_sf(crs = "+proj=aea +lat_1=25 +lat_2=50 +lon_0=-100", ndiscr = 0) +

      # Color the map by unemployment (reversed "Blues" palette, log scale)
      scale_fill_gradientn(name = "Unemployment (%)",
                           colours = rev(brewer.pal(9, "Blues")),
                           trans = "log10")

p2 <- ggplot(us_map) +

      theme_void() +
      theme(legend.position = "bottom") +

      # Title of our graph
      labs(title = "Trend Unemployment Rate Change by State, 6 month average") +

      # Color states using green for falling unemployment, red for rising
      # unemployment
      geom_sf(aes(fill = velocity), color = "black") +
      scale_fill_gradient2(name = "Change (pct. points per month)",
                           low  = "palegreen4",
                           mid  = "white",
                           high = "firebrick2") +

      # Change the map projection to make it look nicer
      coord_sf(crs = "+proj=aea +lat_1=25 +lat_2=50 +lon_0=-100", ndiscr = 0)

### Combine the two graphs
grid.arrange(p1, p2, nrow = 2)
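
### Not part of the original paste: grid.arrange() only draws to the active
### graphics device.  If you also want the combined figure on disk, one common
### approach is to build the layout with arrangeGrob() and hand the result to
### ggsave() (file name and dimensions here are just placeholders):
combined <- arrangeGrob(p1, p2, nrow = 2)
ggsave("trend_unemployment_doppler.png", combined, width = 8, height = 10)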