Shiny + future + plumber
#
# This is a Shiny web application. You can run the application by
# clicking the 'Run App' button above.
#
# Find out more about building applications with Shiny here:
#
#    http://shiny.rstudio.com/
#
library(shiny)
library(future)
plan("multisession")
library(httr)
library(magrittr)

fast <- function(t, which_me = "First") {
  force(t)
  paste(which_me, "fast done.") |>
    paste(Sys.time())
}

slow <- function(t, which_me = "First") {
  Sys.sleep(t)
  paste(which_me, "slow done.") |>
    paste(Sys.time())
}

plumber_fast <- function(t, which_me = "first") {
  connectApiUrl <- "http://127.0.0.1:9038"
  GET(connectApiUrl, path = "fast",
      query = list(t = t, which_me = which_me)) %>%
    content("parsed") |>
    paste(Sys.time())
}

plumber_slow <- function(t, which_me = "first") {
  connectApiUrl <- "http://127.0.0.1:9038"
  GET(connectApiUrl, path = "slow",
      query = list(t = t, which_me = which_me)) %>%
    content("parsed") |>
    paste(Sys.time())
}

plumber_future_fast <- function(t, which_me = "first") {
  connectApiUrl <- "http://127.0.0.1:9038"
  GET(connectApiUrl, path = "future_fast",
      query = list(t = t, which_me = which_me)) %>%
    content("parsed") |>
    paste(Sys.time())
}

plumber_future_slow <- function(t, which_me = "first") {
  connectApiUrl <- "http://127.0.0.1:9038"
  GET(connectApiUrl, path = "future_slow",
      query = list(t = t, which_me = which_me)) %>%
    content("parsed") |>
    paste(Sys.time())
}
# Note: the plumber-style endpoint below is leftover here; it is not used
# by the Shiny app that follows.
#* Perform slow computation
#* Echo back the input
#* @get /fast
function(t = 10) {
  Sys.sleep(t)
  list(res = TRUE)
}
# Define the UI
ui <- fluidPage(
  # Application title
  titlePanel("Sequences of computations"),
  # Sidebar with a slider input for the duration of the slow computations
  sidebarLayout(
    sidebarPanel(
      sliderInput(
        "slow_t",
        "Seconds of slowness of slow computations:",
        min = 1,
        max = 30,
        value = 5
      ),
      checkboxInput("fut", "Use future?"),
      checkboxInput("plumb", "Use plumber?")
    ),
    # Show the result of each computation
    mainPanel(
      textOutput("first_fast"),
      textOutput("first_slow"),
      textOutput("second_slow"),
      textOutput("second_fast"),
      textOutput("sequential")
    )
  )
)
# Define the server logic
server <- function(input, output) {
  output$first_fast <- renderText({
    t_slow <- req(input$slow_t)
    if (input$fut) {
      if (input$plumb) {
        future::future(plumber_future_fast(t_slow, "First"))
      } else {
        future::future({fast(t_slow, "First")})
      }
    } else {
      if (input$plumb) {
        plumber_fast(t_slow, "First")
      } else {
        fast(t_slow, "First")
      }
    }
  })
  output$first_slow <- renderText({
    t_slow <- req(input$slow_t)
    if (input$fut) {
      if (input$plumb) {
        future::future(plumber_future_slow(t_slow, "First"))
      } else {
        future::future({slow(t_slow, "First")})
      }
    } else {
      if (input$plumb) {
        plumber_slow(t_slow, "First")
      } else {
        slow(t_slow, "First")
      }
    }
  })
  output$second_slow <- renderText({
    t_slow <- req(input$slow_t)
    if (input$fut) {
      if (input$plumb) {
        future::future(plumber_future_slow(t_slow, "Second"))
      } else {
        future::future({slow(t_slow, "Second")})
      }
    } else {
      if (input$plumb) {
        plumber_slow(t_slow, "Second")
      } else {
        slow(t_slow, "Second")
      }
    }
  })
  output$second_fast <- renderText({
    t_slow <- req(input$slow_t)
    if (input$fut) {
      if (input$plumb) {
        future::future(plumber_future_fast(t_slow, "Second"))
      } else {
        future::future({fast(t_slow, "Second")})
      }
    } else {
      if (input$plumb) {
        plumber_fast(t_slow, "Second")
      } else {
        fast(t_slow, "Second")
      }
    }
  })
  output$sequential <- renderText({
    t_slow <- req(input$slow_t)
    if (input$fut) {
      if (input$plumb) {
        future::future(plumber_future_fast(t_slow, "Second"))
        future::future(plumber_future_slow(t_slow, "Second"))
        future::future(plumber_future_slow(t_slow, "Second"))
        future::future(plumber_future_fast(t_slow, "Second"))
        "Sequential done."
      } else {
        future::future({fast(t_slow, "Second")})
        future::future({slow(t_slow, "Second")})
        future::future({slow(t_slow, "Second")})
        future::future({fast(t_slow, "Second")})
        "Sequential done."
      }
    } else {
      if (input$plumb) {
        plumber_fast(t_slow, "Second")
        plumber_slow(t_slow, "Second")
        plumber_slow(t_slow, "Second")
        plumber_fast(t_slow, "Second")
        "Sequential done."
      } else {
        fast(t_slow, "Second")
        slow(t_slow, "Second")
        slow(t_slow, "Second")
        fast(t_slow, "Second")
        "Sequential done."
      }
    }
  })
}
# Run the application
shinyApp(ui = ui, server = server)
# plumber API queried by the Shiny app above (the app expects it at
# http://127.0.0.1:9038).
library(plumber)
library(future)
plan("multisession")

#* @serializer text
#* Perform fast computation
#* @get /fast
function(t, which_me = "First") {
  cat("fast\n")
  force(t)
  list(res = paste(which_me, "fast done."))
}

#* @serializer text
#* Perform slow computation
#* @get /slow
function(t, which_me = "First") {
  cat("slow\n")
  Sys.sleep(as.numeric(t))  # query parameters arrive as strings
  list(res = paste(which_me, "slow done."))
}

#* @serializer text
#* Perform fast computation
#* @get /future_fast
function(t, which_me = "First") {
  cat("future fast\n")
  future::future({
    force(t)
    list(res = paste(which_me, "fast done."))
  })
}

#* @serializer text
#* Perform slow computation
#* @get /future_slow
function(t, which_me = "First") {
  cat("future slow\n")
  future::future({
    Sys.sleep(as.numeric(t))  # query parameters arrive as strings
    list(res = paste(which_me, "slow done."))
  })
}
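Neither script shows how the two processes are started. A minimal sketch (assuming the scripts above are saved as plumber.R and app.R; both file names are assumptions, the gist does not name them) is to serve the API in one R session and run the app in another:

# In a first R session: serve the API on the port the app expects (9038).
library(plumber)
pr("plumber.R") |>
  pr_run(port = 9038)

# In a second R session: launch the Shiny app.
shiny::runApp("app.R")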
The twin usage of {future} (in the two scripts) allows both the app to query the API asynchronously (i.e., to issue multiple queries even when a result has not yet come back) and the service to execute the queried computations asynchronously (i.e., to evaluate concurrent computations).
That way, the (single) app process is not frozen while waiting for results to be returned by the service (queried through the API) and can process "other" stuff in the meantime. At the same time, the (single) service process is not frozen while carrying out one computation, waiting for its result before executing the next one: it can run many of them "simultaneously", returning the results to the app as soon as they are ready.
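A rough way to see both layers at once from the console is the sketch below. It is not part of the gist: it assumes the API above is already running on 127.0.0.1:9038 and that enough multisession workers are free on both the client and the server side; t = 5 and the "A"/"B" labels are arbitrary illustrative values.

library(future)
library(httr)
plan("multisession")

t0 <- Sys.time()
# Client-side futures: the two GETs are sent without waiting for each other.
f1 <- future(GET("http://127.0.0.1:9038", path = "future_slow",
                 query = list(t = 5, which_me = "A")))
f2 <- future(GET("http://127.0.0.1:9038", path = "future_slow",
                 query = list(t = 5, which_me = "B")))
# Server-side futures: the API evaluates both sleeps in parallel workers.
value(f1)
value(f2)
Sys.time() - t0  # roughly 5 seconds instead of 10 when both layers are asynchronous

Pointing the same two requests at /slow instead of /future_slow, the client-side futures still fire concurrently, but the single API process handles the sleeps one after the other, so the total time is roughly doubled.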
That said (as far as I have understood), the (single) app process that monitors output invalidation remains sequential and synchronous. So even though the app can request multiple computations at the "same" time, the service can evaluate multiple queries at the "same" time, and the service returns results to the app as soon as they are ready (so not necessarily in the order they were queried), the app will still show the results in order, because it waits for one output to be updated before updating the next outdated one.
The real power of all of this is not (only) for the single user (who gets to see all the results at roughly the same time, within the time of the single slowest one), but for multiple concurrent users of the same app: their computations do not run in the process that renders and orchestrates the app (because they go through the services behind the API), so the app can manage multiple queries from multiple users simultaneously while the computations also run simultaneously, and users do not have to wait for each other's results before seeing their own.