From 74d3f7042ab321c18df580cd87681dedc16a60f9 Mon Sep 17 00:00:00 2001
From: Soumyadipta2020
Date: Fri, 6 Sep 2024 22:38:11 +0530
Subject: [PATCH] Add Phi-3.5-mini and Llama-3.1 models; small improvements

---
 .gitignore                   |   3 +
 forecasting_tool/functions.R | 132 +++++++++++++++++++++
 forecasting_tool/global.R    | 217 +----------------------------------
 forecasting_tool/helper.R    |  32 ++++++
 forecasting_tool/server.R    |  32 +++---
 forecasting_tool/theme.R     |  80 +++++++++++++
 forecasting_tool/ui.R        |   7 +-
 7 files changed, 270 insertions(+), 233 deletions(-)
 create mode 100644 forecasting_tool/functions.R
 create mode 100644 forecasting_tool/theme.R

diff --git a/.gitignore b/.gitignore
index e75435c..1d90092 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,3 +47,6 @@ po/*~
 
 # RStudio Connect folder
 rsconnect/
+
+# api file
+api.R
\ No newline at end of file
diff --git a/forecasting_tool/functions.R b/forecasting_tool/functions.R
new file mode 100644
index 0000000..a91caaa
--- /dev/null
+++ b/forecasting_tool/functions.R
@@ -0,0 +1,132 @@
+#### mode ####
+getmode <- function(v, na = TRUE) {
+  if(na == TRUE){
+    v <- v[!is.na(v)]
+  }
+  uniqv <- unique(v)
+  uniqv[which.max(tabulate(match(v, uniqv)))]
+}
+
+#### Forecasting functions ####
+#### LSTM ####
+lstm_forecast <- function(ts_data, horizon) {
+  library(keras)
+
+  # Normalize the data
+  normalized_data <- scale(ts_data)
+
+  # Split data into train and test sets
+  train_data <- normalized_data[1:(length(normalized_data) - horizon)]
+  test_data <- normalized_data[(length(normalized_data) - horizon + 1):length(normalized_data)]
+
+  # Prepare the training data
+  train_x <- train_y <- list()
+  for (i in 1:(length(train_data) - horizon)) {
+    train_x[[i]] <- matrix(train_data[i:(i + horizon - 1)], nrow = horizon, ncol = 1)
+    train_y[[i]] <- train_data[(i + horizon)]
+  }
+
+  train_x <- array_reshape(train_x, c(length(train_x), horizon, 1))
+  train_y <- unlist(train_y)
+
+  # Define the LSTM model architecture
+  model <- keras_model_sequential()
+  model %>%
+    layer_lstm(units = 50, input_shape = c(horizon, 1)) %>%
+    layer_dense(units = 1)
+
+  # Compile the model
+  model %>% compile(
+    loss = "mean_squared_error",
+    optimizer = optimizer_adam()
+  )
+
+  # Train the model
+  model %>% fit(
+    train_x, train_y,
+    epochs = 100,
+    batch_size = 32,
+    verbose = 0
+  )
+
+  # Make predictions for the test set
+  test_x <- array_reshape(test_data, dim = c(length(test_data) / horizon, horizon, 1))
+  predicted_values <- model %>% predict(test_x)
+
+  # Denormalize the predicted values
+  denormalized_values <- predicted_values * sd(ts_data) + mean(ts_data)
+
+  # Create the forecast object
+  forecast_values <- ts(denormalized_values, frequency = 12)
+
+  return(forecast_values)
+}
+
+#### AutoML ####
+automl_forecast <- function(ts_data, horizon) {
+  # Convert the time series data to a data frame
+  data_df <- data.frame(Date = as.numeric(time(ts_data)), Value = as.numeric(ts_data))
+
+  # Initialize the H2O cluster
+  h2o.init()
+
+  # Convert the data frame to an H2O frame
+  h2o_df <- as.h2o(data_df, destination_frame = "ts_data")
+
+  # Set the target variable
+  target <- "Value"
+
+  # Train AutoML model
+  aml <- h2o.automl(x = setdiff(colnames(h2o_df), target),
+                    y = target,
+                    training_frame = h2o_df,
+                    max_runtime_secs = 300,
+                    max_models = 10)
+
+  # Generate predictions for the future horizon
+  forecast_df <- data.frame(Date = seq(time(ts_data)[length(ts_data)] + 1/12, by = 1/12, length.out = horizon))
+  forecast_h2o <- as.h2o(forecast_df, destination_frame = "forecast_data")
+  forecast_predictions <- h2o.predict(aml@leader, forecast_h2o)
+
+  # Convert the predictions to a time series object
+  forecast_values <- ts(as.vector(forecast_predictions$predict), frequency = 12)
+
+  # Shut down the H2O cluster
+  h2o.shutdown(prompt = FALSE)
+
+  return(forecast_values)
+}
+
+#### ARFIMA Forecast ####
+arfima_forecast <- function(x, h){
+  {
+    arfima_fit <- auto.arima(x, seasonal = FALSE, stepwise = FALSE,
+                             approximation = FALSE, allowdrift = FALSE)
+
+    # Extract AR and MA orders from the fitted model
+    ar_order <- arimaorder(arfima_fit)[1] # AR order
+    ma_order <- arimaorder(arfima_fit)[3] # MA order
+
+    # Create dynamic ARFIMA specification
+    spec <- arfimaspec(mean.model = list(armaOrder = c(ar_order, ma_order),
+                                         include.mean = TRUE, arfima = TRUE))
+
+    result <- try({
+      # Fit the model using the dynamic specification
+      garch_fit <- arfimafit(spec = spec, data = x)
+
+      # Generate forecasts directly using arfimaforecast
+      forecast_values <- arfimaforecast(garch_fit, n.ahead = h)
+    }, silent = TRUE)
+
+    if (inherits(result, "try-error")) {
+      forecast_vector <- rep(0, times = h)
+    } else {
+      # Extract the forecasted values as a vector
+      forecast_vector <- as.vector(forecast_values@forecast$seriesFor)
+    }
+
+
+    return(forecast_vector)
+  }
+}
\ No newline at end of file
diff --git a/forecasting_tool/global.R b/forecasting_tool/global.R
index 3530432..eabd0a6 100644
--- a/forecasting_tool/global.R
+++ b/forecasting_tool/global.R
@@ -46,6 +46,9 @@ library(tsfgrnn)
 library(fontawesome)
 
 source("helper.R")
+source("theme.R")
+source("api.R")
+source("functions.R")
 
 # js scroll code ####
 jscode_1 <- '
@@ -55,220 +58,6 @@ jscode_1 <- '
   }
 '
 
-# API ####
-openai_api_key <<- ""
-gemini_api_key <<- ""
-claude_api_key <<-
-  ""
-hugging_api_key <<- ""
-
 #### data edit ####
 data_edit <<- data.frame(row = NA, col = NA, value = NA)
 
-#### dashboard sidebar theme ####
-dashboard_sidebar_theme <- create_theme(
-  adminlte_sidebar(
-    # dark_bg = "#006666",
-    # dark_hover_bg = "#202020"
-  )
-)
-
-#### dashboard theme ####
-dashboard_body_theme <- create_theme(
-  theme = "cosmo",
-  bs_vars_button(
-    default_color = "#FFF",
-    default_bg = "#0066cc",
-    # default_border = "white",
-    border_radius_base = "10px"
-  ),
-  bs_vars_tabs(
-    border_color = "black",
-    active_link_hover_bg = "#CCE5FF"
-  ),
-  output_file = NULL
-)
-
-#### user panel theme ####
-myDashboardUser <- function (..., name = NULL, image = NULL, title = NULL, subtitle = NULL,
-                             footer = NULL)
-{
-  if (!is.null(title)) {
-    line_1 <- paste0(name, " - ", title)
-  }
-  else {
-    line_1 <- name
-  }
-  if (!is.null(subtitle)) {
-    user_text <- shiny::tags$p(line_1, shiny::tags$small(subtitle))
-    user_header_height <- NULL
-  }
-  else {
-    user_text <- shiny::tags$p(line_1)
-    user_header_height <- shiny::tags$script(
-      shiny::HTML("$(\".user-header\").css(\"height\", \"145px\")")
-    )
-  }
-  userTag <- shiny::tagList(
-    shiny::tags$head(
-      shiny::tags$script("$(function() {\n
-        $('.dashboard-user').on('click', function(e){\n
-          e.stopPropagation();\n
-        });\n
-      });\n
-      ")),
-    # we need to add an id and the class `action-button` to this link
-    shiny::tags$a(id = "user_dropdown",
-                  href = "#",
-                  class = "dropdown-toggle action-button",
-                  `data-toggle` = "dropdown",
-                  shiny::tags$img(src = image,
-                                  class = "user-image",
-                                  alt = "User Image"),
-                  shiny::tags$span(class = "hidden-xs",
-                                   name)
-    ),
-    shiny::tags$ul(class = "dropdown-menu dashboard-user",
-                   shiny::tags$li(class = "user-header",
-                                  if (!is.null(user_header_height)) user_header_height,
-                                  shiny::tags$img(src = image,
-                                                  class = "img-circle",
-                                                  alt = "User Image",
-                                                  style="border:red"),
-                                  user_text,
-                                  style="background:#0066cc"),
-                   if (length(list(...)) > 0)
-                     shiny::tags$li(class = "user-body", ...),
-                   if (!is.null(footer))
-                     shiny::tags$li(class = "user-footer", footer)
-    )
-  )
-  userTag
-}
-
-
-
-#### mode ####
-getmode <- function(v, na = TRUE) {
-  if(na == TRUE){
-    v <- v[!is.na(v)]
-  }
-  uniqv <- unique(v)
-  uniqv[which.max(tabulate(match(v, uniqv)))]
-}
-
-#### Forecasting functions ####
-#### LSTM ####
-lstm_forecast <- function(ts_data, horizon) {
-  library(keras)
-
-  # Normalize the data
-  normalized_data <- scale(ts_data)
-
-  # Split data into train and test sets
-  train_data <- normalized_data[1:(length(normalized_data) - horizon)]
-  test_data <- normalized_data[(length(normalized_data) - horizon + 1):length(normalized_data)]
-
-  # Prepare the training data
-  train_x <- train_y <- list()
-  for (i in 1:(length(train_data) - horizon)) {
-    train_x[[i]] <- matrix(train_data[i:(i + horizon - 1)], nrow = horizon, ncol = 1)
-    train_y[[i]] <- train_data[(i + horizon)]
-  }
-
-  train_x <- array_reshape(train_x, c(length(train_x), horizon, 1))
-  train_y <- unlist(train_y)
-
-  # Define the LSTM model architecture
-  model <- keras_model_sequential()
-  model %>%
-    layer_lstm(units = 50, input_shape = c(horizon, 1)) %>%
-    layer_dense(units = 1)
-
-  # Compile the model
-  model %>% compile(
-    loss = "mean_squared_error",
-    optimizer = optimizer_adam()
-  )
-
-  # Train the model
-  model %>% fit(
-    train_x, train_y,
-    epochs = 100,
-    batch_size = 32,
-    verbose = 0
-  )
-
-  # Make predictions for the test set
-  test_x <- array_reshape(test_data, dim = c(length(test_data) / horizon, horizon, 1))
-  predicted_values <- model %>% predict(test_x)
-
-  # Denormalize the predicted values
-  denormalized_values <- predicted_values * sd(ts_data) + mean(ts_data)
-
-  # Create the forecast object
-  forecast_values <- ts(denormalized_values, frequency = 12)
-
-  return(forecast_values)
-}
-
-#### AutoML ####
-automl_forecast <- function(ts_data, horizon) {
-  # Convert the time series data to a data frame
-  data_df <- data.frame(Date = as.numeric(time(ts_data)), Value = as.numeric(ts_data))
-
-  # Initialize the H2O cluster
-  h2o.init()
-
-  # Convert the data frame to an H2O frame
-  h2o_df <- as.h2o(data_df, destination_frame = "ts_data")
-
-  # Set the target variable
-  target <- "Value"
-
-  # Train AutoML model
-  aml <- h2o.automl(x = setdiff(colnames(h2o_df), target),
-                    y = target,
-                    training_frame = h2o_df,
-                    max_runtime_secs = 300,
-                    max_models = 10)
-
-  # Generate predictions for the future horizon
-  forecast_df <- data.frame(Date = seq(time(ts_data)[length(ts_data)] + 1/12, by = 1/12, length.out = horizon))
-  forecast_h2o <- as.h2o(forecast_df, destination_frame = "forecast_data")
-  forecast_predictions <- h2o.predict(aml@leader, forecast_h2o)
-
-  # Convert the predictions to a time series object
-  forecast_values <- ts(as.vector(forecast_predictions$predict), frequency = 12)
-
-  # Shut down the H2O cluster
-  h2o.shutdown(prompt = FALSE)
-
-  return(forecast_values)
-}
-
-#### ARFIMA FOrecast ####
-arfima_forecast <- function(x, h){
-  {
-    arfima_fit <- auto.arima(x, seasonal = FALSE, stepwise = FALSE,
-                             approximation = FALSE, allowdrift = FALSE)
-
-    # Extract AR and MA orders from the fitted model
-    ar_order <- arimaorder(arfima_fit)[1] # AR order
-    ma_order <- arimaorder(arfima_fit)[3] # MA order
-
-    # Create dynamic ARFIMA specification
-    spec <- arfimaspec(mean.model = list(armaOrder = c(ar_order, ma_order),
-                                         include.mean = TRUE, arfima = TRUE))
-
-    # Fit the model using the dynamic specification
-    garch_fit <- arfimafit(spec = spec, data = x)
-
-    # Generate forecasts directly using arfimaforecast
-    forecast_values <- arfimaforecast(garch_fit, n.ahead = h)
-
-    # Extract the forecasted values as a vector
-    forecast_vector <- as.vector(forecast_values@forecast$seriesFor)
-    return(forecast_vector)
-  }
-}
\ No newline at end of file
diff --git a/forecasting_tool/helper.R b/forecasting_tool/helper.R
index 08dd73b..729453f 100644
--- a/forecasting_tool/helper.R
+++ b/forecasting_tool/helper.R
@@ -26,6 +26,38 @@ chat <- function(user_message,
   return(openai_chat_response$choices$message$content)
 }
 
+# Nvidia chat ####
+chat_nvidia <- function(user_message,
+                        history = NULL,
+                        api_key,
+                        model_llm,
+                        temp = 0.2,
+                        topp = 0.7,
+                        max_token = 1024) {
+  user_prompt <- list(list(role = "user", content = user_message))
+  prompt <- c(history, user_prompt) |> purrr::compact()
+
+  base_url <- "https://integrate.api.nvidia.com/v1"
+  body <- list(model = model_llm, messages = prompt, temperature = temp, top_p = topp,
+               max_tokens = max_token)
+  req <-
+    resp <-
+    request(base_url) |>
+    req_url_path_append("chat/completions") |>
+    req_auth_bearer_token(token = api_key) |>
+    req_headers("Content-Type" = "application/json") |>
+    req_user_agent("Soumyadipta Das") |>
+    req_body_json(body) |>
+    req_retry(max_tries = 4) |>
+    req_throttle(rate = 15) |>
+    req_perform()
+
+  openai_chat_response <-
+    resp |> resp_body_json(simplifyVector = TRUE)
+
+  return(openai_chat_response$choices$message$content)
+}
+
 # Google Gemini ####
 gemini <- function(prompt,
                    temperature = 0.7,
diff --git a/forecasting_tool/server.R b/forecasting_tool/server.R
index 775c020..a88a4db 100644
--- a/forecasting_tool/server.R
+++ b/forecasting_tool/server.R
@@ -443,17 +443,7 @@ server <- function(input, output, session) {
     } else if (input$model == "State Space ARIMA") {
       forecast_values <- c(model$y, model$forecast)
     } else if (input$model == "ARFIMA") {
-
-      result <- try({
-        df <- model
-      }, silent = TRUE)
-
-      if (inherits(result, "try-error")) {
-        forecast_values <- c(tsData(), rep(0, times = input$horizon))
-      } else {
       forecast_values <- c(tsData(), model)
-      }
-
     } else {
       forecast_values <- forecast(model, h = input$horizon)
       forecast_values <- c(fitted(model),forecast_values$mean)
@@ -826,14 +816,15 @@ server <- function(input, output, session) {
         token = hugging_api_key,
         max_new_tokens = 1000
       )[[1]][[1]]
-    } else if (input$model_gen == "microsoft-Phi-3-mini") {
+    } else if (input$model_gen == "Phi-3.5-mini") {
       response <-
-        create_completion_huggingface(
-          model = "microsoft/Phi-3-mini-4k-instruct",
+        chat_nvidia(
+          prompt,
           history = rv$chat_history,
-          prompt = prompt,
-          token = hugging_api_key
-        )[[1]][[1]]
+          temp = input$temperature,
+          api_key = nv_api_key,
+          model_llm = "microsoft/phi-3.5-mini-instruct"
+        )
     } else if (input$model_gen == "Yi-1.5") {
       response <-
         create_completion_huggingface(
@@ -843,6 +834,15 @@ server <- function(input, output, session) {
           token = hugging_api_key,
           max_new_tokens = 1000
         )[[1]][[1]]
+    } else if (input$model_gen == "Meta-Llama-3.1") {
+      response <-
+        chat_nvidia(
+          prompt,
+          history = rv$chat_history,
+          temp = input$temperature,
+          api_key = nv_api_key,
+          model_llm = "meta/llama-3.1-405b-instruct"
+        )
     }
 
diff --git a/forecasting_tool/theme.R b/forecasting_tool/theme.R
new file mode 100644
index 0000000..3f9e4c6
--- /dev/null
+++ b/forecasting_tool/theme.R
@@ -0,0 +1,80 @@
+#### dashboard sidebar theme ####
+dashboard_sidebar_theme <- create_theme(
+  adminlte_sidebar(
+    # dark_bg = "#006666",
+    # dark_hover_bg = "#202020"
+  )
+)
+
+#### dashboard theme ####
+dashboard_body_theme <- create_theme(
+  theme = "cosmo",
+  bs_vars_button(
+    default_color = "#FFF",
+    default_bg = "#0066cc",
+    # default_border = "white",
+    border_radius_base = "10px"
+  ),
+  bs_vars_tabs(
+    border_color = "black",
+    active_link_hover_bg = "#CCE5FF"
+  ),
+  output_file = NULL
+)
+
+#### user panel theme ####
+myDashboardUser <- function (..., name = NULL, image = NULL, title = NULL, subtitle = NULL,
+                             footer = NULL)
+{
+  if (!is.null(title)) {
+    line_1 <- paste0(name, " - ", title)
+  }
+  else {
+    line_1 <- name
+  }
+  if (!is.null(subtitle)) {
+    user_text <- shiny::tags$p(line_1, shiny::tags$small(subtitle))
+    user_header_height <- NULL
+  }
+  else {
+    user_text <- shiny::tags$p(line_1)
+    user_header_height <- shiny::tags$script(
+      shiny::HTML("$(\".user-header\").css(\"height\", \"145px\")")
+    )
+  }
+  userTag <- shiny::tagList(
+    shiny::tags$head(
+      shiny::tags$script("$(function() {\n
+        $('.dashboard-user').on('click', function(e){\n
+          e.stopPropagation();\n
+        });\n
+      });\n
+      ")),
+    # we need to add an id and the class `action-button` to this link
+    shiny::tags$a(id = "user_dropdown",
+                  href = "#",
+                  class = "dropdown-toggle action-button",
+                  `data-toggle` = "dropdown",
+                  shiny::tags$img(src = image,
+                                  class = "user-image",
+                                  alt = "User Image"),
+                  shiny::tags$span(class = "hidden-xs",
+                                   name)
+    ),
+    shiny::tags$ul(class = "dropdown-menu dashboard-user",
+                   shiny::tags$li(class = "user-header",
+                                  if (!is.null(user_header_height)) user_header_height,
+                                  shiny::tags$img(src = image,
+                                                  class = "img-circle",
+                                                  alt = "User Image",
+                                                  style="border:red"),
+                                  user_text,
+                                  style="background:#0066cc"),
+                   if (length(list(...)) > 0)
+                     shiny::tags$li(class = "user-body", ...),
+                   if (!is.null(footer))
+                     shiny::tags$li(class = "user-footer", footer)
+    )
+  )
+  userTag
+}
\ No newline at end of file
diff --git a/forecasting_tool/ui.R b/forecasting_tool/ui.R
index a6bfece..a9eb314 100644
--- a/forecasting_tool/ui.R
+++ b/forecasting_tool/ui.R
@@ -385,10 +385,11 @@ ui <- shinydashboardPlus::dashboardPage(
             "model_gen",
             "AI Model",
             choices = c(
+              "Meta-Llama-3.1",
               "Meta-Llama-3",
               # "gpt-3.5-turbo",
               "gemini-pro",
-              "microsoft-Phi-3-mini",
+              "Phi-3.5-mini",
               # "claude-2.1",
               # "claude-instant",
               "google-gemma-7b-it",
@@ -396,11 +397,11 @@ ui <- shinydashboardPlus::dashboardPage(
               "Mistral-v0.3",
               "Yi-1.5"
             ),
-            selected = "Meta-Llama-3-8B-Instruct"
+            selected = "Meta-Llama-3.1"
           ),
           sliderInput(
             "temperature",
-            "Temperature (gemini & gpt only)",
+            "Temperature (gemini, llama-3.1 & Phi only)",
             min = 0,
             max = 1,
             value = 0.5,
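
A minimal usage sketch of the new chat_nvidia() helper added in helper.R, mirroring the call pattern of the "Meta-Llama-3.1" and "Phi-3.5-mini" branches in server.R. It is illustrative only: in the app, httr2 is attached via global.R and nv_api_key is defined in the gitignored api.R, so the library() call, the working directory, the Sys.getenv() key lookup, and the example prompt below are assumptions, not part of this patch.

library(httr2)      # chat_nvidia() builds the request with httr2; purrr must also be installed
source("helper.R")  # assumes the forecasting_tool/ directory is the working directory

# Hypothetical key lookup for this sketch; the app itself defines nv_api_key in api.R (gitignored).
nv_api_key <- Sys.getenv("NVIDIA_API_KEY")

reply <- chat_nvidia(
  "Summarise the last 12 months of sales in one sentence.",  # example prompt
  history   = NULL,                             # or a list of prior list(role = ..., content = ...) messages
  api_key   = nv_api_key,
  model_llm = "meta/llama-3.1-405b-instruct",   # or "microsoft/phi-3.5-mini-instruct"
  temp      = 0.5
)
cat(reply)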