@@ -74,7 +74,7 @@ get_czso_catalogue <- function() {
                 timeout = 30000,
                 debug = "on",
                 run = "Run Query")
-  usethis::ui_info("Downloading")
+  usethis::ui_info("Reading data from data.gov.cz")
   cat_rslt <- httr::GET(sparql_url, query = params,
                         # accept("application/sparql-results+json"),
                         httr::user_agent(ua_string),
@@ -83,13 +83,13 @@ get_czso_catalogue <- function() {
 
   # print(params$query)
 
-  usethis::ui_info("Reading data")
   if (httr::status_code(cat_rslt) > 200) {
     print(httr::http_status(cat_rslt))
     rslt <- httr::content(cat_rslt, as = "text")
   } else
     rslt <- cat_rslt %>% httr::content(as = "text")
   rslt <- readr::read_csv(rslt, col_types = readr::cols(modified = "T"))
+  usethis::ui_done("Done downloading and reading data")
   usethis::ui_info("Transforming data")
   rslt <- dplyr::group_by(rslt, dataset_iri) %>%
     dplyr::mutate(keywords = stringr::str_c(keywords_all, collapse = "; ")) %>%
@@ -173,7 +173,7 @@ get_czso_table <- function(dataset_id, force_redownload = F, resource_num = 1) {
   dir.create(td, showWarnings = F, recursive = T)
   dfile <- paste0(td, "/ds_", dataset_id, ".", ext)
   if (file.exists(dfile) & !force_redownload) {
-    message(stringr::str_glue("File already in {td}, not downloading. Set `force_redownload` to TRUE if needed."))
+    usethis::ui_info("File already in {td}, not downloading. Set `force_redownload` to TRUE if needed.")
   } else {
     utils::download.file(url, dfile, headers = ua_header)
   }
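
Side note on the last hunk: the `{td}` placeholder still works after dropping `stringr::str_glue()`, because the usethis ui_* helpers pass their message through glue interpolation in the caller's environment. A minimal sketch of that behaviour (the temporary path below is only an illustration, not taken from the package):

    library(usethis)
    # ui_info() glue-interpolates {td} from the calling environment,
    # so the explicit str_glue() wrapper is no longer needed.
    td <- file.path(tempdir(), "czso")
    ui_info("File already in {td}, not downloading. Set `force_redownload` to TRUE if needed.")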