
modified: Julia/fitsDB.jl
jvo203 committed Jan 12, 2023
1 parent 13c1524 commit 034b2e2
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions Julia/fitsDB.jl
@@ -25,10 +25,10 @@ function get_fits_total(conn, threshold)
 # threshold is given in GB

 # above the threshold
-strSQL = "select sum(file_size) from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size>=$(threshold)*1024*1024*1024.;"
+# strSQL = "select sum(file_size) from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size>=$(threshold)*1024*1024*1024.;"

 # below the threshold but over 20GB
-# strSQL = "select sum(file_size) from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size<$(threshold)*1024*1024*1024. and file_size>=20*1024*1024*1024.;"
+strSQL = "select sum(file_size) from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size<$(threshold)*1024*1024*1024. and file_size>=20*1024*1024*1024.;"

 res = execute(conn, strSQL)
 data = columntable(res)
@@ -41,10 +41,10 @@ function get_datasets(conn, threshold)
 # threshold is given in GB

 # above the threshold
-strSQL = "select dataset_id, file_size, path from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size>=$(threshold)*1024*1024*1024. order by file_size desc;"
+# strSQL = "select dataset_id, file_size, path from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size>=$(threshold)*1024*1024*1024. order by file_size desc;"

 # below the threshold but over 20GB
-# strSQL = "select dataset_id, file_size, path from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size<$(threshold)*1024*1024*1024. and file_size>=20*1024*1024*1024. order by file_size desc;"
+strSQL = "select dataset_id, file_size, path from cube where binf1=1 and binf2=1 and binf3=1 and binf4=1 and file_size<$(threshold)*1024*1024*1024. and file_size>=20*1024*1024*1024. order by file_size desc;"

 res = execute(conn, strSQL)
 data = columntable(res)
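
get_datasets produces the columns that the copy loop further down zips over. A hedged sketch of that unpacking, assuming the result keys match the selected column names dataset_id, file_size, and path:

    # columntable returns a NamedTuple of column vectors.
    data = columntable(res)

    # Assumed key names; the actual keys follow the query's select list.
    ids   = data.dataset_id
    sizes = data.file_size
    paths = data.path

    for (datasetid, file_size, path) in zip(ids, sizes, paths)
        println("$datasetid :: $(round(file_size / 1024^3, digits=1)) GB :: $path")
    end
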
@@ -161,7 +161,7 @@ function preload_dataset(datasetid)
 end

 # then wait 30 seconds to allow for the 60s dataset timeout (avoid a RAM overload)
-sleep(61) # or not ...
+sleep(31) # or not ...
 end

 # conservative assumptions
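
Why 31 seconds: with the 60 s idle timeout the comment mentions, the pause bounds how many preloaded datasets can sit in RAM at once. A rough, illustrative calculation, assuming evictions happen promptly at the timeout:

    # Pacing arithmetic (illustrative only).
    const TIMEOUT_S = 60   # assumed dataset idle timeout
    const PAUSE_S   = 31   # pause after each preload (previously 61)
    max_resident = ceil(Int, TIMEOUT_S / PAUSE_S)  # ~2 now, ~1 with the old 61 s pause
    println("at most ~$max_resident datasets resident between evictions")
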
@@ -205,7 +205,7 @@ for (datasetid, file_size, path) in zip(ids, sizes, paths)
 # copy should be enabled for large datasets only
 # otherwise we will run out of disk space
 # println("COPY: #$count/$total_count :: $datasetid :: $(round(file_size / 1024^3,digits=1)) GB")
-copy_dataset(datasetid, file_size, path)
+# copy_dataset(datasetid, file_size, path)

 # increment the index
 count = count + 1
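
Per the comments, copying should be reserved for large datasets so the disk does not fill up. An illustrative gate for this loop body; copy_dataset(datasetid, file_size, path) is the file's own call, but the 20 GB cutoff here is an assumption, not the file's actual policy:

    # Illustrative size gate around the (currently disabled) copy.
    const COPY_CUTOFF_BYTES = 20 * 1024^3  # assumed cutoff
    if file_size >= COPY_CUTOFF_BYTES
        copy_dataset(datasetid, file_size, path)
    else
        println("skipping copy of $datasetid ($(round(file_size / 1024^3, digits=1)) GB)")
    end
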
