From c9014e4692e880eda75794543d733cdcb3e823c1 Mon Sep 17 00:00:00 2001 From: "Jared A. Lee" <44878591+jaredalee@users.noreply.github.com> Date: Fri, 5 Jul 2024 14:20:33 -0600 Subject: [PATCH 1/3] Update namelist.input Minor updates to the Hurricane Matthew test case WRF namelist, none of which will cause WRF to run any differently. The changes reduce potential confusion for users (only the first column is being read in this 1-domain simulation, but the dates in the second column were made the same as the first for clarity), and add commas to the end of the lines in which they were previously omitted (in case the absence of a comma confuses some text parsers). --- .../Hurricane_Matthew/WRF/namelist.input | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/use_cases/Hurricane_Matthew/WRF/namelist.input b/use_cases/Hurricane_Matthew/WRF/namelist.input index d38b541..598ccda 100644 --- a/use_cases/Hurricane_Matthew/WRF/namelist.input +++ b/use_cases/Hurricane_Matthew/WRF/namelist.input @@ -3,17 +3,17 @@ run_hours = 48, run_minutes = 0, run_seconds = 0, - start_year = 2016, 2019, - start_month = 10, 09, - start_day = 06, 04, - start_hour = 00, 12, - end_year = 2016, 2019, - end_month = 10, 09, - end_day = 08, 06, + start_year = 2016, 2016, + start_month = 10, 10, + start_day = 06, 06, + start_hour = 00, 00, + end_year = 2016, 2016, + end_month = 10, 10, + end_day = 08, 08, end_hour = 00, 00, interval_seconds = 21600, input_from_file = .true.,.true., - history_interval = 180, 60, + history_interval = 180, 180, frames_per_outfile = 1, 1, restart = .false., restart_interval = 1440, @@ -25,7 +25,7 @@ auxhist22_outname = "wrfout_zlev_d_", auxhist22_interval = 180, 180, frames_per_auxhist22 = 1, 1, - io_form_auxhist22 = 2 + io_form_auxhist22 = 2, auxhist23_outname = "wrfout_plev_d_", auxhist23_interval = 180, 180, frames_per_auxhist23 = 1, 1, @@ -53,7 +53,7 @@ parent_grid_ratio = 1, 3, parent_time_step_ratio = 1, 3, feedback = 1, - smooth_option = 0 + smooth_option = 0, / &physics @@ -112,10 +112,10 @@ &diags z_lev_diags = 1, num_z_levels = 6, - z_levels = -80,-100,-200,-300,-400,-500 + z_levels = -80,-100,-200,-300,-400,-500, p_lev_diags = 1, num_press_levels = 10, - press_levels = 92500,85000,70000,50000,40000,30000,25000,20000,15000,10000 + press_levels = 92500,85000,70000,50000,40000,30000,25000,20000,15000,10000, use_tot_or_hyd_p = 1, solar_diagnostics = 0, / From 1436900013ce54d8b510c44f68bb7b5da86a11b2 Mon Sep 17 00:00:00 2001 From: Ben Trumbore Date: Mon, 29 Jul 2024 11:18:01 -0400 Subject: [PATCH 2/3] Add Windows and Red Cloud versions of the Matthew tutorial (#60) * Roll back change to usecases.rst * Draft of the Jetstream2-Matthew tutorial instructions The instructions still need to be retested and the text vetted by Rich. It would be great if we could have a section at the end to validate or see the results. * Tests to see what Markdown format I should really use * Crazy-ass markdown language * Sigh * Switch the incorrect markdown to the correct, yet horrible, version * Edit text, fix typos and links * Make sure all commands work properly, add more info. 
* A few more tweaks before others view it * Revisions based on feedback and some procedural changes * Final edits before initial publication * fixed URL links, fixed header underline lengths, added link to matthew jetstream page from use cases page * added ID so heading can be linked using :ref: * use double underscores to create anonymous reference for links that have the same text * updated versions of actions to prevent node.js deprecated warnings * added orphan identifier to prevent warning that this page is not included in the toctree * fixed typos * Refactor the existing doc in preparation for adding METPlus doc * Add initial version of METPlus instructions These steps were migrated from work by George, modified to fit with the WRF instructions already present here. * Tweaks to documentation after full testing The final upper_air analysis still fails due to a lack of pressure level data files from the WRF simulation. * Get METPlus working Also moved around some of the commands and updated some text. * Edits from final testing pass on new and revised content * Final tweaks before creating a pull request * Add text about viewing the output of METPlus * changed METPlus to METplus * split long docker run commands into multiple lines for better readability * ignore auto-generated file * use env var to reference obs data volume to more easily adapt to other use cases * rewording and avoid using analysis to describe the METplus verification step to avoid confusion with the METplus Analysis tools that will be added later to generate images * A few formatting tweaks after the code review * ignore auto-generated file * Refactor instructions in preparation for edits related to changes in how WRF is run. * Add a run script and config file and update the existing config file to work with new documentation * change run.sh permissions * Remove data downloading from run.sh * Finalize edits of the tutorial to use config files from new location and add data download steps. * Add stub of Windows version of Matthew tutorial * Add link to new Windows tutorial for Matthew * Change some setup commands to work for Windows * Fix numerous environment variable references and path connectors * version containing Windows Server 2022 instructions, which will be rolled back * Saving work before rebooting system * Continued editing, stopped in data download section * Update instructions for downloading data * Update the sections on running WRF and METplus. * Final tweaks to the text before creating a pull request * Add link to the Red Cloud version of the Matthew tutorial * Add the Red Cloud Matthew tutorial that Zilu created. * Fix compilation issues with Windows doc.
* Fix compilation issues with Red Cloud RST code * Try to fix error with duplicate link text "request" --------- Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> --- .gitignore | 1 + docs/.gitignore | 1 + docs/Users_Guide/configuration.rst | 2 +- docs/Users_Guide/matthewjetstream.rst | 208 +++++++--- docs/Users_Guide/matthewredcloud.rst | 382 ++++++++++++++++++ docs/Users_Guide/matthewwindows.rst | 265 ++++++++++++ docs/Users_Guide/usecases.rst | 10 + .../Hurricane_Matthew/WRF/namelist.input | 62 +-- use_cases/Hurricane_Matthew/WRF/namelist.wps | 35 ++ use_cases/Hurricane_Matthew/WRF/run.sh | 60 +++ 10 files changed, 943 insertions(+), 83 deletions(-) create mode 100644 docs/Users_Guide/matthewredcloud.rst create mode 100644 docs/Users_Guide/matthewwindows.rst create mode 100644 use_cases/Hurricane_Matthew/WRF/namelist.wps create mode 100755 use_cases/Hurricane_Matthew/WRF/run.sh diff --git a/.gitignore b/.gitignore index a271a97..7564616 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ *~ .vs +/.DS_Store diff --git a/docs/.gitignore b/docs/.gitignore index e35d885..a1516d0 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -1 +1,2 @@ _build +/.DS_Store diff --git a/docs/Users_Guide/configuration.rst b/docs/Users_Guide/configuration.rst index d7022e6..95a0225 100644 --- a/docs/Users_Guide/configuration.rst +++ b/docs/Users_Guide/configuration.rst @@ -64,7 +64,7 @@ Use this for the vars_io.txt file for the Hurricane Matthew case, from the Githu https://github.com/NCAR/i-wrf/blob/main/use_cases/Hurricane_Matthew/WRF/vars_io.txt ^^^^^^^^^^^^^^^^^^^ -METPlus Config File +METplus Config File ^^^^^^^^^^^^^^^^^^^ For the METplus configuration file for the Hurricane Matthew case, please use this file on the Github repository: diff --git a/docs/Users_Guide/matthewjetstream.rst b/docs/Users_Guide/matthewjetstream.rst index 8f0ae79..58e868b 100644 --- a/docs/Users_Guide/matthewjetstream.rst +++ b/docs/Users_Guide/matthewjetstream.rst @@ -8,21 +8,24 @@ Running I-WRF On Jetstream2 with Hurricane Matthew Data Overview ======== -The following instructions can be used to run -the `I-WRF weather simulation program `_ +The following instructions can be used to run elements of +the `I-WRF weather simulation framework `_ from the `National Center for Atmospheric Research (NCAR) `_ +and the `Cornell Center for Advanced Computing `_. +The steps below run the `Weather Research & Forecasting (WRF) `_ model +and the `METplus `_ verification framework with data from `Hurricane Matthew `_ on the `Jetstream2 cloud computing platform `_. This exercise provides an introduction to using cloud computing platforms, -running computationally complex simulations and using containerized applications. +running computationally complex simulations and analyses, and using containerized applications. -Simulations like I-WRF often require greater computing resources +Simulations like WRF often require greater computing resources than you may have on your personal computer, but a cloud computing platform can provide the needed computational power. Jetstream2 is a national cyberinfrastructure resource that is easy to use and is available to researchers and educators. -This exercise runs the I-WRF program as a Docker "container", -which simplifies the set-up work needed to run the simulation. +This exercise runs the I-WRF programs as Docker "containers", +which simplifies the set-up work needed to run the simulation and verification.
It is recommended that you follow the instructions in each section in the order presented to avoid encountering issues during the process. @@ -76,7 +79,7 @@ Create a Cloud Instance and Log In ================================== After you have logged in to Jetstream2 and added your allocation to your account, -you are ready to create the cloud instance where you will run the I-WRF simulation. +you are ready to create the cloud instance where you will run the simulation and verification. If you are not familiar with the cloud computing terms "image" and "instance", it is recommended that you `read about them `__ before proceeding. @@ -123,10 +126,10 @@ In either case you will need to know the location and name of the private SSH ke the IP address of your instance (found in the Exosphere web dashboard) and the default username on your instance, which is "exouser". -Once you are logged in to the web shell you can proceed to the +Once you are logged in to the instance you can proceed to the "Preparing the Environment" section below. You will know that your login has been successful when the prompt has the form ``exouser@instance-name:~$``, -which indicates your username, the instance name, and your current working directory, followed by "$" +which indicates your username, the instance name, and your current working directory, followed by "$". Managing a Jetstream2 Instance ------------------------------ @@ -150,34 +153,81 @@ Increasing the number of CPUs (say, to flavor "m3.8") can make your computations But of course, doubling the number of CPUs doubles the cost per hour to run the instance, so Shelving as soon as you are done becomes even more important! -Install Software and Download Data -================================== +Preparing the Environment +========================= -With your instance created and running and you logged in to it through a Web Shell, -you can now install the necessary software and download the data to run the simulation. +With your instance created and running, and you logged in to it through SSH, +you can now create the run folders, install the Docker software, and download the data to run the simulation and verification. You will only need to perform these steps once, as they essentially change the contents of the instance's disk and those changes will remain even after the instance is shelved and unshelved. -The following sections instruct you to issue numerous Linux commands in your web shell. +The following sections instruct you to issue numerous Linux commands in your shell. If you are not familiar with Linux, you may want to refer to `An Introduction to Linux `_ when working through these steps. The commands in each section can be copied using the button in the upper right corner -and then pasted into your web shell by right-clicking. +and then pasted into your shell by right-clicking. -If your web shell ever becomes unresponsive or disconnected from the instance, +If your shell ever becomes unresponsive or disconnected from the instance, you can recover from that situation by rebooting the instance. In the Exosphere dashboard page for your instance, in the Actions menu, select "Reboot". The process takes several minutes, after which the instance status will return to "Ready". -Install Docker and Get the I-WRF Image +Define Environment Variables +---------------------------- + +We will be using some environment variables throughout this exercise to +make sure that we refer to the same resource names and file paths wherever they are used.
+Copy and paste the definitions below into your shell to define the variables before proceeding:: + + WRF_IMAGE=ncar/iwrf:latest + METPLUS_IMAGE=dtcenter/metplus-dev:develop + WORKING_DIR=/home/exouser + WRF_DIR=${WORKING_DIR}/wrf/20161006_00 + METPLUS_DIR=${WORKING_DIR}/metplus + WRF_CONFIG_DIR=${WORKING_DIR}/i-wrf/use_cases/Hurricane_Matthew/WRF + METPLUS_CONFIG_DIR=${WORKING_DIR}/i-wrf/use_cases/Hurricane_Matthew/METplus + OBS_DATA_VOL=data-matthew-input-obs + +Any time you open a new shell on your instance, you will need to perform this action +to redefine the variables before executing the commands that follow. + +Create the WRF and METplus Run Folders -------------------------------------- -As mentioned above, the I-WRF simulation application is provided as a Docker image that will run as a +The simulation is performed using a script that expects to run in a folder where it can create result files. +The first command below creates a folder (named "wrf") under the user's home directory, +and a sub-folder within "wrf" to hold the output of this simulation. +The subfolder is named "20161006_00", which is the beginning date and time of the simulation. +Similarly, a run folder named "metplus" must be created for the METplus process to use:: + + mkdir -p ${WRF_DIR} + mkdir -p ${METPLUS_DIR} + +Download Configuration Files +---------------------------- + +Both WRF and METplus require some configuration files to direct their behavior, +and those are downloaded from the I-WRF GitHub repository. +Some of those configuration files are then copied into the run folders. +These commands perform the necessary operations:: + + git clone https://github.com/NCAR/i-wrf ${WORKING_DIR}/i-wrf + cp ${WRF_CONFIG_DIR}/namelist.* ${WRF_DIR} + cp ${WRF_CONFIG_DIR}/vars_io.txt ${WRF_DIR} + cp ${WRF_CONFIG_DIR}/run.sh ${WRF_DIR} + +Install Docker and Pull Docker Objects +====================================== + +Install Docker +-------------- + +As mentioned above, the WRF and METplus software are provided as Docker images that will run as a `"container" `_ on your cloud instance. To run a Docker container, you must first install the Docker Engine on your instance. -You can then "pull" (download) the I-WRF image that will be run as a container. +You can then "pull" (download) the WRF and METplus images that will be run as containers. The `instructions for installing Docker Engine on Ubuntu `_ are very thorough and make a good reference, but we only need to perform a subset of those steps. @@ -186,74 +236,101 @@ then installs Docker:: curl --location https://bit.ly/3R3lqMU > install-docker.sh source install-docker.sh + rm install-docker.sh If a text dialog is displayed asking which services should be restarted, type ``Enter``. When the installation is complete, you can verify that the Docker command line tool works by asking for its version:: docker --version -Next, you must start the Docker daemon, which runs in the background and processes commands:: +The Docker daemon should start automatically, but it sometimes runs into issues. 
+First, check to see if the daemon started successfully:: - sudo service docker start + sudo systemctl --no-pager status docker -If that command appeared to succeed, you can confirm its status with this command:: +If you see a message saying the daemon failed to start because a "Start request repeated too quickly", +wait a few minutes and issue this command to try again to start it:: - sudo systemctl --no-pager status docker + sudo systemctl start docker + +If the command seems to succeed, confirm that the daemon is running using the status command above. +Repeat these efforts as necessary until it is started. -Once all of that is in order, you must pull the latest version of the I-WRF image onto your instance:: +Get the WRF and METplus Docker Images and the Observed Weather Data +------------------------------------------------------------------- - docker pull ncar/iwrf +Once Docker is running, you must pull the correct versions of the WRF and METplus images onto your instance:: -Get the Geographic Data ------------------------ + docker pull ${WRF_IMAGE} + docker pull ${METPLUS_IMAGE} -To run I-WRF on the Hurricane Matthew data set, you need a copy of the -geographic data representing the terrain in the area of the simulation. -These commands download an archive file containing that data, -uncompress the archive into a folder named "WPS_GEOG", and delete the archive file. -They take several minutes to complete:: +METplus is run to perform verification of the results of the WRF simulation using +observations gathered during Hurricane Matthew. +We download that data by pulling a Docker volume that holds it, +and then referencing that volume when we run the METplus Docker container. +The commands to pull and create the volume are:: + docker pull ncar/iwrf:${OBS_DATA_VOL}.docker + docker create --name ${OBS_DATA_VOL} ncar/iwrf:${OBS_DATA_VOL}.docker + +Download Data for WRF +===================== + +To run WRF on the Hurricane Matthew data set, you need to have +several data sets to support the computation. +The commands in these sections download archive files containing that data, +then uncompress the archives into folders. +The geographic data is large and takes several minutes to acquire, +while the other two data sets are smaller and are downloaded directly into the WRF run folder, +rather than the user's home directory. + +Get the geographic data representing the terrain in the area of the simulation:: + + cd ${WORKING_DIR} wget https://www2.mmm.ucar.edu/wrf/src/wps_files/geog_high_res_mandatory.tar.gz tar -xzf geog_high_res_mandatory.tar.gz rm geog_high_res_mandatory.tar.gz -Create the Run Folder ---------------------- +Get the case study data (GRIB2 files):: + + cd ${WRF_DIR} + wget https://www2.mmm.ucar.edu/wrf/TUTORIAL_DATA/matthew_1deg.tar.gz + tar -xvzf matthew_1deg.tar.gz + rm -f matthew_1deg.tar.gz -The simulation is performed using a script that must first be downloaded. -The script expects to run in a folder where it can download data files and create result files. -The instructions in this exercise create that folder in the user's home directory and name it "matthew". -The simulation script is called "run.sh". 
-The following commands create the empty folder and download the script into it, -then change its permissions so it can be run:: +Get the SST (Sea Surface Temperature) data:: - mkdir matthew - curl --location https://bit.ly/3KoBtRK > matthew/run.sh - chmod 775 matthew/run.sh + cd ${WRF_DIR} + wget https://www2.mmm.ucar.edu/wrf/TUTORIAL_DATA/matthew_sst.tar.gz + tar -xzvf matthew_sst.tar.gz + rm -f matthew_sst.tar.gz -Run I-WRF -========= +Run WRF +======= With everything in place, you are now ready to run the Docker container that will perform the simulation. The downloaded script runs inside the container, prints lots of status information, and creates output files in the run folder you created. -Execute this command to run the simulation in your web shell:: +Execute this command to run the simulation in your shell:: - time docker run --shm-size 14G -it -v ~/:/home/wrfuser/terrestrial_data -v ~/matthew:/tmp/hurricane_matthew ncar/iwrf:latest /tmp/hurricane_matthew/run.sh + docker run --shm-size 14G -it \ + -v ${WORKING_DIR}:/home/wrfuser/terrestrial_data \ + -v ${WRF_DIR}:/tmp/hurricane_matthew \ + ${WRF_IMAGE} /tmp/hurricane_matthew/run.sh The command has numerous arguments and options, which do the following: -* ``time docker run`` prints the runtime of the "docker run" command. +* ``docker run`` creates the container if needed and then runs it. * ``--shm-size 14G -it`` tells the command how much shared memory to use, and to run interactively in the shell. * The ``-v`` options map folders in your cloud instance to paths within the container. * ``ncar/iwrf:latest`` is the Docker image to use when creating the container. * ``/tmp/hurricane_matthew/run.sh`` is the location within the container of the script that it runs. The simulation initially prints lots of information while initializing things, then settles in to the computation. -The provided configuration simulates 12 hours of weather and takes under three minutes to finish on an m3.quad Jetstream2 instance. -Once completed, you can view the end of any of the output files to confirm that it succeeded:: +The provided configuration simulates 48 hours of weather and takes about 12 minutes to finish on an m3.quad Jetstream2 instance. +Once completed, you can view the end of an output file to confirm that it succeeded:: - tail matthew/rsl.out.0000 + tail ${WRF_DIR}/rsl.out.0000 The output should look something like this:: @@ -268,3 +345,32 @@ The output should look something like this:: Timing for Writing wrfout_d01_2016-10-06_12:00:00 for domain 1: 0.32534 elapsed seconds d01 2016-10-06_12:00:00 wrf: SUCCESS COMPLETE WRF +Run METplus +=========== + +After the WRF simulation has finished, you can run the METplus verification to compare the simulated results +to the actual weather observations during the hurricane. +The verification takes about five minutes to complete. +We use command line options to tell the METplus container several things, including where the observed data is located, +where the METplus configuration can be found, where the WRF output data is located, and where it should create its output files:: + + docker run --rm -it \ + --volumes-from ${OBS_DATA_VOL} \ + -v ${METPLUS_CONFIG_DIR}:/config \ + -v ${WORKING_DIR}/wrf:/data/input/wrf \ + -v ${METPLUS_DIR}:/data/output ${METPLUS_IMAGE} \ + /metplus/METplus/ush/run_metplus.py /config/PointStat_matthew.conf + +Progress information is displayed while the verification is performed. 
+**WARNING** log messages are expected because observation files are not available for every valid time and METplus is +configured to allow some missing inputs. An **ERROR** log message indicates that something went wrong. +METplus first converts the observation data files to a format that the MET tools can read using the MADIS2NC wrapper. +Point-Stat is run to generate statistics comparing METAR observations to surface-level model fields and +RAOB observations to "upper air" fields. +METplus will print its completion status when the processing finishes. + +The results of the METplus verification can be found in ``${WORKING_DIR}/metplus/point_stat``. +These files contain tabular output that can be viewed in a text editor. Turn off word wrapping for better viewing. +Refer to the MET User's Guide for more information about the +`Point-Stat output `_. +In the near future, this exercise will be extended to include instructions to visualize the results. diff --git a/docs/Users_Guide/matthewredcloud.rst b/docs/Users_Guide/matthewredcloud.rst new file mode 100644 index 0000000..78b850f --- /dev/null +++ b/docs/Users_Guide/matthewredcloud.rst @@ -0,0 +1,382 @@ +:orphan: + +.. _matthewredcloud: + +Running I-WRF On Red Cloud with Hurricane Matthew Data +****************************************************** + +Overview +======== + +The following instructions can be used to run elements of +the `I-WRF weather simulation framework `_ +from the `National Center for Atmospheric Research (NCAR) `_ +and the `Cornell Center for Advanced Computing `_. +The steps below run the `Weather Research & Forecasting (WRF) `_ model +and the `METplus `_ verification framework +with data from `Hurricane Matthew `_ +on the `Red Cloud cloud computing platform `_ +provided by the Cornell Center for Advanced Computing (CAC). +This exercise provides an introduction to using cloud computing platforms, +running computationally complex simulations and analyses, and using containerized applications. + +Simulations like WRF often require greater computing resources +than you may have on your personal computer, +but a cloud computing platform can provide the needed computational power. +Red Cloud is a subscription-based Infrastructure as a Service cloud that provides +root access to virtual servers and on-demand storage to Cornell researchers. +This exercise runs the I-WRF programs as Docker "containers", +which simplifies the set-up work needed to run the simulation and verification. + +It is recommended that you follow the instructions in each section in the order presented +to avoid encountering issues during the process. +Most sections refer to external documentation to provide details about the necessary steps +and to offer additional background information. + +Prepare to Use Red Cloud +======================== + +To `get started with Red Cloud `_, +you will need to: + +* Get a CAC account by doing one of the following: + + * Start a new project by making a `project request `_ (only available for Cornell faculty and staff). + * Join an existing project by `requesting to be added to a project `_. + * Request an exploratory account by `submitting a request `_. + +* Log in to Red Cloud's OpenStack interface. + +The sections below will guide you through this process. +For an overview of Red Cloud, read the Cornell TechDocs `Red Cloud documentation `_. + +Start a Project +--------------- + +One way to create a CAC account is to request a project. +Note that you must be a Cornell faculty member or a staff member to view the pages below and start a project.
+You may submit a `project request `_ at the CAC website. +Thoroughly review the `rates `_ page to understand the Red Cloud subscription service. +Once your project is approved, you can `manage your project `_; +read `this page `_ to learn how. + +Join a Project +-------------- + +To join an existing project, submit a `join request `_. +You should only do this if your PI has asked you to submit the request. +Once the PI of the project approves the request, an email is sent to you with the login information. + +Open an Exploratory Account +--------------------------- + +You may also request an exploratory account if you have not made one already. +This account has limited computing hours and storage but is sufficient for this exercise. +To request an exploratory account, submit a `request `_. +You are also given one hour of free consulting for any help you may need. + +Log in to Red Cloud OpenStack Interface +--------------------------------------- + +Once you are given your CAC account login information, +you can log into the `Red Cloud OpenStack web interface `_. +Note that you need to be on a project with a subscription to log in successfully. + +Create a Cloud Instance and Log In +================================== + +After you have logged in to the Red Cloud OpenStack interface, +you are ready to create the cloud instance where you will run the I-WRF simulation. +If you are not familiar with the cloud computing terms "image" and "instance", +it is recommended that you read about them `here `__ +and `here `__ before proceeding. + +Create an SSH Key +----------------- + +You can either upload a public SSH key to Red Cloud or generate an SSH key pair on Red Cloud before creating your instance. +Red Cloud injects the uploaded public key or generated public key into the instance's default user account, +and you will need to provide the matching private SSH key to log in to the instance. +If you are not familiar with "SSH key pairs", you should +`read about them `__ before continuing. + +* First, `create an SSH Key on your computer `_ using the "ssh-keygen" command. That command allows you to specify the name of the private key file it creates, with the default being "id_rsa". The matching public key file is saved and named with ".pub" appended to the filename. +* Then, `import the public key to Red Cloud `_ through the Red Cloud web interface. + +Alternatively, you can `create a key pair on Red Cloud `_. Be sure to follow the steps and save the private key it generates with the correct format and permissions before proceeding. + +Create an Instance +------------------ + +The Cornell TechDocs page `Creating a New Linux Instance `_ +provides detailed information about creating a Linux instance on Red Cloud. +While following those steps, be sure to make the following choices for this instance: + +* When choosing an image as the instance source: + + * "Select Boot Source" is "Image" + * Volume Size (GB) is 100 + * Delete Volume on Instance Delete is "Yes" + * Select the "ubuntu-22.04-LTS" image + +* In Flavor, choose c4.m32 (4 virtual CPUs) to provide a faster simulation run-time. +* In Network, select "public". +* In Key Pair, select the SSH public key that you uploaded previously. + +When all the required options are selected, click on the "Launch Instance" button, and wait for the instance to enter the "Active" state. +Note that the instance will not only be created, but will be running so that you can log in right away.
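+
+If you prefer a terminal workflow and have the OpenStack command-line client configured
+for your project, an equivalent instance can also be launched from the CLI. The sketch
+below is an optional, untested alternative to the web-interface steps above; it assumes
+a recent python-openstackclient, and the key pair name "my-key" and instance name
+"my-iwrf-instance" are placeholders you should replace with your own::
+
+    # Boot a 100 GB volume from the Ubuntu image and launch a c4.m32 instance
+    # on the public network, injecting the public key from the "my-key" key pair
+    openstack server create \
+        --image ubuntu-22.04-LTS \
+        --flavor c4.m32 \
+        --network public \
+        --key-name my-key \
+        --boot-from-volume 100 \
+        my-iwrf-instance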
+ +Log in to the Instance +---------------------- + +The instructions for `connecting to Red Cloud Linux instances using SSH `_ +can be executed in the Command Prompt on Windows (from the Start menu, type "cmd" and select Command Prompt) +or from the Terminal application on a Mac. + +In either case, you will need to know the location and name of the private SSH key created on your computer (see above), +the IP address of your instance (found in the Red Cloud OpenStack interface) +and the default username on your instance, which is "ubuntu". + +Once you are logged in to the instance you can proceed to the +"Preparing the Environment" section below. +You will know that your login has been successful when the prompt has the form ``ubuntu@instance-name:~$``, +which indicates your username, the instance name, and your current working directory, followed by "$". + +Managing a Red Cloud Instance +------------------------------ + +In order to use cloud computing resources efficiently, you must know how to +`manage your instances `_. +Instances incur costs whenever they are running (on Red Cloud, this is when they are "Active"). +"Shelving" an instance stops it from using the cloud's CPUs and memory, +and therefore stops it from incurring any charges against your project. + +When you are through working on this exercise, +be sure to use the instance's dropdown menu in the web interface to +"Shelve" the instance so that it is no longer spending your computing hours. +If you later return to the web interface and want to use the instance again, +use the dropdown menu's "Unshelve Instance" option to start the instance up again. +Note that any programs that were running when you shelve the instance will be lost, +but the contents of the disk are preserved when shelving. + +You may also want to try the "Resize" action to change the number of CPUs of the instance. +Increasing the number of CPUs (say, to flavor "c8.m64") can make your computations finish more quickly. +But of course, doubling the number of CPUs doubles the cost per hour to run the instance, +so Shelving as soon as you are done becomes even more important! + +Preparing the Environment +========================= + +With your instance created and running, and you logged in to it through SSH, +you can now install the necessary software and download the data to run the simulation. +You will only need to perform these steps once, +as they essentially change the contents of the instance's disk +and those changes will remain even after the instance is shelved and unshelved. + +The following sections instruct you to issue numerous Linux commands in your shell. +If you are not familiar with Linux, you may want to refer to +`An Introduction to Linux `_ when working through these steps. +The commands in each section can be copied using the button in the upper right corner +and then pasted into your shell by right-clicking. + +Define Environment Variables +---------------------------- + +We will be using some environment variables throughout this exercise to +make sure that we refer to the same resource names and file paths wherever they are used.
+Copy and paste the definitions below into your shell to define the variables before proceeding:: + + WRF_IMAGE=ncar/iwrf:latest + METPLUS_IMAGE=dtcenter/metplus-dev:develop + WORKING_DIR=/home/ubuntu + WRF_DIR=${WORKING_DIR}/wrf/20161006_00 + METPLUS_DIR=${WORKING_DIR}/metplus + WRF_CONFIG_DIR=${WORKING_DIR}/i-wrf/use_cases/Hurricane_Matthew/WRF + METPLUS_CONFIG_DIR=${WORKING_DIR}/i-wrf/use_cases/Hurricane_Matthew/METplus + OBS_DATA_VOL=data-matthew-input-obs + +Any time you open a new shell on your instance, you will need to perform this action +to redefine the variables before executing the commands that follow. + +Create the WRF and METplus Run Folders +-------------------------------------- + +The simulation is performed using a script that expects to run in a folder where it can create result files. +The first command below creates a folder (named "wrf") under the user's home directory, +and a sub-folder within "wrf" to hold the output of this simulation. +The subfolder is named "20161006_00", which is the beginning date and time of the simulation. +Similarly, a run folder named "metplus" must be created for the METplus process to use:: + + mkdir -p ${WRF_DIR} + mkdir -p ${METPLUS_DIR} + +Download Configuration Files +---------------------------- + +Both WRF and METplus require some configuration files to direct their behavior, +and those are downloaded from the I-WRF GitHub repository. +Some of those configuration files are then copied into the run folders. +These commands perform the necessary operations:: + + git clone https://github.com/NCAR/i-wrf ${WORKING_DIR}/i-wrf + cp ${WRF_CONFIG_DIR}/namelist.* ${WRF_DIR} + cp ${WRF_CONFIG_DIR}/vars_io.txt ${WRF_DIR} + cp ${WRF_CONFIG_DIR}/run.sh ${WRF_DIR} + +Install Docker and Pull Docker Objects +====================================== + +Install Docker +-------------- + +As mentioned above, the WRF and METplus software are provided as Docker images that will run as a +`"container" `_ +on your cloud instance. +To run a Docker container, you must first install the Docker Engine on your instance. +You can then "pull" (download) the WRF and METplus images that will be run as containers. + +The `instructions for installing Docker Engine on Ubuntu `_ +are very thorough and make a good reference, but we only need to perform a subset of those steps. +These commands run a script that sets up the Docker software repository on your instance, +then installs Docker:: + + curl --location https://bit.ly/3R3lqMU > install-docker.sh + source install-docker.sh + rm install-docker.sh + +If a text dialog is displayed asking which services should be restarted, type ``Enter``. +When the installation is complete, you can verify that the Docker command line tool works by asking for its version:: + + docker --version + +The Docker daemon should start automatically, but it sometimes runs into issues. +First, check to see if the daemon started successfully:: + + sudo systemctl --no-pager status docker + +If you see a message saying the daemon failed to start because a "Start request repeated too quickly", +wait a few minutes and issue this command to try again to start it:: + + sudo systemctl start docker + +If the command seems to succeed, confirm that the daemon is running using the status command above. +Repeat these efforts as necessary until it is started. 
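+
+Before pulling the large WRF and METplus images, you can optionally confirm that the
+daemon can actually run containers, not just report a running status. This quick check
+uses Docker's standard "hello-world" test image; it pulls a tiny image, starts a
+container from it, prints a greeting, and removes the container when it exits::
+
+    # Run and then remove a minimal test container to verify the Docker installation
+    sudo docker run --rm hello-world
+
+If the greeting is printed, Docker is working and you can proceed to the next section.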
+ +Get the WRF and METplus Docker Images and the Observed Weather Data +------------------------------------------------------------------- + +Once Docker is running, you must pull the correct versions of the WRF and METplus images onto your instance:: + + sudo docker pull ${WRF_IMAGE} + sudo docker pull ${METPLUS_IMAGE} + +METplus is run to perform verification of the results of the WRF simulation using +observations gathered during Hurricane Matthew. +We download that data by pulling a Docker volume that holds it, +and then referencing that volume when we run the METplus Docker container. +The commands to pull and create the volume are:: + + sudo docker pull ncar/iwrf:${OBS_DATA_VOL}.docker + sudo docker create --name ${OBS_DATA_VOL} ncar/iwrf:${OBS_DATA_VOL}.docker + +Download Data for WRF +===================== + +To run WRF on the Hurricane Matthew data set, you need to have +several data sets to support the computation. +The commands in these sections download archive files containing that data, +then uncompress the archives into folders. +The geographic data is large and takes several minutes to acquire, +while the other two data sets are smaller and are downloaded directly into the WRF run folder, +rather than the user's home directory. + +Get the geographic data representing the terrain in the area of the simulation:: + + cd ${WORKING_DIR} + wget https://www2.mmm.ucar.edu/wrf/src/wps_files/geog_high_res_mandatory.tar.gz + tar -xzf geog_high_res_mandatory.tar.gz + rm geog_high_res_mandatory.tar.gz + +Get the case study data (GRIB2 files):: + + cd ${WRF_DIR} + wget https://www2.mmm.ucar.edu/wrf/TUTORIAL_DATA/matthew_1deg.tar.gz + tar -xvzf matthew_1deg.tar.gz + rm -f matthew_1deg.tar.gz + +Get the SST (Sea Surface Temperature) data:: + + cd ${WRF_DIR} + wget https://www2.mmm.ucar.edu/wrf/TUTORIAL_DATA/matthew_sst.tar.gz + tar -xzvf matthew_sst.tar.gz + rm -f matthew_sst.tar.gz + +Run WRF +======= + +With everything in place, you are now ready to run the Docker container that will perform the simulation. +The downloaded script runs inside the container, prints lots of status information, +and creates output files in the run folder you created. +Execute this command to run the simulation in your shell:: + + sudo docker run --shm-size 14G -it \ + -v ${WORKING_DIR}:/home/wrfuser/terrestrial_data \ + -v ${WRF_DIR}:/tmp/hurricane_matthew \ + ${WRF_IMAGE} /tmp/hurricane_matthew/run.sh + +The command has numerous arguments and options, which do the following: + +* ``docker run`` creates the container if needed and then runs it. +* ``--shm-size 14G -it`` tells the command how much shared memory to use, and to run interactively in the shell. +* The ``-v`` options map folders in your cloud instance to paths within the container. +* ``ncar/iwrf:latest`` is the Docker image to use when creating the container. +* ``/tmp/hurricane_matthew/run.sh`` is the location within the container of the script that it runs. + +The simulation initially prints lots of information while initializing things, then settles in to the computation. +The provided configuration simulates 48 hours of weather and takes about 26 minutes to finish on a c4.m32 Red Cloud instance. 
+Once completed, you can view the end of an output file to confirm that it succeeded:: + + tail ${WRF_DIR}/rsl.out.0000 + +The output should look something like this:: + + Timing for main: time 2016-10-06_11:42:30 on domain 1: 0.23300 elapsed seconds + Timing for main: time 2016-10-06_11:45:00 on domain 1: 0.23366 elapsed seconds + Timing for main: time 2016-10-06_11:47:30 on domain 1: 2.77688 elapsed seconds + Timing for main: time 2016-10-06_11:50:00 on domain 1: 0.23415 elapsed seconds + Timing for main: time 2016-10-06_11:52:30 on domain 1: 0.23260 elapsed seconds + Timing for main: time 2016-10-06_11:55:00 on domain 1: 0.23354 elapsed seconds + Timing for main: time 2016-10-06_11:57:30 on domain 1: 0.23345 elapsed seconds + Timing for main: time 2016-10-06_12:00:00 on domain 1: 0.23407 elapsed seconds + Timing for Writing wrfout_d01_2016-10-06_12:00:00 for domain 1: 0.32534 elapsed seconds + d01 2016-10-06_12:00:00 wrf: SUCCESS COMPLETE WRF + +Run METplus +=========== + +After the WRF simulation has finished, you can run the METplus verification to compare the simulated results +to the actual weather observations during the hurricane. +The verification takes about five minutes to complete. +We use command line options to tell the METplus container several things, including where the observed data is located, +where the METplus configuration can be found, where the WRF output data is located, and where it should create its output files:: + + sudo docker run --rm -it \ + --volumes-from ${OBS_DATA_VOL} \ + -v ${METPLUS_CONFIG_DIR}:/config \ + -v ${WORKING_DIR}/wrf:/data/input/wrf \ + -v ${METPLUS_DIR}:/data/output ${METPLUS_IMAGE} \ + /metplus/METplus/ush/run_metplus.py /config/PointStat_matthew.conf + +Progress information is displayed while the verification is performed. +**WARNING** log messages are expected because observation files are not available for every valid time and METplus is +configured to allow some missing inputs. An **ERROR** log message indicates that something went wrong. +METplus first converts the observation data files to a format that the MET tools can read using the MADIS2NC wrapper. +Point-Stat is run to generate statistics comparing METAR observations to surface-level model fields and +RAOB observations to "upper air" fields. +METplus will print its completion status when the processing finishes. + +The results of the METplus verification can be found in ``${WORKING_DIR}/metplus/point_stat``. +These files contain tabular output that can be viewed in a text editor. Turn off word wrapping for better viewing. +Refer to the MET User's Guide for more information about the +`Point-Stat output `_. +In the near future, this exercise will be extended to include instructions to visualize the results. diff --git a/docs/Users_Guide/matthewwindows.rst b/docs/Users_Guide/matthewwindows.rst new file mode 100644 index 0000000..c0a7266 --- /dev/null +++ b/docs/Users_Guide/matthewwindows.rst @@ -0,0 +1,265 @@ +:orphan: + +.. _matthewwindows: + +Running I-WRF On Windows (Intel CPU) with Hurricane Matthew Data +**************************************************************** + +Overview +======== + +The following instructions can be used to run elements of +the `I-WRF weather simulation framework `_ +from the `National Center for Atmospheric Research (NCAR) `_ +and the `Cornell Center for Advanced Computing `_.
+The steps below run the `Weather Research & Forecasting (WRF) `_ model +and the `METplus `_ verification framework +with data from `Hurricane Matthew `_ +on a Windows computer with an Intel CPU. +This exercise provides an introduction to +running computationally complex simulations and analyses, and using containerized applications. + +Simulations like WRF often require significant computing resources, +so it is recommended that the computer you use have at least four cores, 32 GB of RAM, and 50 GB of available disk space. +This exercise runs the I-WRF programs as Docker "containers", +which simplifies the set-up work needed to run the simulation and verification. +However, the code used to build those Docker containers was compiled expressly for use on +`Intel CPUs `_, +so the Windows 10 or 11 computer you use must contain an Intel processor +(note that these instructions are not intended for use on a system running Windows Server). +Your Windows account will also need to have administrative privileges in order to perform all necessary steps. + +It is recommended that you follow the instructions in each section in the order presented +to avoid encountering issues during the process. +Most sections refer to external documentation to provide details about the necessary steps +and to offer additional background information. + +Preparing the Environment +========================= + +You will now create the run folders, install the software and download the data +that are needed to run the simulation and verification. +You will only need to perform these steps once. +The following sections instruct you to issue numerous DOS commands in a Windows "Command Prompt" shell. +To open such a shell: + +* Click the Start icon and then type "cmd" to display matching commands. +* Right-click on the "Command Prompt" option that is shown and select "Run as administrator". +* A black shell window should open. + +Define Environment Variables +---------------------------- + +We will be using some environment variables throughout this exercise to +make sure that we refer to the same resource names and file paths wherever they are used. +The first variable you need to define will specify the location of the "working directory" for the data and run folders. +The example command below specifies that the working directory is the home directory of a hypothetical username "exercise". +You will need to enter a command similar to this that either specifies *your* user account name instead of "exercise", +or changes the path entirely to use a different location on your computer:: + + set WORKING_DIR=C:\Users\exercise + +Now you can copy and paste the definitions below into your shell to define the other variables before proceeding:: + + set WRF_IMAGE=ncar/iwrf:latest + set METPLUS_IMAGE=dtcenter/metplus-dev:develop + set WRF_DIR=%WORKING_DIR%\wrf\20161006_00 + set METPLUS_DIR=%WORKING_DIR%\metplus + set WRF_CONFIG_DIR=%WORKING_DIR%\i-wrf-main\use_cases\Hurricane_Matthew\WRF + set METPLUS_CONFIG_DIR=%WORKING_DIR%\i-wrf-main\use_cases\Hurricane_Matthew\METplus + set OBS_DATA_VOL=data-matthew-input-obs + +Any time you open a new shell on your computer, you will need to perform this action +to redefine the variables before executing the commands that follow. + +Create the WRF and METplus Run Folders +-------------------------------------- + +The simulation is performed using a script that expects to run in a folder where it can create result files.
+The first command below creates a folder (named "wrf") under the user's home directory, +and a sub-folder within "wrf" to hold the output of this simulation. +The subfolder is named "20161006_00", which is the beginning date and time of the simulation. +Similarly, a run folder named "metplus" must be created for the METplus process to use:: + + mkdir %WRF_DIR% + mkdir %METPLUS_DIR% + +Download Configuration Files +---------------------------- + +Both WRF and METplus require some configuration files to direct their behavior, +and those must be downloaded from GitHub: + +* In a browser, visit the `I-WRF GitHub repository `_. +* Click the green ``<> Code`` button, and select Download ZIP. +* After the ZIP file has been downloaded, open it and extract its contents to the working directory you selected, as a folder named "i-wrf-main" (the default). Be careful not to include two levels of "i-wrf-main" folders in the path! + +Now, some of the configuration files must be copied into the WRF run folder. +These commands perform the necessary operations:: + + copy /y %WRF_CONFIG_DIR%\namelist.* %WRF_DIR% + copy /y %WRF_CONFIG_DIR%\vars_io.txt %WRF_DIR% + copy /y %WRF_CONFIG_DIR%\run.sh %WRF_DIR% + +Install Docker and Pull Docker Objects +====================================== + +As mentioned above, the WRF and METplus software are provided as Docker images that will run as a +`"container" `_ +on your computer. +To run a Docker container, you must first install the Docker Engine on your computer. +You can then "pull" (download) the WRF and METplus images that will be run as containers. + +Install Docker Desktop +---------------------- + +In order to install Docker on your Windows computer, one or more Windows services must be enabled +(these services allow virtualization and running of containers). +The `process for performing this setup and installation `_ +is outlined below. +During the setup process your computer may reboot one or more times, +so be sure to save all work and close your other applications before beginning the setup. + +To install Docker and enable the required components on Windows 10/11, +you will install the Docker Desktop for Windows application by following these steps: + +* In a web browser, visit `Install Docker Desktop on Windows `_. +* Click on ``Docker Desktop for Windows - x86_64`` to download the installer. +* Run the installer file "Docker Desktop Installer.exe", which will require a system restart. +* Leave the "Use WSL 2 instead of Hyper-V" option checked in the dialog that appears. +* After the installation is complete, use the Start menu to find and run Docker Desktop, then agree to the terms and complete the other steps in the "first use" wizard. + +The Docker Desktop app should now show a green "Engine running" status in the lower left corner. +If your engine isn't running or you encounter any other issues, +visit the `Troubleshoot Docker Desktop page `_. + +Get the WRF and METplus Docker Images and the Observed Weather Data +------------------------------------------------------------------- + +Once Docker is running, you must pull the correct versions of the WRF and METplus images onto your computer. +Open a Command Prompt shell as done before, execute the commands to define the environment variables, and then issue these commands:: + + docker pull %WRF_IMAGE% + docker pull %METPLUS_IMAGE% + +METplus is run to perform verification of the results of the WRF simulation using +observations gathered during Hurricane Matthew.
+We download that data by pulling a Docker volume on which the data resides, +then creating a container from that volume, +and then referencing that volume when we run the METplus Docker container. +The commands to pull the volume and create a container for it are:: + + docker pull ncar/iwrf:%OBS_DATA_VOL%.docker + docker create --name %OBS_DATA_VOL% ncar/iwrf:%OBS_DATA_VOL%.docker + +Download Data for WRF +===================== + +To run WRF on the Hurricane Matthew data, you need to have +three data sets to support the computation. +The commands in this section download archive files containing that data, +then uncompress the archives into folders. +The geographic data is large and takes several minutes to acquire, +while the other two data sets are smaller and are downloaded directly into the WRF run folder, +rather than the main working directory. + +The steps to process each data set are the same: + +* Visit the data set's URL in a web browser, which will download the .tar.gz file. +* Unzip the .tar.gz file contents into the destination folder. +* Remove the downloaded .tar.gz file. + +Begin by downloading all of the data sets in this table: + ++-------------------+----------------------------------------------------------------------------+---------------+ +| Data Set | URL | Destination | ++===================+============================================================================+===============+ +| Terrain | https://www2.mmm.ucar.edu/wrf/src/wps_files/geog_high_res_mandatory.tar.gz | %WORKING_DIR% | ++-------------------+----------------------------------------------------------------------------+---------------+ +| Case study | https://www2.mmm.ucar.edu/wrf/TUTORIAL_DATA/matthew_1deg.tar.gz | %WRF_DIR% | ++-------------------+----------------------------------------------------------------------------+---------------+ +| Sea Surface Temps | https://www2.mmm.ucar.edu/wrf/TUTORIAL_DATA/matthew_sst.tar.gz | %WRF_DIR% | ++-------------------+----------------------------------------------------------------------------+---------------+ + +Now, in your command prompt window, change directory ("cd") to the folder where those files were downloaded. +Then, copy/paste the commands below to unzip the data and delete the downloaded files:: + + tar -xzf geog_high_res_mandatory.tar.gz -C %WORKING_DIR% + del geog_high_res_mandatory.tar.gz + + tar -xzf matthew_1deg.tar.gz -C %WRF_DIR% + del matthew_1deg.tar.gz + + tar -xzf matthew_sst.tar.gz -C %WRF_DIR% + del matthew_sst.tar.gz + +Run WRF +======= + +With everything in place, you are now ready to run the Docker container that will perform the simulation. +The downloaded script runs inside the container, prints lots of status information, +and creates output files in the run folder you created. +Execute this command to run the simulation in your shell:: + + docker run --shm-size 14G -it ^ + -v %WORKING_DIR%:/home/wrfuser/terrestrial_data ^ + -v %WRF_DIR%:/tmp/hurricane_matthew ^ + %WRF_IMAGE% /tmp/hurricane_matthew/run.sh + +The command has numerous arguments and options, which do the following: + +* ``docker run`` creates the container if needed and then runs it. +* ``--shm-size 14G -it`` tells the command how much shared memory to use, and to run interactively in the shell. +* The ``-v`` options map folders on your computer to paths within the container. +* ``ncar/iwrf:latest`` is the Docker image to use when creating the container.
+* ``/tmp/hurricane_matthew/run.sh`` is the location within the container of the script that it runs. + +The simulation initially prints lots of information while initializing things, then settles in to the computation. +The provided configuration simulates 48 hours of weather and should take less than 30 minutes to finish, +depending on your CPU's number of cores and clock speed. +Once completed, you can view the end of an output file to confirm that it succeeded:: + + powershell -command "& {Get-Content %WRF_DIR%\rsl.out.0000 | Select-Object -last 10}" + +The output should look something like this:: + + Timing for main: time 2016-10-06_11:42:30 on domain 1: 0.23300 elapsed seconds + Timing for main: time 2016-10-06_11:45:00 on domain 1: 0.23366 elapsed seconds + Timing for main: time 2016-10-06_11:47:30 on domain 1: 2.77688 elapsed seconds + Timing for main: time 2016-10-06_11:50:00 on domain 1: 0.23415 elapsed seconds + Timing for main: time 2016-10-06_11:52:30 on domain 1: 0.23260 elapsed seconds + Timing for main: time 2016-10-06_11:55:00 on domain 1: 0.23354 elapsed seconds + Timing for main: time 2016-10-06_11:57:30 on domain 1: 0.23345 elapsed seconds + Timing for main: time 2016-10-06_12:00:00 on domain 1: 0.23407 elapsed seconds + Timing for Writing wrfout_d01_2016-10-06_12:00:00 for domain 1: 0.32534 elapsed seconds + d01 2016-10-06_12:00:00 wrf: SUCCESS COMPLETE WRF + +Run METplus +=========== + +After the WRF simulation has finished, you can run the METplus verification to compare the simulated results +to the actual weather observations during the hurricane. +The verification takes about five minutes to complete. +We use command line options to tell the METplus container several things, including where the observed data is located, +where the METplus configuration can be found, where the WRF output data is located, and where it should create its output files:: + + docker run --rm -it ^ + --volumes-from %OBS_DATA_VOL% ^ + -v %METPLUS_CONFIG_DIR%:/config ^ + -v %WORKING_DIR%\wrf:/data/input/wrf ^ + -v %METPLUS_DIR%:/data/output %METPLUS_IMAGE% ^ + /metplus/METplus/ush/run_metplus.py /config/PointStat_matthew.conf + +Progress information is displayed while the verification is performed. +**WARNING** log messages are expected because observation files are not available for every valid time and METplus is +configured to allow some missing inputs. An **ERROR** log message indicates that something went wrong. +METplus first converts the observation data files to a format that the MET tools can read using the MADIS2NC wrapper. +Point-Stat is run to generate statistics comparing METAR observations to surface-level model fields and +RAOB observations to "upper air" fields. +METplus will print its completion status when the processing finishes. + +The results of the METplus verification can be found in ``%WORKING_DIR%\metplus\point_stat``. +These files contain tabular output that can be viewed in a text editor. Turn off word wrapping for better viewing. +Refer to the MET User's Guide for more information about the +`Point-Stat output `_. +In the near future, this exercise will be extended to include instructions to visualize the results. diff --git a/docs/Users_Guide/usecases.rst b/docs/Users_Guide/usecases.rst index aea980d..440670f 100644 --- a/docs/Users_Guide/usecases.rst +++ b/docs/Users_Guide/usecases.rst @@ -10,6 +10,16 @@ Hurricane Matthew on Jetstream2 Navigate to :ref:`matthewjetstream` for more information.
+Hurricane Matthew on Red Cloud +============================== + +Navigate to :ref:`matthewredcloud` for more information. + +Hurricane Matthew on Windows with Intel Processor +================================================= + +Navigate to :ref:`matthewwindows` for more information. + Land Use/Land Cover Change ========================== diff --git a/use_cases/Hurricane_Matthew/WRF/namelist.input b/use_cases/Hurricane_Matthew/WRF/namelist.input index 598ccda..9c544a7 100644 --- a/use_cases/Hurricane_Matthew/WRF/namelist.input +++ b/use_cases/Hurricane_Matthew/WRF/namelist.input @@ -3,24 +3,24 @@ run_hours = 48, run_minutes = 0, run_seconds = 0, - start_year = 2016, 2016, - start_month = 10, 10, - start_day = 06, 06, - start_hour = 00, 00, - end_year = 2016, 2016, - end_month = 10, 10, - end_day = 08, 08, - end_hour = 00, 00, - interval_seconds = 21600, - input_from_file = .true.,.true., - history_interval = 180, 180, - frames_per_outfile = 1, 1, + start_year = 2016, + start_month = 10, + start_day = 06, + start_hour = 00, + end_year = 2016, + end_month = 10, + end_day = 08, + end_hour = 0, + interval_seconds = 21600 + input_from_file = .true., + history_interval = 180, + frames_per_outfile = 1, restart = .false., restart_interval = 1440, - io_form_history = 2, - io_form_restart = 2, - io_form_input = 2, - io_form_boundary = 2, + io_form_history = 2 + io_form_restart = 2 + io_form_input = 2 + io_form_boundary = 2 iofields_filename = "vars_io.txt", "vars_io.txt", auxhist22_outname = "wrfout_zlev_d_", auxhist22_interval = 180, 180, @@ -33,16 +33,16 @@ / &domains - time_step = 90, + time_step = 150, time_step_fract_num = 0, time_step_fract_den = 1, max_dom = 1, - e_we = 91, 220, - e_sn = 100, 214, - e_vert = 45, 45, + e_we = 91, + e_sn = 100, + e_vert = 45, dzstretch_s = 1.1 p_top_requested = 5000, - num_metgrid_levels = 32, + num_metgrid_levels = 32 num_metgrid_soil_levels = 4, dx = 27000, dy = 27000, @@ -105,17 +105,17 @@ / &namelist_quilt - nio_tasks_per_group = 0, - nio_groups = 1, + nio_tasks_per_group = 0, + nio_groups = 1, / &diags - z_lev_diags = 1, - num_z_levels = 6, - z_levels = -80,-100,-200,-300,-400,-500, - p_lev_diags = 1, - num_press_levels = 10, - press_levels = 92500,85000,70000,50000,40000,30000,25000,20000,15000,10000, - use_tot_or_hyd_p = 1, - solar_diagnostics = 0, + z_lev_diags = 1, + num_z_levels = 6, + z_levels = -80,-100,-200,-300,-400,-500 + p_lev_diags = 1, + num_press_levels = 10, + press_levels = 92500,85000,70000,50000,40000,30000,25000,20000,15000,10000 + use_tot_or_hyd_p = 1, + solar_diagnostics = 0, / diff --git a/use_cases/Hurricane_Matthew/WRF/namelist.wps b/use_cases/Hurricane_Matthew/WRF/namelist.wps new file mode 100644 index 0000000..f3408a6 --- /dev/null +++ b/use_cases/Hurricane_Matthew/WRF/namelist.wps @@ -0,0 +1,35 @@ +&share + wrf_core = 'ARW', + max_dom = 1, + start_date = '2016-10-06_00:00:00', + end_date = '2016-10-08_00:00:00', + interval_seconds = 21600 +/ + +&geogrid + parent_id = 1, + parent_grid_ratio = 1, + i_parent_start = 1, + j_parent_start = 1, 25, + e_we = 91, + e_sn = 100, + geog_data_res = 'default', + dx = 27000, + dy = 27000, + map_proj = 'mercator', + ref_lat = 28.00, + ref_lon = -75.00, + truelat1 = 30.0, + truelat2 = 60.0, + stand_lon = -75.0, + geog_data_path = '/home/wrfuser/terrestrial_data/WPS_GEOG' +/ + +&ungrib + out_format = 'WPS', + prefix = 'FILE', +/ + +&metgrid + fg_name = 'FILE' +/ diff --git a/use_cases/Hurricane_Matthew/WRF/run.sh b/use_cases/Hurricane_Matthew/WRF/run.sh new file mode 100755 index 
index 0000000..0e98468
--- /dev/null
+++ b/use_cases/Hurricane_Matthew/WRF/run.sh
@@ -0,0 +1,67 @@
+#! /bin/bash
+
+# script adapted from instructions at https://www2.mmm.ucar.edu/wrf/OnLineTutorial/CASES/SingleDomain/ungrib.php
+# docker run -it -v /home/hahn/git:/home/wrfuser/git -v /mnt/storage/terrestrial_data:/home/wrfuser/terrestrial_data iwrf:latest /bin/bash
+
+source /etc/bashrc
+
+CYCLE_DIR="/tmp/hurricane_matthew"
+WPS_DIR="/home/wrfuser/WPS"
+WRF_DIR="/home/wrfuser/WRF"
+
+# Run the full WPS and WRF processing chain inside the cycle directory
+function main
+{
+    mkdir -p "${CYCLE_DIR}"
+    cd "${CYCLE_DIR}"
+    link_gfs_vtable
+    run_ungrib
+    run_geogrid
+    run_metgrid
+    run_real
+    run_wrf
+}
+
+# Link the GFS variable table and the downloaded GRIB2 files into the working directory
+function link_gfs_vtable
+{
+    ln -sf "${WPS_DIR}/ungrib/Variable_Tables/Vtable.GFS" Vtable
+    ${WPS_DIR}/link_grib.csh "${CYCLE_DIR}/matthew/*.grib2"
+}
+
+# Convert the GRIB2 files into the WPS intermediate format
+function run_ungrib
+{
+    ln -s "${WPS_DIR}/ungrib.exe" . 2>/dev/null
+    ./ungrib.exe
+}
+
+# Define the model domain and interpolate static geographical data onto it
+function run_geogrid
+{
+    ln -s "${WPS_DIR}"/* . 2>/dev/null
+    ./geogrid.exe
+}
+
+# Horizontally interpolate the intermediate-format data onto the model domain
+function run_metgrid
+{
+    ./metgrid.exe
+}
+
+# Create initial and lateral boundary conditions for the real-data run
+function run_real
+{
+    ln -s "${WRF_DIR}"/test/em_real/* . 2>/dev/null
+    ./real.exe
+}
+
+# Run the simulation; an unlimited stack size helps avoid spurious crashes
+function run_wrf
+{
+    ulimit -s unlimited
+    ln -s "${WRF_DIR}"/test/em_real/* . 2>/dev/null
+    mpirun ./wrf.exe
+}
+
+main

From fdbb986d8178a5c7540ed8bbf0769ccadf58c2b8 Mon Sep 17 00:00:00 2001
From: Ben Trumbore
Date: Mon, 9 Sep 2024 18:58:24 -0400
Subject: [PATCH 3/3] Issue #49 - Add METplus instructions to the Jetstream2
 tutorial (#56)

* Roll back change to usecases.rst

* Draft of the Jetstream2-Matthew tutorial instructions

The instructions still need to be retested and the text vetted by Rich.
It would be great if we could have a section at the end to validate or see the results.

* Tests to see what Markdown format I should really use

* Crazy-ass markdown language

* Sigh

* Switch the incorrect markdown to the correct, yet horrible, version

* Edit text, fix typos and links

* Make sure all commands work properly, add more info.

* A few more tweaks before others view it

* Revisions based on feedback and some procedural changes

* Final edits before initial publication

* fixed URL links, fixed header underline lengths, added link to matthew jetstream page from use cases page

* added ID so heading can be linked using :ref:

* use double underscores to create anonymous reference for links that have the same text

* updated versions of actions to prevent node.js deprecated warnings

* added orphan identifier to prevent warning that this page is not included in the toctree

* fixed typos

* Refactor the existing doc in preparation for adding METPlus doc

* Add initial version of METPlus instructions

These steps were migrated from work by George, modified to fit with the WRF instructions already present here.

* Tweaks to documentation after full testing

The final upper_air analysis still fails due to a lack of pressure level data files from the WRF simulation.

* Get METPlus working

Also moved around some of the commands and updated some text.
* Edits from final testing pass on new and revised content

* Final tweaks before creating a pull request

* Add text about viewing the output of METPlus

* changed METPlus to METplus

* split long docker run commands into multiple lines for better readability

* ignore auto-generated file

* use env var to reference obs data volume to more easily adapt to other use cases

* rewording and avoid using analysis to describe the METplus verification step to avoid confusion with the METplus Analysis tools that will be added later to generate images

* A few formatting tweaks after the code review

* ignore auto-generated file

* Refactor instructions in preparation for edits related to changes in how WRF is run.

* Add a run script and config file and update the existing config file to work with new documentation

* change run.sh permissions

* Remove data downloading from run.sh

* Finalize edits of the tutorial to use config files from new location and add data download steps.

---------

Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com>