diff --git a/manual/v1.0.0/en/html/.buildinfo b/manual/v1.0.0/en/html/.buildinfo new file mode 100644 index 0000000..ef11828 --- /dev/null +++ b/manual/v1.0.0/en/html/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 525bcd588269cd9b5473f122fc1b6e83 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/manual/v1.0.0/en/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png b/manual/v1.0.0/en/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png new file mode 100644 index 0000000..60ec48e Binary files /dev/null and b/manual/v1.0.0/en/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png differ diff --git a/manual/v1.0.0/en/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png.map b/manual/v1.0.0/en/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png.map new file mode 100644 index 0000000..138a9c8 --- /dev/null +++ b/manual/v1.0.0/en/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png.map @@ -0,0 +1,2 @@ + + diff --git a/manual/v1.0.0/en/html/_images/task_view.png b/manual/v1.0.0/en/html/_images/task_view.png new file mode 100644 index 0000000..44d038a Binary files /dev/null and b/manual/v1.0.0/en/html/_images/task_view.png differ diff --git a/manual/v1.0.0/en/html/_images/tutorial_dsqss_M1.png b/manual/v1.0.0/en/html/_images/tutorial_dsqss_M1.png new file mode 100644 index 0000000..cd47bf4 Binary files /dev/null and b/manual/v1.0.0/en/html/_images/tutorial_dsqss_M1.png differ diff --git a/manual/v1.0.0/en/html/_images/tutorial_dsqss_M2.png b/manual/v1.0.0/en/html/_images/tutorial_dsqss_M2.png new file mode 100644 index 0000000..95e2a6a Binary files /dev/null and b/manual/v1.0.0/en/html/_images/tutorial_dsqss_M2.png differ diff --git a/manual/v1.0.0/en/html/_images/tutorial_hphi_gap.png b/manual/v1.0.0/en/html/_images/tutorial_hphi_gap.png new file mode 100644 index 0000000..65d612c Binary files /dev/null and b/manual/v1.0.0/en/html/_images/tutorial_hphi_gap.png differ diff --git a/manual/v1.0.0/en/html/_sources/index.rst.txt b/manual/v1.0.0/en/html/_sources/index.rst.txt new file mode 100644 index 0000000..3b2de1d --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/index.rst.txt @@ -0,0 +1,20 @@ +.. HTP-tools documentation master file, created by + sphinx-quickstart on Fri Jun 30 11:02:31 2023. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Moller Users Guide +===================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + moller/index + +.. Indices and tables +.. ================== + +.. * :ref:`genindex` +.. * :ref:`modindex` +.. * :ref:`search` diff --git a/manual/v1.0.0/en/html/_sources/moller/about/index.rst.txt b/manual/v1.0.0/en/html/_sources/moller/about/index.rst.txt new file mode 100644 index 0000000..ddc8951 --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/about/index.rst.txt @@ -0,0 +1,74 @@ +**************************************************************** +Introduction +**************************************************************** + +What is moller? +---------------------------------------------------------------- + +In recent years, the use of machine learning for predicting material properties and designing substances (known as materials informatics) has gained considerable attention. 
+The accuracy of machine learning depends heavily on the preparation of appropriate training data. +Therefore, the development of tools and environments for the rapid generation of training data is expected to contribute significantly to the advancement of research in materials informatics. + +moller is provided as part of the HTP-Tools package, designed to support high-throughput computations. +It is a tool for generating batch job scripts for supercomputers and clusters, allowing parallel execution of programs under a series of computational conditions, such as parameter parallelism. +Currently, it supports the supercomputers ohtaka (using the slurm job scheduler) and kugui (using the PBS job scheduler) provided by the Institute for Solid State Physics, University of Tokyo. + +License +---------------------------------------------------------------- + +The distribution of the program package and the source codes for moller follow GNU General Public License version 3 (GPL v3) or later. + +Contributors +---------------------------------------------------------------- + +This software was developed by the following contributors. + +- ver.1.0.0 (Released on 2024/03/06) + +- ver.1.0-beta (Released on 2023/12/28) + + - Developers + + - Kazuyoshi Yoshimi (The Instutite for Solid State Physics, The University of Tokyo) + + - Tatsumi Aoyama (The Instutite for Solid State Physics, The University of Tokyo) + + - Yuichi Motoyama (The Instutite for Solid State Physics, The University of Tokyo) + + - Masahiro Fukuda (The Instutite for Solid State Physics, The University of Tokyo) + + - Kota Ido (The Instutite for Solid State Physics, The University of Tokyo) + + - Tetsuya Fukushima (The National Institute of Advanced Industrial Science and Technology (AIST)) + + - Shusuke Kasamatsu (Yamagata University) + + - Takashi Koretsune (Tohoku University) + + - Project Corrdinator + + - Taisuke Ozaki (The Instutite for Solid State Physics, The University of Tokyo) + + +Copyright +---------------------------------------------------------------- + +.. only:: html + + |copy| *2023- The University of Tokyo. All rights reserved.* + + .. |copy| unicode:: 0xA9 .. copyright sign + +.. only:: latex + + :math:`\copyright` *2023- The University of Tokyo. All rights reserved.* + +This software was developed with the support of "Project for advancement of software usability in materials science" of The Institute for Solid State Physics, The University of Tokyo. + +Operating environment +---------------------------------------------------------------- + +moller was tested on the following platforms: + +- Ubuntu Linux + python3 + diff --git a/manual/v1.0.0/en/html/_sources/moller/appendix/index.rst.txt b/manual/v1.0.0/en/html/_sources/moller/appendix/index.rst.txt new file mode 100644 index 0000000..f776269 --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/appendix/index.rst.txt @@ -0,0 +1,241 @@ +================================================================ +Extension guide +================================================================ + +N.B. The content of this section may vary depending on the version of *moller*. + + +Bulk job execution by *moller* +---------------------------------------------------------------- + +A bulk job execution means that a set of small tasks are executed in parallel within a single batch job submitted to a large batch queue. 
It is schematically shown as follows, in which N tasks are launched as background processes and executed in parallel, and a ``wait`` statement is invoked to wait for all tasks to be completed. + +.. code-block:: bash + + task param_1 & + task param_2 & + ... + task param_N & + wait + +To manage the bulk job, it is required to distribute nodes and cores allocated to the batch job over the tasks param_1 ... param_N so that they are executed on distinct nodes and cores. It is also needed to arrange task execution where at most N tasks are run simultaneously according to the allocated resources. + +Hereafter a job script generated by *moller* will be denoted as a moller script. +In a moller script, the concurrent execution and control of tasks are managed by GNU parallel [1]. It takes a list holding the items param_1 ... param_N and runs commands for each items in parallel. An example is given as follows, where list.dat contains param_1 ... param_N in each line. + +.. code-block:: bash + + cat list.dat | parallel -j N task + +The number of concurrent tasks is determined at runtime from the number of nodes and cores obtained from the execution environment and the degree of parallelism (number of nodes, processes, and threads specified by node parameter). + +The way to assign tasks to nodes and cores varies according to the job scheduler. +For SLURM job scheduler variants, the concurrent calls of ``srun`` command within the batch job are appropriately assigned to the nodes and cores by exploiting the option of exclusive resource usage. The explicit option may depend on the platform. + +On the other hand, for PBS job scheduler variants that do not have such features, the distribution of nodes and cores to tasks has to be handled within the moller script. The nodes and cores allocated to a batch job are divided into *slots*, and the slots are assigned to the concurrent tasks. The division is determined from the allocated nodes and cores and the degree of parallelism of the task, and kept in a form of table variables. Within a task, the programs are executed on the assigned hosts and cores (optionally pinned to the program) through the options to mpirun (or mpiexec) and the environment variables. This feature depends on the MPI implementation. + +**Reference** + +[1] `O. Tange, GNU Parallel - The command-Line Power Tool, ;login: The USENIX Magazine, February 2011:42-47. `_ + +How *moller* works +---------------------------------------------------------------- + +Structure of moller script +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +*moller* reads the input YAML file and generates a job script for bulk execution. The structure of the generated script is described as follows. + +#. Header + + This part contains the options to the job scheduler. The content of the platform section is formatted according to the type of job scheduler. This feature depends on platforms. + +#. Prologue + + This part corresponds to the prologue section of the input file. The content of the ``code`` block is written as-is. + +#. Function definitions + + This part contains the definitions of functions and variables used within the moller script. The description of the functions will be given in the next section. This feature depends on platforms. + +#. Processing Command-line options + + The SLURM variants accept additional arguments to the job submission command (sbatch) that are passed to the job script as a command-line options. 
The name of the list file and/or the options such as the retry feature can be processed. + + For the PBS variants, these command-line arguments are ignored, and therefore the name of the list file is fixed to ``list.dat`` by default, and the retry feature may be enabled by modifying the script with ``retry`` set to 1. + +#. Description of tasks + + This part contains the description of tasks specified in the jobs section of the input file. When more than one task is given, the following procedure is applied to each task. + + When parallel = false, the content of the ``run`` block is written as-is. + + When parallel = true (default), a function is created by the name task_{task name} that contains the pre-processing for concurrent execution and the content of the ``run`` block. The keywords for the parallel execution (``srun``, ``mpiexec``, or ``mpirun``) are substituted by the platform-dependent command. The definition of the task function is followed by the concurrent execution command. + +#. Epilogue + + This part corresponds to the epilogue section of the input file. The content of the ``code`` block is written as-is. + + +Brief description of moller script functions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The main functions of the moller script is briefly described below. + +- ``run_parallel`` + + This function performs concurrent execution of task functions. It takes the degree of parallelism, the task function, and the status file as arguments. Within the function, it calls ``_find_multiplicity`` to find the number of tasks that can be run simultaneously, and invokes GNU parallel to run tasks concurrently. The task function is actually wrapped by the ``_run_parallel_task`` function to deal with the nested call of GNU parallel. + + The platform-dependence is separated out by the functions ``_find_multiplicity`` and ``_setup_run_parallel``. + +- ``_find_multiplicity`` + + This function determines the number of tasks that can be simultaneously executed on the allocated resources (nodes and cores) taking account of the degree of parallelism of the task. + For the PBS variants, the compute nodes and the cores are divided into slots, and the slots are kept as table variables. + The information obtained at the batch job execution is summarized as follows. + + - For SLURM variants, + + The number of allocated nodes (``_nnodes``) + + ``SLURM_NNODES`` + + The number of allocated cores (``_ncores``) + + ``SLURM_CPUS_ON_NODE`` + + - For PBS variants, + + The allocated nodes (``_nodes[]``) + + The list of unique compute nodes is obtained from the file given by ``PBS_NODEFILE``. + + The number of allocated nodes (``_nnodes``) + + The number of entries of ``_nodes[]``. + + The number of allocated cores + + Searched from below (in order of examination) + + - ``NCPUS`` (for PBS Professional) + + - ``OMP_NUM_THREADS`` + + - ``core`` parameter of platform section (written in the script as a variable ``moller_core``.) + + - ``ncpus`` or ``ppn`` parameter in the header. + +- ``_setup_run_parallel`` + + This function is called from the ``run_parallel`` function to supplement some procedures before running GNU parallel. + For PBS variants, the slot tables are exported so that the task functions can refer to. + For SLURM variants, there is nothing to do. + +The structure of the task function is described as follows. + +- A task function is created by a name ``task_{task name}``. 
+ +- The arguments of the task function are 1) the degree of parallelism (the number of nodes, processes, and threads), 2) the execution directory (that corresponds to the entry of list file), 3) the slot ID assigned by GNU parallel. + +- The platform-dependent ``_setup_taskenv`` function is called to set up execution environment. + For PBS variants, the compute node and the cores are obtained from the slot table based on the slot ID. For SLURM variants, there is nothing to do. + +- The ``_is_ready`` function is called to check if the preceding task has been completed successfully. If it is true, the remaining part of the function is executed. Otherwise, the task is terminated with the status -1. + +- The content of the ``code`` block is written. The keywords for parallel calculation (``srun``, ``mpiexec``, or ``mpirun``) are substituted by the command provided for the platform. + + +How to extend *moller* for other systems +---------------------------------------------------------------- +The latest version of *moller* provides profiles for ISSP supercomputer systems, ohtaka and kugui. An extension guide to use *moller* in other systems is described in the following. + +Class structure +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The platform-dependent parts of *moller* are placed in the directory ``platform/``. +Their class structure is depicted below. + +.. graphviz:: + + digraph class_diagram { + size="5,5" + node[shape=record,style=filled,fillcolor=gray95] + edge[dir=back,arrowtail=empty] + + Platform[label="{Platform (base.py)}"] + BaseSlurm[label="{BaseSlurm (base_slurm.py)}"] + BasePBS[label="{BasePBS (base_pbs.py)}"] + BaseDefault[label="{BaseDefault (base_default.py)}"] + + Ohtaka[label="{Ohtaka (ohtaka.py)}"] + Kugui[label="{Kugui (kugui.py)}"] + Pbs[label="{Pbs (pbs.py)}"] + Default[label="{DefaultPlatform (default.py)}"] + + Platform->BaseSlurm + Platform->BasePBS + Platform->BaseDefault + + BaseSlurm->Ohtaka + BasePBS->Kugui + BasePBS->Pbs + BaseDefault->Default + } + + +A factory is provided to select a system in the input file. +A class is imported in ``platform/__init__.py`` and registered to the factory by ``register_platform(system_name, class_name)``, and then it becomes available in the system parameter of the platform section in the input YAML file. + +SLURM job scheduler variants +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +For the SLURM job scheduler variants, the system-specific settings should be applied to a derived class of BaseSlurm class. +The string that substitute the keywords for the parallel execution of programs is given by the return value of ``parallel_command()`` method. It corresponds to the ``srun`` command with the options for the exclusive use of resources. See ohtaka.py for an example. + +PBS job scheduler variants +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +For the PBS job scheduler variants (PBS Professional, OpenPBS, Torque, and others), the system-specific settings should be applied to a derived class of BasePBS class. + +There are two ways of specifying the number of nodes for a batch job in the PBS variants. PBS Professional takes the form of select=N:ncpus=n, while Torque and others take the form of node=N:ppn=n. The BasePBS class has a parameter ``self.pbs_use_old_format`` that is set to ``True`` for the latter type. + +The number of cores per compute node can be specified by node parameter of the input file, while the default value may be set for a known system. 
In kugui.py, the number of cores per node is set to 128 by default. + +Customizing features +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +When further customization is required, the methods of the base class may be overridden in the derived classes. The list of relevant methods is given below. + +- ``setup`` + + This method extracts parameters of the platform section. + +- ``parallel_command`` + + This method returns a string that is used to substitute the keywords for parallel execution of programs (``srun``, ``mpiexec``, ``mpirun``). + + +- ``generate_header`` + + This method generates the header part of the job script that contains options to the job scheduler. + +- ``generate_function`` + + This method generates functions that are used within the moller script. It calls the following methods to generate function body and variable definitions. + + - ``generate_variable`` + - ``generate_function_body`` + + The definitions of the functions are provided as embedded strings within the class. + +Porting to new type of job scheduler +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +The platform-dependent parts of the moller scripts are the calculation of task multiplicity, the resource distribution over tasks, and the command string of parallel calculation. +The internal functions need to be developed with the following information on the platform: + +- how to acquire the allocated nodes and cores from the environment at the execution of batch jobs. + +- how to launch parallel calculation (e.g. mpiexec command) and how to assign the nodes and cores to the command. + +To find which environment variables are set within the batch jobs, it may be useful to call ``printenv`` command in the job script. + +Trouble shooting +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When the variable ``_debug`` in the moller script is set to 1, the debug outputs are printed during the execution of the batch jobs. If the job does not work well, it is recommended that the debug option is turned on and the output is examined to check if the internal parameters are appropriately defined. diff --git a/manual/v1.0.0/en/html/_sources/moller/basic-usage.rst.txt b/manual/v1.0.0/en/html/_sources/moller/basic-usage.rst.txt new file mode 100644 index 0000000..35b1f10 --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/basic-usage.rst.txt @@ -0,0 +1,150 @@ +Installation and basic usage +================================================================ + +**Prerequisite** + + Comprehensive calculation utility ``moller`` included in HTP-tools requires the following programs and libraries: + + - Python 3.x + - ruamel.yaml module + - tabulate module + - GNU Parallel (It must be installed on servers or compute nodes on which the job script is executed.) + +**Official pages** + + - `GitHub repository `_ + +**Downloads** + + moller can be downloaded by the following command with git: + + .. code-block:: bash + + $ git clone https://github.com/issp-center-dev/Moller.git + +**Installation** + + Once the source files are obtained, you can install moller by running the following command. The required libraries will also be installed automatically at the same time. + + .. code-block:: bash + + $ cd ./Moller + $ python3 -m pip install . + + The executable files ``moller`` and ``moller_status`` will be installed. + +**Directory structure** + + :: + + . 
+ |-- LICENSE + |-- README.md + |-- pyproject.toml + |-- docs/ + | |-- ja/ + | |-- en/ + | |-- tutorial/ + |-- src/ + | |-- moller/ + | |-- __init__.py + | |-- main.py + | |-- platform/ + | | |-- __init__.py + | | |-- base.py + | | |-- base_slurm.py + | | |-- base_pbs.py + | | |-- base_default.py + | | |-- ohtaka.py + | | |-- kugui.py + | | |-- pbs.py + | | |-- default.py + | | |-- function.py + | | |-- utils.py + | |-- moller_status.py + |-- sample/ + +**Basic usage** + + ``moller`` is a tool to generate batch job scripts for supercomputers in which programs are run in parallel for a set of execution conditions using concurrent execution features. + + #. Prepare job description file + + First, you need to create a job description file in YAML format that describes the tasks to be executed on supercomputers. The details of the format will be given in File Format section of the manual. + + #. Run command + + Run moller program with the job description file, and a batch job script will be generated. + + .. code-block:: bash + + $ moller -o job.sh input.yaml + + #. Run batch jobs + + Transfer the generated batch job scripts to the supercomputer. + Prepare a directory for each parameter set, and create a list of the directory names in a file ``list.dat``. + Note that the list contains the relative paths to the directory where the batch job is executed, or the absolute paths. + + Once the list file is ready, you may submit a batch job. The actual command depends on the system. + + - In case of ISSP system B (ohtaka) + + In ohtaka, slurm is used for the job scheduling system. In order to submit a batch job, a command ``sbatch`` is invoked with the job script as an argument. Parameters can be passed to the script as additional arguments; the name of list file is specified as a parameter. + + .. code-block:: bash + + $ sbatch job.sh list.dat + + If the list file is not specified, ``list.dat`` is used by default. + + - In case of ISSP system C (kugui) + + In kugui, PBS is used for the job scheduling system. In order to submit a batch job, a command ``qsub`` is invoked with the job script. There is no way to pass parameters to the script, and thus the name of the list file is fixed to ``list.dat``. + + .. code-block:: bash + + $ qsub job.sh + + #. Check the status of the calculation + + After the job finishes, you may run the following command + + .. code-block:: bash + + $ moller_status input.yaml list.dat + + to obtain a report whether the calculation for each parameter set has been completed successfully. + + + #. Retry/resume job + + In case the job is terminated during the execution, the job may be resumed by submitting the batch job again with the same list file. + The yet unexecuted jobs (as well as the unfinished jobs) will be run. + + + - In case of ISSP system B (ohtaka) + + .. code-block:: bash + + $ sbatch job.sh list.dat + + To retry the failed tasks, the batch job is submitted with ``--retry`` command line option. + + .. code-block:: bash + + $ sbatch job.sh --retry list.dat + + - In case of ISSP system C (kugui) + + For kugui, to retry the failed tasks, the batch job script should be edited so that ``retry=0`` is changed to be ``retry=1``. + + .. code-block:: bash + + $ qsub job.sh + + Then, the batch job is submitted as above. + +**References** + +[1] `O. Tange, GNU Parallel - The command-Line Power Tool, ;login: The USENIX Magazine, February 2011:42-47. 
`_ diff --git a/manual/v1.0.0/en/html/_sources/moller/command/index.rst.txt b/manual/v1.0.0/en/html/_sources/moller/command/index.rst.txt new file mode 100644 index 0000000..7069b0f --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/command/index.rst.txt @@ -0,0 +1,95 @@ +Command reference +================================================================ + +moller +---------------------------------------------------------------- + + Generate a batch job script for comprehensive calculation + +SYNOPSIS: + + .. code-block:: bash + + moller [-o job_script] input_yaml + +DESCRIPTION: + + This program reads a job description file specified by input_yaml, and generates a batch job script. It takes the following command line options. + + - ``-o``, ``--output`` ``job_script`` + + specifies output file name. This option supersedes the output_file parameter in the job description file. If no output file is specified, the result is written to the standard output. + + - ``-h`` + + displays help and exits. + +moller_status +---------------------------------------------------------------- + + Reports the status of comprehensive calculation jobs + +SYNOPSIS: + + .. code-block:: bash + + moller_status [-h] [--text|--csv|--html] [--ok|--failed|--skipped|--collapsed|--yet] [-o output_file] input_yaml [list_file] + +DESCRIPTION: + + This program summarizes the status of tasks in jobs that are executed through the job scripts generated by moller, and outputs a report. The tasks are obtained from the job description file specified by ``input_yaml``. The list of jobs is read from the file specified by ``list_file``. If it is not provided, the job list is extracted from the log files. + The format of the output is specified by a command line option. The default is the text format. The output file is specified by the ``-o`` or ``--output`` option. If it is not specified, the output is written to the standard output. + + - output formats + + specifies the format of the output by one of the following options. If more than one option are specified, the program terminates with error. The default is the text format. + + - ``--text`` + displays in text format. + - ``--csv`` + displays in CSV (comma-separated values) format. + - ``--html`` + displays in HTML format. + + - ``input_yaml`` + + specifies the job description file for ``moller``. + + - ``list_file`` + + specifies the file that contains list of job directories. If this file is not specified, the list will be obtained from the logfile of the batch job ``stat_{task}.dat``. + + - ``-o``, ``--output`` ``output_file`` + + specifies the output file name. If it is omitted, the result is written to the standard output. + + - filter options + + specifies the status of jobs to be displayed by one of the following options. All jobs are displayed by default. + + - ``--ok`` + displays only jobs whose tasks are all completed successfully. + + - ``--failed`` + displays jobs, any of whose tasks are failed with errors, skipped, or not performed. + + - ``--skipped`` + displays jobs, any of whose tasks are skipped. + + - ``--yet`` + displays jobs, any of whose tasks are not yet performed. + + - ``--collapsed`` + displays jobs, any of whose tasks are failed with errors. + + - ``--all`` + displays all jobs. (default) + + - ``-h`` + + displays help and exits. + +FILES: + + When the programs are executed concurrently using the job script generated by ``moller``, the status of the tasks are written in log files ``stat_{task}.dat``. 
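+
+    For example, an illustrative invocation that writes a CSV report covering only the failed jobs, combining the options described above:
+
+    .. code-block:: bash
+
+       $ moller_status --failed --csv -o report.csv input.yaml list.dat
+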
``moller_status`` reads these log files and makes a summary. + diff --git a/manual/v1.0.0/en/html/_sources/moller/filespec/index.rst.txt b/manual/v1.0.0/en/html/_sources/moller/filespec/index.rst.txt new file mode 100644 index 0000000..6f2d4fb --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/filespec/index.rst.txt @@ -0,0 +1,134 @@ +.. _sec-fileformat: + +File format +================================================================ + +Job description file +---------------------------------------------------------------- + +A job description file contains configurations to generate a batch job script by ``moller``. It is prepared in text-based YAML format. This file consists of the following parts: + + 1. General settings: specifies job names and output files. + + 2. platform section: specifies the system on which batch jobs are executed, and the settings for the batch jobs. + + 3. prologue and epilogue sections: specifies initial settings and finalization within the batch job. + + 4. jobs section: specifies tasks to be carried out in the betch job script. + +General settings +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + ``name`` + + specifies the name of the batch job. If it is not given, the job name is left unspecified. (Usually the name of the job script is used as the job name.) + + ``description`` + + provides the description of the batch job. It is regarded as comments. + + ``output_file`` + + specifies the output file name. When the output file is given by a command-line option, the command-line parameter is used. When none of them is specified, the result is written to the standard output. + + +platform +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ``system`` + + specifies the target system. At present, either ``ohtaka`` or ``kugui`` is accepted. + + ``queue`` + + specifies the name of batch queue. The actual value depends on the target system. + + ``node`` + + specifies the number of nodes to be used. It is given by an integer specifying the number of nodes, or a list of integers specifying ``[`` number of nodes, number of cores per node ``]``. The accepted range of parameters depends on the system and queue settings. (The number of cores is accepted for kugui and default systems; otherwise it is ignored.) + + ``core`` + + specifies the number of cores per node be used. The accepted range of parameters depends on the system and queue settings. If both the second parameter of ``node`` and ``core`` are specified, the value in ``core`` is used. (This parameter is accepted for kugui and default target systems.) + + ``elapsed`` + + specifies the elapsed time of the batch job in HH:MM:SS format. + + ``options`` + + specifies other batch job options. It is given as a list of options or as a multiple-line string with options in each line. The heading directives (e.g. ``#PBS`` or ``#SBATCH``) are not included. The examples are given as follows. + + - an example of SLURM job script in the string format: + + .. code-block:: yaml + + options: | + --mail-type=BEGIN,END,FAIL + --mail-user=user@sample.com + --requeue + + - an example of PBS job script in the list format: + + .. code-block:: yaml + + options: + - -m bea + - -M user@sample.com + - -r y + +prologue, epilogue +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +``prologue`` section specifies the commands to be run prior to executing the tasks. It is used, for example, to set environment variables of libraries and paths. 
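+
+An illustrative ``prologue`` section is shown below; the module name and path are placeholders rather than settings for a specific system.
+
+.. code-block:: yaml
+
+   prologue:
+     code: |
+       # commands executed once before the tasks start (illustrative)
+       module load openmpi
+       export PATH=$HOME/opt/bin:$PATH
+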
+``epilogue`` section specifies the commands to be run after all tasks have been completed. + + ``code`` + + specifies the content of the commands in the form of shell script. It is embedded in the batch job script, and executed within the batch job. + +jobs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +``jobs`` section specifies a sequence of tasks in a table format, with the task names as keys and the contents as values. + + + key + + name of task + + value + + a table that consists of the following items: + + ``description`` + + provides the description of the task. It is regarded as comments. + + ``node`` + + specifies the degree of parallelism in one of the following formats. + + - ``[`` number of processes, number of threads per process ``]`` + - ``[`` number of nodes, number of processes, number of threads per process ``]`` + - number of nodes + + When the number of nodes is specified, the specified number of nodes are exclusively assigned to a job. Otherwise, if the required number of cores for a job is smaller than the number of cores in a node, more than one job may be allocated in a single node. If a job uses more than one node, the required number of nodes are exclusively assigned. + + ``parallel`` + + This parameter is set to ``true`` if the tasks of different jobs are executed in parallel. It is set to ``false`` if they are executed sequentially. The default value is ``true``. + + ``run`` + + The content of the task is described in the form of shell script. The executions of MPI parallel programs or MPI/OpenMPI hybrid parallel programs are specified by + + .. code-block:: bash + + srun prog [arg1, ...] + + where, in addition to the keyword ``srun``, ``mpirun`` or ``mpiexec`` is accepted. In the resulting job script, they are replaced by the command (e.g. ``srun`` or ``mpirun``) and the degree of parallelism specified by ``node`` parameter. + +List file +---------------------------------------------------------------- + +This file contains a list of jobs. It is a text file with a job name in a line (The name of the directory is associated with the name of the job). + +``moller`` assumes that a directory is assigned to each job, and the tasks of the job are executed within the directory. These directories are supposed to be located in the directory where the batch job is submitted. diff --git a/manual/v1.0.0/en/html/_sources/moller/index.rst.txt b/manual/v1.0.0/en/html/_sources/moller/index.rst.txt new file mode 100644 index 0000000..1f65be1 --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/index.rst.txt @@ -0,0 +1,15 @@ +**************************************************************** +Comprehensive Calculation Utility (moller) +**************************************************************** + + +.. toctree:: + :maxdepth: 2 + :numbered: 2 + + about/index + basic-usage + tutorial/index + command/index + filespec/index + appendix/index diff --git a/manual/v1.0.0/en/html/_sources/moller/tutorial/basic.rst.txt b/manual/v1.0.0/en/html/_sources/moller/tutorial/basic.rst.txt new file mode 100644 index 0000000..c2e0f70 --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/tutorial/basic.rst.txt @@ -0,0 +1,161 @@ +.. _sec-tutorial: + +Basic usage +-------------- + +The procedure to use the batch job script generator ``moller`` consists of the following steps: +First, a job description file is prepared that defines the tasks to be executed. Next, the program ``moller`` is to be run with the job description file, and a batch job script is generated. 
The script is then transferred to the target supercomputer system. A batch job is submitted with the script to perform calculations. +In this tutorial, we will explain the steps along a sample in ``docs/tutorial/moller``. + +Prepare job description file +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A job description file describes the content of calculations that are carried out in a batch job. +Here, a *batch job* is used for a set of instructions submitted to job schedulers running on supercomputer systems. +On the other hand, for the concurrent execution of programs that ``moller`` handles, we call a series of program executions performed for one set of parameters by a *job*. A job may consist of several contents that we call *tasks*. ``moller`` organizes job execution so that each task is run in parallel, and the synchronization between the jobs is taken at every start and end of the tasks. + +.. only:: html + + .. figure:: ../../_static/task_view.png + :alt: Tasks and jobs + + An example of tasks and jobs: Three jobs #1 ... #3 are carried out within a single batch job. Each job corresponds to different set of parameters. A job consists of 4 tasks. Each task is run in parallel among these three jobs. + +.. only:: latex + + .. figure:: ../../_static/task_view.pdf + :scale: 100% + :alt: Tasks and jobs + + An example of tasks and jobs: Three jobs #1 ... #3 are carried out within a single batch job. Each job corresponds to different set of parameters. A job consists of 4 tasks. Each task is run in parallel among these three jobs. + +An example of job description file is presented in the following. A job description file is in text-based YAML format. It contains parameters concerning the platform and the batch job, task descriptions, and pre/post-processes. + +.. literalinclude:: ../../../../tutorial/moller/input.yaml + +In the platform section, you can specify the type of platform on which to execute. +In this case, settings for the System B (ohtaka) are being made. + +The prologue section describes the preprocessing of the batch job. +It details the common command line to be executed before running the task. + +In the jobs section, the content of the task processing is described. +The series of tasks to be executed in the job are described in a table format, +with the task name as the key and the processing content as the value. + +In this example, a task that first outputs "start..." is defined with the task name "start". +Here, it is set to ``parallel = false``. +In this case, the content of ``run`` parameter is executed sequentially. + +Next, a task that outputs "hello world." is defined with the task name "hello_world" . +Here, since "parallel" is not set, it is treated as ``parallel = true``. +In this case, parallel processing is performed on a per-job basis. +Similarly, next, a task that outputs "hello world again." is defined with the task name "hello_again". + +Finally, in the epilogue section, the post-processing of the batch job is described. +It details the common command line to be executed after running the task. + +For more details on the specifications, please refer to the chapter :ref:`File Format `. + +Generate batch job script +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``moller`` is to be run with the job description file (``input.yaml``) as an input as follows: + +.. 
code-block:: bash + + $ moller -o job.sh input.yaml + +A batch job script is generated and written to a file specified by the parameter in the job description file, or the command line option ``-o`` or ``--output``. If both specified, the command line option is used. If neither specified, the result is written to the standard output. + +The obtained batch job script is to be transferred to the target system as required. It is noted that the batch job script is prepared for ``bash``; users may need to set the shell for job execution to ``bash``. (A care should be needed if the login shell is set to csh-type.) + + +Create list file +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A list of jobs is to be created. ``moller`` is designed so that each job is executed within a directory prepared for the job with the job name. The job list can be created, for example, by the following command: + +.. code-block:: bash + + $ /usr/bin/ls -1d * > list.dat + +In this tutorial, an utility script ``make_inputs.sh`` is enclosed which generates datasets and a list file. + +.. code-block:: bash + + $ bash ./make_inputs.sh + +By running the above command, a directory ``output`` and a set of subdirectories ``dataset-0001`` ... ``dataset-0020`` that correspond to datasets, and a list file ``list.dat`` are created. + + +Run batch job +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The batch job is to be submitted to the job scheduler with the batch job script. +In this example, the job script and the input parameter files are copied into the ``output`` directory, and the current directory is changed to ``output` as follows: + +.. code-block:: bash + + $ cp job.sh input.yaml output/ + $ cd output + +In ohtaka, slurm is used for the job scheduling system. In order to submit a batch job, a command ``sbatch`` is invoked with the job script as an argument. +Parameters can be passed to the script as additional arguments; the name of list file is specified as a parameter. + +.. code-block:: bash + + $ sbatch job.sh list.dat + +Files named 'result.txt' will be generated in each directory listed on the list.dat. +You can confirm that the 'result.txt' contains the strings 'hello world.' and 'hello world again.' as the job results. + +Check status +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The status of execution of the tasks are written to log files. A tool named ``moller_status`` is provided to generate a summary of the status of each job from the log files. It is invoked by the following command in the directory where the batch job is executed: + +.. code-block:: bash + + $ moller_status input.yaml list.dat + +The command takes the job description file ``input.yaml`` and the list file ``list.dat`` as arguments. The list file may be omitted; in this case, the information of the jobs are extracted from the log files. + +An example of the output is shown below: + +.. literalinclude:: ../../../../tutorial/moller/reference/status.txt + + +where "o" corresponds to a task that has been completed successfully, "x" corresponds to a failed task, "-" corresponds to a skipped task because the previous task has been terminated with errors, and "." corresponds to a task yet unexecuted. +In the above example, the all tasks have been completed successfully. + + +Rerun failed tasks +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If a task fails, the subsequent tasks within the job will not be executed. 
+The following is an example of job status in which each task fails by 10% change. + +.. literalinclude:: ../../../../tutorial/moller/reference/status_failed.txt + +There, the jobs of dataset_0003 and dataset_0004 failed at task1, and the subsequent task2 and task3 were not executed. The other jobs were successful at task1, and proceeded to task2. +In this way, each job is executed independently of other jobs. + +Users can rerun the failed tasks by submitting the batch job with the retry option. +For SLURM job scheduler (e.g. used in ISSP system B), resubmit the job as follows: + +.. code-block:: bash + + $ sbatch job.sh --retry list.dat + +For PBS job scheduler (e.g. used in ISSP system C), edit the job script so that the line ``retry=0`` is replaced by ``retry=1``, and resubmit the job. + +.. literalinclude:: ../../../../tutorial/moller/reference/status_retry.txt + +The tasks that have failed will be executed in the second run. +In the above example, the task1 for dataset_0003 was successful, but the task2 failed. +For dataset_0004, task1, task2, and task3 were successfully executed. +For the jobs of datasets whose tasks have already finished successfully, the second run will not do anything. + +N.B. the list file must not be modified on the rerun. The jobs are managed according to the order of entries in the list file, and therefore, if the order is changed, the jobs will not be executed properly. + diff --git a/manual/v1.0.0/en/html/_sources/moller/tutorial/dsqss.rst.txt b/manual/v1.0.0/en/html/_sources/moller/tutorial/dsqss.rst.txt new file mode 100644 index 0000000..ba956c9 --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/tutorial/dsqss.rst.txt @@ -0,0 +1,104 @@ +Example for *moller* calculation with DSQSS +--------------------------------------------- + +What’s this sample? +~~~~~~~~~~~~~~~~~~~~~~~ + +This is an example of ``moller`` with +`DSQSS `__, which is an +open-source software package for performing the path-integral Monte +Calro method for quantum many-body problem. In this example, we will +calculate the temperature dependence of the magnetic susceptibilities +:math:`\chi` of the :math:`S=1/2` (:math:`M=1` in the terms of DSQSS) +and :math:`S=1` (:math:`M=2`) antiferromagnetic Heisenberg chain under +the periodic boundary condition with several length. By using +``moller``, calculations with different parameters (:math:`M, L, T`) are +performed in parallel. + +This example is corresponding to `one of the official +tutorials `__. + +Preparation +~~~~~~~~~~~~~~ + +Make sure that ``moller`` (HTP-tools) package and ``DSQSS`` are +installed. In this tutorial, the calculation will be performed using the +supercomputer system ``ohtaka`` at ISSP. + +How to run +~~~~~~~~~~~~~~ + +1. Prepare dataset + + Run the script ``make_inputs.sh`` enclosed within this package. + + .. code:: bash + + $ bash ./make_inputs.sh + + This make an ``output`` directory (if already exists, first removed + then make again). Under ``output``, working directories for each + parameter like ``L_8__M_1__T_1.0`` will be generated. A list of the + directories is written to a file ``list.dat``. + +2. Generate job script using ``moller`` + + Generate a job script from the job description file using ``moller``, + and store the script as a file named ``job.sh``. + + .. code:: bash + + $ moller -o job.sh input.yaml + + Then, copy ``job.sh`` in the ``output`` directory, and change + directory to ``output``. + +3. Run batch job + + Submit a batch job with the job list as an argument. + + .. 
code:: bash + + $ sbatch job.sh list.dat + +4. Check status + + The status of task execution will be summarized by ``moller_status`` + program. + + .. code:: bash + + $ moller_status input.yaml list.dat + +5. Gather results + + After calculation finishes, gather result by + + .. code:: bash + + $ python3 ../extract_result.py list.dat + + This script writes results into a text file ``result.dat`` which has + 5 columns, :math:`M`, :math:`L`, :math:`T`, mean of :math:`\chi`, and + stderr of :math:`\chi`. + + To visualize the results, GNUPLOT files ``plot_M1.plt`` and + ``plot_M2.plt`` are available. + + .. code:: bash + + $ gnuplot --persist plot_M1.plt + $ gnuplot --persist plot_M2.plt + + |susceptibilities for S=1/2| |susceptibilities for S=2| + + The main different between :math:`S=1/2` and :math:`S=1` AFH chains + is whether the excitation gap vanishes (:math:`S=1/2`) or remains + (:math:`S=1`). Reflecting this, the magnetic susceptibility in the + very low temperature region remains finite (:math:`S=1/2`) or + vanishes (:math:`S=1`). Note that for the :math:`S=1/2` case, the + finite size effect opens the spin gap and therefore the magnetic + susceptibility of small chains drops. + +.. |susceptibilities for S=1/2| image:: ../../../../images/tutorial_dsqss_M1.* +.. |susceptibilities for S=2| image:: ../../../../images/tutorial_dsqss_M2.* diff --git a/manual/v1.0.0/en/html/_sources/moller/tutorial/hphi.rst.txt b/manual/v1.0.0/en/html/_sources/moller/tutorial/hphi.rst.txt new file mode 100644 index 0000000..40c180b --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/tutorial/hphi.rst.txt @@ -0,0 +1,105 @@ +Example for *moller* calculation with HPhi +------------------------------------------ + +What’s this sample? +~~~~~~~~~~~~~~~~~~~ + +This is an example of ``moller`` with +`HPhi `__, which is an +open-source software package for performing the exact diagonalization +method for quantum many-body problems. In this example, we will +calculate the system size dependence of the excitation gap +:math:`\Delta` of the :math:`S=1/2` (``2S_1`` directory) and :math:`S=1` +(``2S_2``) antiferromagnetic Heisenberg chain under the periodic +boundary condition. By using ``moller``, calculations with different +system sizes are performed in parallel. This is corresponding to +`section +1.4 `__ +of HPhi's official tutorial. + +Preparation +~~~~~~~~~~~ + +Make sure that ``moller`` (HTP-tools) package and ``HPhi`` are +installed. In this tutorial, the calculation will be performed using the +supercomputer system ``ohtaka`` at ISSP. + +How to run +~~~~~~~~~~ + +1. Prepare dataset + + Run the script ``make_inputs.sh`` enclosed within this package. + + .. code:: bash + + $ bash ./make_inputs.sh + + Working directories ``L_8``, ``L_10``, …, ``L_24`` (up to ``L_18`` + for ``2S_2``)) will be generated. A list of the directories is + written to a file ``list.dat``. Additionally, a shell script, + ``extract_gap.sh``, to gather energy gaps from working directories is + generated. + +2. Generate job script using ``moller`` + + Generate a job script from the job description file using ``moller``, + and store the script as a file named ``job.sh``. + + .. code:: bash + + $ moller -o job.sh input.yaml + +3. Run batch job + + Submit a batch job with the job list as an argument. + + .. code:: bash + + $ sbatch job.sh list.dat + +4. Check status + + The status of task execution will be summarized by ``moller_status`` + program. + + .. code:: bash + + $ moller_status input.yaml list.dat + +5. 
Gather results + + Once the calculation finishes, gather energy gaps from jobs as + + .. code:: bash + + $ bash extract_gap.sh + + This script writes pairs of the length :math:`L` and the gap + :math:`\Delta` into a text file, ``gap.dat``. + + To visualize the results, a Gnuplot file ``gap.plt`` is available. In + this file, the obtained gap data are fitted by the expected curves, + + .. math:: \Delta(L; S=1/2) = \Delta_\infty + A/L + + and + + .. math:: \Delta(L; S=1) = \Delta_\infty + B\exp(-CL). + + The result is plotted as follows: + + .. code:: bash + + $ gnuplot --persist gap.plt + + .. figure:: ../../../../images/tutorial_hphi_gap.* + :alt: Finite size effect of spin gap + + Finite size effect of spin gap + + Note that the logarithmic correction causes the spin gap for + :math:`S=1/2` to remain finite. On the other hand, for :math:`S=1`, + the extrapolated value :math:`\Delta_\infty = 0.417(1)` is consistent + with the previous results, e.g., :math:`\Delta_\infty = 0.41048(6)` + by QMC (Todo and Kato, PRL **87**, 047203 (2001)). diff --git a/manual/v1.0.0/en/html/_sources/moller/tutorial/index.rst.txt b/manual/v1.0.0/en/html/_sources/moller/tutorial/index.rst.txt new file mode 100644 index 0000000..3e89c68 --- /dev/null +++ b/manual/v1.0.0/en/html/_sources/moller/tutorial/index.rst.txt @@ -0,0 +1,9 @@ +Tutorial +================================================================ + +.. toctree:: + :maxdepth: 2 + + basic + hphi + dsqss diff --git a/manual/v1.0.0/en/html/_static/alabaster.css b/manual/v1.0.0/en/html/_static/alabaster.css new file mode 100644 index 0000000..55f9cb1 --- /dev/null +++ b/manual/v1.0.0/en/html/_static/alabaster.css @@ -0,0 +1,708 @@ +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: Georgia; + font-size: 17px; + background-color: #fff; + color: #000; + margin: 0; + padding: 0; +} + + +div.document { + width: 940px; + margin: 30px auto 0 auto; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 220px; +} + +div.sphinxsidebar { + width: 220px; + font-size: 14px; + line-height: 1.5; +} + +hr { + border: 1px solid #B1B4B6; +} + +div.body { + background-color: #fff; + color: #3E4349; + padding: 0 30px 0 30px; +} + +div.body > .section { + text-align: left; +} + +div.footer { + width: 940px; + margin: 20px auto 30px auto; + font-size: 14px; + color: #888; + text-align: right; +} + +div.footer a { + color: #888; +} + +p.caption { + font-family: inherit; + font-size: inherit; +} + + +div.relations { + display: none; +} + + +div.sphinxsidebar { + max-height: 100%; + overflow-y: auto; +} + +div.sphinxsidebar a { + color: #444; + text-decoration: none; + border-bottom: 1px dotted #999; +} + +div.sphinxsidebar a:hover { + border-bottom: 1px solid #999; +} + +div.sphinxsidebarwrapper { + padding: 18px 10px; +} + +div.sphinxsidebarwrapper p.logo { + padding: 0; + margin: -10px 0 0 0px; + text-align: center; +} + +div.sphinxsidebarwrapper h1.logo { + margin-top: -10px; + text-align: center; + margin-bottom: 5px; + text-align: left; +} + +div.sphinxsidebarwrapper h1.logo-name { + margin-top: 0px; +} + +div.sphinxsidebarwrapper p.blurb { + margin-top: 0; + font-style: normal; +} + +div.sphinxsidebar h3, +div.sphinxsidebar h4 { + font-family: Georgia; + color: #444; + font-size: 24px; + font-weight: normal; + margin: 0 0 5px 0; + padding: 0; +} + +div.sphinxsidebar h4 { + font-size: 20px; +} + +div.sphinxsidebar h3 a { + color: #444; +} + 
+div.sphinxsidebar p.logo a, +div.sphinxsidebar h3 a, +div.sphinxsidebar p.logo a:hover, +div.sphinxsidebar h3 a:hover { + border: none; +} + +div.sphinxsidebar p { + color: #555; + margin: 10px 0; +} + +div.sphinxsidebar ul { + margin: 10px 0; + padding: 0; + color: #000; +} + +div.sphinxsidebar ul li.toctree-l1 > a { + font-size: 120%; +} + +div.sphinxsidebar ul li.toctree-l2 > a { + font-size: 110%; +} + +div.sphinxsidebar input { + border: 1px solid #CCC; + font-family: Georgia; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 160px; +} + +div.sphinxsidebar .search > div { + display: table-cell; +} + +div.sphinxsidebar hr { + border: none; + height: 1px; + color: #AAA; + background: #AAA; + + text-align: left; + margin-left: 0; + width: 50%; +} + +div.sphinxsidebar .badge { + border-bottom: none; +} + +div.sphinxsidebar .badge:hover { + border-bottom: none; +} + +/* To address an issue with donation coming after search */ +div.sphinxsidebar h3.donation { + margin-top: 10px; +} + +/* -- body styles ----------------------------------------------------------- */ + +a { + color: #004B6B; + text-decoration: underline; +} + +a:hover { + color: #6D4100; + text-decoration: underline; +} + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: Georgia; + font-weight: normal; + margin: 30px 0px 10px 0px; + padding: 0; +} + +div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } +div.body h2 { font-size: 180%; } +div.body h3 { font-size: 150%; } +div.body h4 { font-size: 130%; } +div.body h5 { font-size: 100%; } +div.body h6 { font-size: 100%; } + +a.headerlink { + color: #DDD; + padding: 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + color: #444; + background: #EAEAEA; +} + +div.body p, div.body dd, div.body li { + line-height: 1.4em; +} + +div.admonition { + margin: 20px 0px; + padding: 10px 30px; + background-color: #EEE; + border: 1px solid #CCC; +} + +div.admonition tt.xref, div.admonition code.xref, div.admonition a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fafafa; +} + +div.admonition p.admonition-title { + font-family: Georgia; + font-weight: normal; + font-size: 24px; + margin: 0 0 10px 0; + padding: 0; + line-height: 1; +} + +div.admonition p.last { + margin-bottom: 0; +} + +div.highlight { + background-color: #fff; +} + +dt:target, .highlight { + background: #FAF3E8; +} + +div.warning { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.danger { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.error { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.caution { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.attention { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.important { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.note { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.tip { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.hint { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.seealso { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.topic { + background-color: #EEE; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre, tt, code { + 
font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; + font-size: 0.9em; +} + +.hll { + background-color: #FFC; + margin: 0 -12px; + padding: 0 12px; + display: block; +} + +img.screenshot { +} + +tt.descname, tt.descclassname, code.descname, code.descclassname { + font-size: 0.95em; +} + +tt.descname, code.descname { + padding-right: 0.08em; +} + +img.screenshot { + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils { + border: 1px solid #888; + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils td, table.docutils th { + border: 1px solid #888; + padding: 0.25em 0.7em; +} + +table.field-list, table.footnote { + border: none; + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} + +table.footnote { + margin: 15px 0; + width: 100%; + border: 1px solid #EEE; + background: #FDFDFD; + font-size: 0.9em; +} + +table.footnote + table.footnote { + margin-top: -15px; + border-top: none; +} + +table.field-list th { + padding: 0 0.8em 0 0; +} + +table.field-list td { + padding: 0; +} + +table.field-list p { + margin-bottom: 0.8em; +} + +/* Cloned from + * https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68 + */ +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +table.footnote td.label { + width: .1px; + padding: 0.3em 0 0.3em 0.5em; +} + +table.footnote td { + padding: 0.3em 0.5em; +} + +dl { + margin-left: 0; + margin-right: 0; + margin-top: 0; + padding: 0; +} + +dl dd { + margin-left: 30px; +} + +blockquote { + margin: 0 0 0 30px; + padding: 0; +} + +ul, ol { + /* Matches the 30px from the narrow-screen "li > ul" selector below */ + margin: 10px 0 10px 30px; + padding: 0; +} + +pre { + background: #EEE; + padding: 7px 30px; + margin: 15px 0px; + line-height: 1.3em; +} + +div.viewcode-block:target { + background: #ffd; +} + +dl pre, blockquote pre, li pre { + margin-left: 0; + padding-left: 30px; +} + +tt, code { + background-color: #ecf0f3; + color: #222; + /* padding: 1px 2px; */ +} + +tt.xref, code.xref, a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fff; +} + +a.reference { + text-decoration: none; + border-bottom: 1px dotted #004B6B; +} + +/* Don't put an underline on images */ +a.image-reference, a.image-reference:hover { + border-bottom: none; +} + +a.reference:hover { + border-bottom: 1px solid #6D4100; +} + +a.footnote-reference { + text-decoration: none; + font-size: 0.7em; + vertical-align: top; + border-bottom: 1px dotted #004B6B; +} + +a.footnote-reference:hover { + border-bottom: 1px solid #6D4100; +} + +a:hover tt, a:hover code { + background: #EEE; +} + + +@media screen and (max-width: 870px) { + + div.sphinxsidebar { + display: none; + } + + div.document { + width: 100%; + + } + + div.documentwrapper { + margin-left: 0; + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + } + + div.bodywrapper { + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + margin-left: 0; + } + + ul { + margin-left: 0; + } + + li > ul { + /* Matches the 30px from the "ul, ol" selector above */ + margin-left: 30px; + } + + .document { + width: auto; + } + + .footer { + width: auto; + } + + .bodywrapper { + margin: 0; + } + + .footer { + width: auto; + } + + .github { + display: none; + } + + + +} + + + +@media screen and (max-width: 875px) { + + body { + margin: 0; + padding: 
20px 30px; + } + + div.documentwrapper { + float: none; + background: #fff; + } + + div.sphinxsidebar { + display: block; + float: none; + width: 102.5%; + margin: 50px -30px -20px -30px; + padding: 10px 20px; + background: #333; + color: #FFF; + } + + div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, + div.sphinxsidebar h3 a { + color: #fff; + } + + div.sphinxsidebar a { + color: #AAA; + } + + div.sphinxsidebar p.logo { + display: none; + } + + div.document { + width: 100%; + margin: 0; + } + + div.footer { + display: none; + } + + div.bodywrapper { + margin: 0; + } + + div.body { + min-height: 0; + padding: 0; + } + + .rtd_doc_footer { + display: none; + } + + .document { + width: auto; + } + + .footer { + width: auto; + } + + .footer { + width: auto; + } + + .github { + display: none; + } +} + + +/* misc. */ + +.revsys-inline { + display: none!important; +} + +/* Hide ugly table cell borders in ..bibliography:: directive output */ +table.docutils.citation, table.docutils.citation td, table.docutils.citation th { + border: none; + /* Below needed in some edge cases; if not applied, bottom shadows appear */ + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} + + +/* relbar */ + +.related { + line-height: 30px; + width: 100%; + font-size: 0.9rem; +} + +.related.top { + border-bottom: 1px solid #EEE; + margin-bottom: 20px; +} + +.related.bottom { + border-top: 1px solid #EEE; +} + +.related ul { + padding: 0; + margin: 0; + list-style: none; +} + +.related li { + display: inline; +} + +nav#rellinks { + float: right; +} + +nav#rellinks li+li:before { + content: "|"; +} + +nav#breadcrumbs li+li:before { + content: "\00BB"; +} + +/* Hide certain items when printing */ +@media print { + div.related { + display: none; + } +} \ No newline at end of file diff --git a/manual/v1.0.0/en/html/_static/basic.css b/manual/v1.0.0/en/html/_static/basic.css new file mode 100644 index 0000000..4157edf --- /dev/null +++ b/manual/v1.0.0/en/html/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: inherit; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 
8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + 
+dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} 
+ +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/manual/v1.0.0/en/html/_static/custom.css b/manual/v1.0.0/en/html/_static/custom.css new file mode 100644 index 0000000..2a924f1 --- /dev/null +++ b/manual/v1.0.0/en/html/_static/custom.css @@ -0,0 +1 @@ +/* This file intentionally left blank. */ diff --git a/manual/v1.0.0/en/html/_static/doctools.js b/manual/v1.0.0/en/html/_static/doctools.js new file mode 100644 index 0000000..d06a71d --- /dev/null +++ b/manual/v1.0.0/en/html/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? 
singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/manual/v1.0.0/en/html/_static/documentation_options.js b/manual/v1.0.0/en/html/_static/documentation_options.js new file mode 100644 index 0000000..89435bb --- /dev/null +++ b/manual/v1.0.0/en/html/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '1.0.0', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/manual/v1.0.0/en/html/_static/file.png b/manual/v1.0.0/en/html/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/manual/v1.0.0/en/html/_static/file.png differ diff --git a/manual/v1.0.0/en/html/_static/graphviz.css b/manual/v1.0.0/en/html/_static/graphviz.css new file mode 100644 
index 0000000..8d81c02 --- /dev/null +++ b/manual/v1.0.0/en/html/_static/graphviz.css @@ -0,0 +1,19 @@ +/* + * graphviz.css + * ~~~~~~~~~~~~ + * + * Sphinx stylesheet -- graphviz extension. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +img.graphviz { + border: 0; + max-width: 100%; +} + +object.graphviz { + max-width: 100%; +} diff --git a/manual/v1.0.0/en/html/_static/language_data.js b/manual/v1.0.0/en/html/_static/language_data.js new file mode 100644 index 0000000..250f566 --- /dev/null +++ b/manual/v1.0.0/en/html/_static/language_data.js @@ -0,0 +1,199 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, is available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/manual/v1.0.0/en/html/_static/minus.png b/manual/v1.0.0/en/html/_static/minus.png new file mode 100644 index 0000000..d96755f Binary files /dev/null and b/manual/v1.0.0/en/html/_static/minus.png differ diff --git a/manual/v1.0.0/en/html/_static/plus.png b/manual/v1.0.0/en/html/_static/plus.png new file mode 100644 index 0000000..7107cec Binary files /dev/null and b/manual/v1.0.0/en/html/_static/plus.png differ diff --git a/manual/v1.0.0/en/html/_static/pygments.css b/manual/v1.0.0/en/html/_static/pygments.css new file mode 100644 index 0000000..0d49244 --- /dev/null +++ b/manual/v1.0.0/en/html/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 
5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070a0 } /* 
Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/manual/v1.0.0/en/html/_static/searchtools.js b/manual/v1.0.0/en/html/_static/searchtools.js new file mode 100644 index 0000000..7918c3f --- /dev/null +++ b/manual/v1.0.0/en/html/_static/searchtools.js @@ -0,0 +1,574 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. 
+ * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." + ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && 
!terms[word]) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord) && !titleTerms[word]) + arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); + }); + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); + else fileMap.set(file, [word]); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. + const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords) => { + const text = Search.htmlToText(htmlText); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/manual/v1.0.0/en/html/_static/sphinx_highlight.js b/manual/v1.0.0/en/html/_static/sphinx_highlight.js new file mode 100644 index 0000000..8a96c69 --- /dev/null +++ b/manual/v1.0.0/en/html/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? 
divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/manual/v1.0.0/en/html/_static/task_view.pdf b/manual/v1.0.0/en/html/_static/task_view.pdf new file mode 100644 index 0000000..3af0603 Binary files /dev/null and b/manual/v1.0.0/en/html/_static/task_view.pdf differ diff --git a/manual/v1.0.0/en/html/_static/task_view.png b/manual/v1.0.0/en/html/_static/task_view.png new file mode 100644 index 0000000..44d038a Binary files /dev/null and b/manual/v1.0.0/en/html/_static/task_view.png differ diff --git a/manual/v1.0.0/en/html/genindex.html b/manual/v1.0.0/en/html/genindex.html new file mode 100644 index 0000000..031500b --- /dev/null +++ b/manual/v1.0.0/en/html/genindex.html @@ -0,0 +1,104 @@ + + + + + + + Index — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ + +

Index

+ +
+ +
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/index.html b/manual/v1.0.0/en/html/index.html new file mode 100644 index 0000000..3dd4491 --- /dev/null +++ b/manual/v1.0.0/en/html/index.html @@ -0,0 +1,121 @@ + + + + + + + + Moller Users Guide — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/about/index.html b/manual/v1.0.0/en/html/moller/about/index.html new file mode 100644 index 0000000..5ca30ae --- /dev/null +++ b/manual/v1.0.0/en/html/moller/about/index.html @@ -0,0 +1,173 @@ + + + + + + + + 1. Introduction — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

1. Introduction

+
+

1.1. What is moller?

+

In recent years, the use of machine learning for predicting material properties and designing substances (known as materials informatics) has gained considerable attention. +The accuracy of machine learning depends heavily on the preparation of appropriate training data. +Therefore, the development of tools and environments for the rapid generation of training data is expected to contribute significantly to the advancement of research in materials informatics.

+

moller is provided as part of the HTP-Tools package, designed to support high-throughput computations. +It is a tool for generating batch job scripts for supercomputers and clusters, allowing parallel execution of programs under a series of computational conditions, such as parameter parallelism. +Currently, it supports the supercomputers ohtaka (using the slurm job scheduler) and kugui (using the PBS job scheduler) provided by the Institute for Solid State Physics, University of Tokyo.

+
+
+

1.2. License

+

The distribution of the program package and the source codes for moller follow GNU General Public License version 3 (GPL v3) or later.

+
+
+

1.3. Contributors

+

This software was developed by the following contributors.

+
    +
  • ver.1.0.0 (Released on 2024/03/06)

  • +
  • ver.1.0-beta (Released on 2023/12/28)

    +
      +
    • Developers

      +
        +
      • Kazuyoshi Yoshimi (The Institute for Solid State Physics, The University of Tokyo)

      • +
      • Tatsumi Aoyama (The Institute for Solid State Physics, The University of Tokyo)

      • +
      • Yuichi Motoyama (The Institute for Solid State Physics, The University of Tokyo)

      • +
      • Masahiro Fukuda (The Institute for Solid State Physics, The University of Tokyo)

      • +
      • Kota Ido (The Institute for Solid State Physics, The University of Tokyo)

      • +
      • Tetsuya Fukushima (The National Institute of Advanced Industrial Science and Technology (AIST))

      • +
      • Shusuke Kasamatsu (Yamagata University)

      • +
      • Takashi Koretsune (Tohoku University)

      • +
      +
    • +
    • Project Coordinator

      +
        +
      • Taisuke Ozaki (The Institute for Solid State Physics, The University of Tokyo)

      • +
      +
    • +
    +
  • +
+
+ +
+

1.5. Operating environment

+

moller was tested on the following platforms:

+
    +
  • Ubuntu Linux + python3

  • +
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/appendix/index.html b/manual/v1.0.0/en/html/moller/appendix/index.html new file mode 100644 index 0000000..2368bee --- /dev/null +++ b/manual/v1.0.0/en/html/moller/appendix/index.html @@ -0,0 +1,321 @@ + + + + + + + + 6. Extension guide — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

6. Extension guide

+

N.B. The content of this section may vary depending on the version of moller.

+
+

6.1. Bulk job execution by moller

+

A bulk job execution means that a set of small tasks is executed in parallel within a single batch job submitted to a large batch queue. This is shown schematically below: N tasks are launched as background processes and executed in parallel, and a wait statement is invoked to wait until all tasks have completed.

+
task param_1 &
+task param_2 &
+     ...
+task param_N &
+wait
+
+
+

To manage the bulk job, the nodes and cores allocated to the batch job must be distributed over the tasks param_1 … param_N so that the tasks run on distinct nodes and cores. It is also necessary to arrange task execution so that at most N tasks run simultaneously, according to the allocated resources.

+

Hereafter a job script generated by moller will be denoted as a moller script. In a moller script, the concurrent execution and control of tasks are managed by GNU parallel [1]. It takes a list holding the items param_1 … param_N and runs commands for each item in parallel. An example is given as follows, where list.dat contains param_1 … param_N, one per line.

+
cat list.dat | parallel -j N task
+
+
+

The number of concurrent tasks is determined at runtime from the number of nodes and cores obtained from the execution environment and from the degree of parallelism (the number of nodes, processes, and threads specified by the node parameter).

+
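As a rough illustration of this computation (a simplified sketch only, not the actual moller implementation; the variable names are hypothetical), the multiplicity could be derived in a shell script as follows:

# read the allocation size from the scheduler environment (SLURM case)
_nnodes=${SLURM_NNODES:-1}
_ncores=${SLURM_CPUS_ON_NODE:-1}
# degree of parallelism of a single task: processes x threads
task_procs=4
task_threads=2
# number of tasks that fit into the allocation at the same time
multiplicity=$(( _nnodes * _ncores / (task_procs * task_threads) ))
echo "running up to ${multiplicity} tasks concurrently"

GNU parallel would then be invoked with -j set to this multiplicity.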

The way tasks are assigned to nodes and cores varies according to the job scheduler. For SLURM job scheduler variants, the concurrent calls of the srun command within the batch job are assigned to distinct nodes and cores by exploiting the options for exclusive resource usage. The exact options may depend on the platform.

+
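For illustration only, such an exclusive job step might look like the following; the exact options are platform dependent and may differ from what moller actually emits:

# run one task on dedicated cores within the allocation; additional steps
# submitted this way wait until the requested resources become free
srun --exclusive -N 1 -n 4 -c 2 ./a.out input.dat &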

On the other hand, for PBS job scheduler variants that do not provide such a feature, the distribution of nodes and cores over the tasks has to be handled within the moller script. The nodes and cores allocated to a batch job are divided into slots, and the slots are assigned to the concurrent tasks. The division is determined from the allocated nodes and cores and the degree of parallelism of the task, and it is kept in the form of table variables. Within a task, the programs are executed on the assigned hosts and cores (optionally with the cores pinned to the program) through options to mpirun (or mpiexec) and environment variables. This feature depends on the MPI implementation.

+
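As a sketch (the option names vary between MPI implementations, and slot_host is a hypothetical variable standing for a slot-table entry), a task could launch its program on its assigned node like this:

# Open MPI style host:slots syntax; core pinning, if desired, would be added
# through implementation-specific options or environment variables
mpirun -np 4 -host "${slot_host}:4" ./a.out input.dat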

Reference

+

[1] O. Tange, GNU Parallel - The Command-Line Power Tool, ;login: The USENIX Magazine, February 2011: 42-47.

+
+
+

6.2. How moller works

+
+

Structure of moller script

+

moller reads the input YAML file and generates a job script for bulk execution. The structure of the generated script is described as follows; a schematic skeleton is sketched after the list.

+
    +
  1. Header

    +

    This part contains the options to the job scheduler. The content of the platform section is formatted according to the type of job scheduler. This feature depends on platforms.

    +
  2. +
  3. Prologue

    +

    This part corresponds to the prologue section of the input file. The content of the code block is written as-is.

    +
  4. +
  5. Function definitions

    +

    This part contains the definitions of functions and variables used within the moller script. The description of the functions will be given in the next section. This feature depends on platforms.

    +
  6. +
  7. Processing Command-line options

    +

    The SLURM variants accept additional arguments to the job submission command (sbatch) that are passed to the job script as command-line options. The name of the list file and/or options such as the retry feature can be processed.

    +

    For the PBS variants, these command-line arguments are ignored, and therefore the name of the list file is fixed to list.dat by default, and the retry feature may be enabled by modifying the script with retry set to 1.

    +
  8. +
  9. Description of tasks

    +

    This part contains the description of tasks specified in the jobs section of the input file. When more than one task is given, the following procedure is applied to each task.

    +
    +

    When parallel = false, the content of the run block is written as-is.

    +

    When parallel = true (default), a function is created by the name task_{task name} that contains the pre-processing for concurrent execution and the content of the run block. The keywords for the parallel execution (srun, mpiexec, or mpirun) are substituted by the platform-dependent command. The definition of the task function is followed by the concurrent execution command.

    +
    +
  10. +
  11. Epilogue

    +

    This part corresponds to the epilogue section of the input file. The content of the code block is written as-is.

    +
  12. +
+
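Putting these parts together, a generated moller script has roughly the following shape. This is a schematic skeleton for a SLURM system, not literal moller output; all concrete values and helper definitions are illustrative only.

#!/bin/bash
#SBATCH -p i8cpu                      # (1) header built from the platform section
#SBATCH -N 1
#SBATCH -t 00:10:00

module purge                          # (2) prologue block, copied as-is

run_parallel () { echo "run_parallel $*"; }   # (3) platform-dependent functions
                                              #     (placeholder definition here)

retry=0                               # (4) command-line handling: list file, --retry
list_file=${1:-list.dat}

task_hello () {                       # (5) one function per task with parallel=true;
  srun -n 1 hostname                  #     "srun" is replaced by the platform command
}
run_parallel "1 1 1" task_hello stat_hello.dat

date                                  # (6) epilogue block, copied as-is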
+
+

Brief description of moller script functions

+

The main functions of the moller script are briefly described below.

+
    +
  • run_parallel

    +

    This function performs concurrent execution of task functions. It takes the degree of parallelism, the task function, and the status file as arguments. Within the function, it calls _find_multiplicity to find the number of tasks that can be run simultaneously, and invokes GNU parallel to run tasks concurrently. The task function is actually wrapped by the _run_parallel_task function to deal with the nested call of GNU parallel.

    +

    The platform-dependence is separated out by the functions _find_multiplicity and _setup_run_parallel.

    +
  • +
  • _find_multiplicity

    +

    This function determines the number of tasks that can be simultaneously executed on the allocated resources (nodes and cores) taking account of the degree of parallelism of the task. +For the PBS variants, the compute nodes and the cores are divided into slots, and the slots are kept as table variables. +The information obtained at the batch job execution is summarized as follows.

    +
      +
    • For SLURM variants,

      +
      +

      The number of allocated nodes (_nnodes)

      +
      +

      SLURM_NNODES

      +
      +

      The number of allocated cores (_ncores)

      +
      +

      SLURM_CPUS_ON_NODE

      +
      +
      +
    • +
    • For PBS variants,

      +
      +

      The allocated nodes (_nodes[])

      +
      +

      The list of unique compute nodes is obtained from the file given by PBS_NODEFILE.

      +
      +

      The number of allocated nodes (_nnodes)

      +
      +

      The number of entries of _nodes[].

      +
      +

      The number of allocated cores

      +
      +

      Searched from the following sources (in order of examination)

      +
        +
      • NCPUS (for PBS Professional)

      • +
      • OMP_NUM_THREADS

      • +
      • core parameter of the platform section (written in the script as the variable moller_core)

      • +
      • ncpus or ppn parameter in the header.

      • +
      +
      +
      +
    • +
    +
  • +
  • _setup_run_parallel

    +

    This function is called from the run_parallel function to supplement some procedures before running GNU parallel. For PBS variants, the slot tables are exported so that the task functions can refer to them. For SLURM variants, there is nothing to do.

    +
  • +
+

The structure of the task function is described as follows; a schematic sketch is given after the list.

+
    +
  • A task function is created by a name task_{task name}.

  • +
  • The arguments of the task function are 1) the degree of parallelism (the number of nodes, processes, and threads), 2) the execution directory (which corresponds to an entry of the list file), and 3) the slot ID assigned by GNU parallel.

  • +
  • The platform-dependent _setup_taskenv function is called to set up the execution environment. For PBS variants, the compute node and the cores are obtained from the slot table based on the slot ID. For SLURM variants, there is nothing to do.

  • +
  • The _is_ready function is called to check if the preceding task has been completed successfully. If it is true, the remaining part of the function is executed. Otherwise, the task is terminated with the status -1.

  • +
  • The content of the code block is written. The keywords for parallel calculation (srun, mpiexec, or mpirun) are substituted by the command provided for the platform.

  • +
+
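A schematic sketch of such a task function is shown below; this is only an illustration, and the actual generated code and the signatures of _setup_taskenv and _is_ready may differ:

task_hello () {
  local parallelism=$1 dir=$2 slot_id=$3
  _setup_taskenv "$parallelism" "$slot_id"   # platform-dependent setup (no-op for SLURM)
  if ! _is_ready "$dir"; then                # skip if a preceding task failed
    return 255                               # exit status illustrative
  fi
  (
    cd "$dir" || exit 1
    srun ./a.out input.dat                   # "srun" substituted per platform
  )
}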
+
+
+

6.3. How to extend moller for other systems

+

The latest version of moller provides profiles for the ISSP supercomputer systems ohtaka and kugui. A guide to extending moller to other systems is given in the following.

+
+

Class structure

+

The platform-dependent parts of moller are placed in the directory platform/. +Their class structure is depicted below.

+
digraph class_diagram {
+size="5,5"
+node[shape=record,style=filled,fillcolor=gray95]
+edge[dir=back,arrowtail=empty]
+
+Platform[label="{Platform (base.py)}"]
+BaseSlurm[label="{BaseSlurm (base_slurm.py)}"]
+BasePBS[label="{BasePBS (base_pbs.py)}"]
+BaseDefault[label="{BaseDefault (base_default.py)}"]
+
+Ohtaka[label="{Ohtaka (ohtaka.py)}"]
+Kugui[label="{Kugui (kugui.py)}"]
+Pbs[label="{Pbs (pbs.py)}"]
+Default[label="{DefaultPlatform (default.py)}"]
+
+Platform->BaseSlurm
+Platform->BasePBS
+Platform->BaseDefault
+
+BaseSlurm->Ohtaka
+BasePBS->Kugui
+BasePBS->Pbs
+BaseDefault->Default
+}
+

A factory is provided to select a system in the input file. +A class is imported in platform/__init__.py and registered to the factory by register_platform(system_name, class_name), and then it becomes available in the system parameter of the platform section in the input YAML file.

+
+
+

SLURM job scheduler variants

+

For the SLURM job scheduler variants, the system-specific settings should be applied to a class derived from the BaseSlurm class. The string that substitutes the keywords for the parallel execution of programs is given by the return value of the parallel_command() method. It corresponds to the srun command with the options for exclusive use of resources. See ohtaka.py for an example.

+
+
+

PBS job scheduler variants

+

For the PBS job scheduler variants (PBS Professional, OpenPBS, Torque, and others), the system-specific settings should be applied to a derived class of BasePBS class.

+

There are two ways of specifying the number of nodes for a batch job in the PBS variants. PBS Professional takes the form select=N:ncpus=n, while Torque and others take the form nodes=N:ppn=n. The BasePBS class has a parameter self.pbs_use_old_format that is set to True for the latter type.

+

The number of cores per compute node can be specified by the node parameter of the input file, while a default value may be set for a known system. In kugui.py, the number of cores per node is set to 128 by default.

+
+
+

Customizing features

+

When further customization is required, the methods of the base class may be overridden in the derived classes. The list of relevant methods is given below.

+
    +
  • setup

    +

    This method extracts parameters of the platform section.

    +
  • +
  • parallel_command

    +

    This method returns a string that is used to substitute the keywords for parallel execution of programs (srun, mpiexec, mpirun).

    +
  • +
  • generate_header

    +

    This method generates the header part of the job script that contains options to the job scheduler.

    +
  • +
  • generate_function

    +

    This method generates functions that are used within the moller script. It calls the following methods to generate function body and variable definitions.

    +
      +
    • generate_variable

    • +
    • generate_function_body

    • +
    +

    The definitions of the functions are provided as embedded strings within the class.

    +
  • +
+
+
+

Porting to new type of job scheduler

+

The platform-dependent parts of the moller scripts are the calculation of task multiplicity, the resource distribution over tasks, and the command string of parallel calculation. +The internal functions need to be developed with the following information on the platform:

+
    +
  • how to acquire the allocated nodes and cores from the environment at the execution of batch jobs.

  • +
  • how to launch parallel calculation (e.g. mpiexec command) and how to assign the nodes and cores to the command.

  • +
+

To find which environment variables are set within the batch jobs, it may be useful to call the printenv command in the job script.

+
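For example, a minimal probe job such as the following (illustrative) can be submitted once to record what the scheduler exports:

#!/bin/bash
# dump the environment of a batch job for later inspection
printenv | sort > env_dump.txt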
+
+

Troubleshooting

+

When the variable _debug in the moller script is set to 1, the debug outputs are printed during the execution of the batch jobs. If the job does not work well, it is recommended that the debug option is turned on and the output is examined to check if the internal parameters are appropriately defined.

+
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/basic-usage.html b/manual/v1.0.0/en/html/moller/basic-usage.html new file mode 100644 index 0000000..ba1789c --- /dev/null +++ b/manual/v1.0.0/en/html/moller/basic-usage.html @@ -0,0 +1,259 @@ + + + + + + + + 2. Installation and basic usage — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

2. Installation and basic usage

+

Prerequisite

+
+

The comprehensive calculation utility moller, included in HTP-tools, requires the following programs and libraries:

+
    +
  • Python 3.x

  • +
  • ruamel.yaml module

  • +
  • tabulate module

  • +
  • GNU Parallel (It must be installed on servers or compute nodes on which the job script is executed.)

  • +
+
+

Official pages

+
+
+

Downloads

+
+

moller can be downloaded by the following command with git:

+
$ git clone https://github.com/issp-center-dev/Moller.git
+
+
+
+

Installation

+
+

Once the source files are obtained, you can install moller by running the following command. The required libraries will also be installed automatically at the same time.

+
$ cd ./Moller
+$ python3 -m pip install .
+
+
+

The executable files moller and moller_status will be installed.

+
+

Directory structure

+
+
.
+|-- LICENSE
+|-- README.md
+|-- pyproject.toml
+|-- docs/
+|   |-- ja/
+|   |-- en/
+|   |-- tutorial/
+|-- src/
+|   |-- moller/
+|       |-- __init__.py
+|       |-- main.py
+|       |-- platform/
+|       |   |-- __init__.py
+|       |   |-- base.py
+|       |   |-- base_slurm.py
+|       |   |-- base_pbs.py
+|       |   |-- base_default.py
+|       |   |-- ohtaka.py
+|       |   |-- kugui.py
+|       |   |-- pbs.py
+|       |   |-- default.py
+|       |   |-- function.py
+|       |   |-- utils.py
+|       |-- moller_status.py
+|-- sample/
+
+
+
+

Basic usage

+
+

moller is a tool that generates batch job scripts for supercomputers, in which programs are run in parallel over a set of execution conditions by using concurrent execution features.

+
    +
  1. Prepare job description file

    +
    +

    First, you need to create a job description file in YAML format that describes the tasks to be executed on supercomputers. The details of the format will be given in File Format section of the manual.

    +
    +
  2. +
  3. Run command

    +
    +

    Run moller program with the job description file, and a batch job script will be generated.

    +
    $ moller -o job.sh input.yaml
    +
    +
    +
    +
  4. +
  5. Run batch jobs

    +
    +

    Transfer the generated batch job scripts to the supercomputer. Prepare a directory for each parameter set, and create a list of the directory names in a file list.dat. Note that the list should contain paths relative to the directory where the batch job is executed, or absolute paths.

    +

    Once the list file is ready, you may submit a batch job. The actual command depends on the system.

    +
      +
    • In case of ISSP system B (ohtaka)

      +

      In ohtaka, slurm is used for the job scheduling system. In order to submit a batch job, a command sbatch is invoked with the job script as an argument. Parameters can be passed to the script as additional arguments; the name of list file is specified as a parameter.

      +
      $ sbatch job.sh list.dat
      +
      +
      +

      If the list file is not specified, list.dat is used by default.

      +
    • +
    • In case of ISSP system C (kugui)

      +

      In kugui, PBS is used for the job scheduling system. In order to submit a batch job, a command qsub is invoked with the job script. There is no way to pass parameters to the script, and thus the name of the list file is fixed to list.dat.

      +
      $ qsub job.sh
      +
      +
      +
    • +
    +
    +
  6. +
  7. Check the status of the calculation

    +
    +

    After the job finishes, you may run the following command

    +
    $ moller_status input.yaml list.dat
    +
    +
    +

    to obtain a report whether the calculation for each parameter set has been completed successfully.

    +
    +
  8. +
  9. Retry/resume job

    +
    +

    In case the job is terminated during the execution, the job may be resumed by submitting the batch job again with the same list file. +The yet unexecuted jobs (as well as the unfinished jobs) will be run.

    +
      +
    • In case of ISSP system B (ohtaka)

    • +
    +
    $ sbatch job.sh list.dat
    +
    +
    +

    To retry the failed tasks, the batch job is submitted with --retry command line option.

    +
    $ sbatch job.sh --retry list.dat
    +
    +
    +
      +
    • In case of ISSP system C (kugui)

    • +
    +

    For kugui, to retry the failed tasks, the batch job script should be edited so that retry=0 is changed to retry=1 (see the sketch after this list).

    +
    $ qsub job.sh
    +
    +
    +

    Then, the batch job is submitted as above.

    +
    +
  10. +
+
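As a sketch of the edit mentioned for kugui above, assuming the generated script contains a line of exactly the form retry=0, the change can be made with sed before resubmitting:

$ sed -i 's/^retry=0/retry=1/' job.sh
$ qsub job.sh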
+

References

+

[1] O. Tange, GNU Parallel - The Command-Line Power Tool, ;login: The USENIX Magazine, February 2011: 42-47.

+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/command/index.html b/manual/v1.0.0/en/html/moller/command/index.html new file mode 100644 index 0000000..8897888 --- /dev/null +++ b/manual/v1.0.0/en/html/moller/command/index.html @@ -0,0 +1,206 @@ + + + + + + + + 4. Command reference — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

4. Command reference

+
+

4.1. moller

+
+

Generate a batch job script for comprehensive calculation

+
+

SYNOPSIS:

+
+
moller [-o job_script] input_yaml
+
+
+
+

DESCRIPTION:

+
+

This program reads a job description file specified by input_yaml, and generates a batch job script. It takes the following command line options.

+
    +
  • -o, --output job_script

    +

    specifies output file name. This option supersedes the output_file parameter in the job description file. If no output file is specified, the result is written to the standard output.

    +
  • +
  • -h

    +

    displays help and exits.

    +
  • +
+
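For example, to generate a job script job.sh from a job description file input.yaml:

$ moller -o job.sh input.yaml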
+
+
+

4.2. moller_status

+
+

Reports the status of comprehensive calculation jobs

+
+

SYNOPSIS:

+
+
moller_status [-h] [--text|--csv|--html] [--ok|--failed|--skipped|--collapsed|--yet] [-o output_file] input_yaml [list_file]
+
+
+
+

DESCRIPTION:

+
+

This program summarizes the status of tasks in jobs that are executed through the job scripts generated by moller, and outputs a report. The tasks are obtained from the job description file specified by input_yaml. The list of jobs is read from the file specified by list_file. If it is not provided, the job list is extracted from the log files. +The format of the output is specified by a command line option. The default is the text format. The output file is specified by the -o or --output option. If it is not specified, the output is written to the standard output.

+
    +
  • output formats

    +

    specifies the format of the output by one of the following options. If more than one option is specified, the program terminates with an error. The default is the text format.

    +
      +
    • --text +displays in text format.

    • +
    • --csv +displays in CSV (comma-separated values) format.

    • +
    • --html +displays in HTML format.

    • +
    +
  • +
  • input_yaml

    +

    specifies the job description file for moller.

    +
  • +
  • list_file

    +

    specifies the file that contains the list of job directories. If this file is not specified, the list will be obtained from the log files of the batch job, stat_{task}.dat.

    +
  • +
  • -o, --output output_file

    +

    specifies the output file name. If it is omitted, the result is written to the standard output.

    +
  • +
  • filter options

    +

    specifies the status of jobs to be displayed by one of the following options. All jobs are displayed by default.

    +
      +
    • --ok +displays only jobs whose tasks are all completed successfully.

    • +
    • --failed displays jobs, any of whose tasks have failed with errors, been skipped, or not been performed.

    • +
    • --skipped +displays jobs, any of whose tasks are skipped.

    • +
    • --yet +displays jobs, any of whose tasks are not yet performed.

    • +
    • --collapsed displays jobs, any of whose tasks have failed with errors.

    • +
    • --all +displays all jobs. (default)

    • +
    +
  • +
  • -h

    +

    displays help and exits.

    +
  • +
+
+
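For example, to write a CSV report of only the failed jobs to a file (an illustrative combination of the options described above):

$ moller_status --csv --failed -o failed_report.csv input.yaml list.dat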

FILES:

+
+

When the programs are executed concurrently using the job script generated by moller, the status of the tasks is written to log files stat_{task}.dat. moller_status reads these log files and makes a summary.

+
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/filespec/index.html b/manual/v1.0.0/en/html/moller/filespec/index.html new file mode 100644 index 0000000..3d0e70c --- /dev/null +++ b/manual/v1.0.0/en/html/moller/filespec/index.html @@ -0,0 +1,253 @@ + + + + + + + + 5. File format — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

5. File format

+
+

5.1. Job description file

+

A job description file contains configurations to generate a batch job script by moller. It is prepared in text-based YAML format. This file consists of the following parts:

+
+
    +
  1. General settings: specifies job names and output files.

  2. +
  3. platform section: specifies the system on which batch jobs are executed, and the settings for the batch jobs.

  4. +
  5. prologue and epilogue sections: specifies initial settings and finalization within the batch job.

  6. +
  7. jobs section: specifies tasks to be carried out in the batch job script.

  8. +
+
+
+

General settings

+
+

name

+
+

specifies the name of the batch job. If it is not given, the job name is left unspecified. (Usually the name of the job script is used as the job name.)

+
+

description

+
+

provides the description of the batch job. It is regarded as comments.

+
+

output_file

+
+

specifies the output file name. When the output file is given by a command-line option, the command-line parameter is used. When none of them is specified, the result is written to the standard output.

+
+
+
+
+

platform

+
+

system

+
+

specifies the target system. At present, either ohtaka or kugui is accepted.

+
+

queue

+
+

specifies the name of batch queue. The actual value depends on the target system.

+
+

node

+
+

specifies the number of nodes to be used. It is given by an integer specifying the number of nodes, or a list of integers specifying [ number of nodes, number of cores per node ]. The accepted range of parameters depends on the system and queue settings. (The number of cores is accepted for kugui and default systems; otherwise it is ignored.)

+
+

core

+
+

specifies the number of cores per node to be used. The accepted range of parameters depends on the system and queue settings. If both the second parameter of node and core are specified, the value in core is used. (This parameter is accepted for the kugui and default target systems.)

+
+

elapsed

+
+

specifies the elapsed time of the batch job in HH:MM:SS format.

+
+

options

+
+

specifies other batch job options. It is given as a list of options or as a multiple-line string with one option per line. The leading directives (e.g. #PBS or #SBATCH) are not included. Examples are given as follows.

+
    +
  • an example of SLURM job script in the string format:

    +
    options: |
    +  --mail-type=BEGIN,END,FAIL
    +  --mail-user=user@sample.com
    +  --requeue
    +
    +
    +
  • +
  • an example of PBS job script in the list format:

    +
    options:
    +  - -m bea
    +  - -M user@sample.com
    +  - -r y
    +
    +
    +
  • +
+
+
+
+
+

prologue, epilogue

+

The prologue section specifies the commands to be run prior to executing the tasks. It is used, for example, to set environment variables for libraries and paths. The epilogue section specifies the commands to be run after all tasks have been completed.

+
+

code

+
+

specifies the content of the commands in the form of shell script. It is embedded in the batch job script, and executed within the batch job.

+
+
+
+
+

jobs

+

jobs section specifies a sequence of tasks in a table format, with the task names as keys and the contents as values.

+
+

key

+
+

name of task

+
+

value

+
+

a table that consists of the following items:

+
+

description

+
+

provides the description of the task. It is regarded as comments.

+
+

node

+
+

specifies the degree of parallelism in one of the following formats.

+
    +
  • [ number of processes, number of threads per process ]

  • +
  • [ number of nodes, number of processes, number of threads per process ]

  • +
  • number of nodes

  • +
+

When the number of nodes is specified, the specified number of nodes are exclusively assigned to a job. Otherwise, if the required number of cores for a job is smaller than the number of cores in a node, more than one job may be allocated in a single node. If a job uses more than one node, the required number of nodes are exclusively assigned.

+
+

parallel

+
+

This parameter is set to true if the tasks of different jobs are executed in parallel. It is set to false if they are executed sequentially. The default value is true.

+
+

run

+
+

The content of the task is described in the form of a shell script. The execution of MPI parallel programs or MPI/OpenMP hybrid parallel programs is specified by

+
srun prog [arg1, ...]
+
+
+

where mpirun or mpiexec is also accepted in addition to the keyword srun. In the resulting job script, these keywords are replaced by the platform command (e.g. srun or mpirun) together with the degree of parallelism specified by the node parameter.

+
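As an illustration of this substitution (the exact options inserted are platform dependent and are not specified here), a run block containing srun prog input.dat together with node: [4, 2] might end up in the generated script as something like:

srun -n 4 -c 2 prog input.dat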
+
+
+
+
+
+
+

5.2. List file

+

This file contains a list of jobs. It is a text file with one job name per line (the name of the directory is used as the name of the job).

+

moller assumes that a directory is assigned to each job, and the tasks of the job are executed within the directory. These directories are supposed to be located in the directory where the batch job is submitted.

+
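For example, a list file for three job directories located next to the job script might look like this:

$ cat list.dat
dataset-0001
dataset-0002
dataset-0003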
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/index.html b/manual/v1.0.0/en/html/moller/index.html new file mode 100644 index 0000000..6fa6e6c --- /dev/null +++ b/manual/v1.0.0/en/html/moller/index.html @@ -0,0 +1,152 @@ + + + + + + + + Comprehensive Calculation Utility (moller) — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/tutorial/basic.html b/manual/v1.0.0/en/html/moller/tutorial/basic.html new file mode 100644 index 0000000..ada8621 --- /dev/null +++ b/manual/v1.0.0/en/html/moller/tutorial/basic.html @@ -0,0 +1,336 @@ + + + + + + + + 3.1. Basic usage — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

3.1. Basic usage

+

The procedure to use the batch job script generator moller consists of the following steps: First, a job description file is prepared that defines the tasks to be executed. Next, the program moller is run with the job description file, and a batch job script is generated. The script is then transferred to the target supercomputer system, and a batch job is submitted with the script to perform the calculations. In this tutorial, we explain these steps using the sample in docs/tutorial/moller.

+
+

Prepare job description file

+

A job description file describes the content of the calculations that are carried out in a batch job. Here, a batch job refers to a set of instructions submitted to the job scheduler running on a supercomputer system. On the other hand, for the concurrent execution of programs that moller handles, we call a series of program executions performed for one set of parameters a job. A job may consist of several steps that we call tasks. moller organizes job execution so that each task is run in parallel, and synchronization between the jobs is taken at every start and end of the tasks.

+
+Tasks and jobs +
+

Fig. 3.1 An example of tasks and jobs: Three jobs #1 … #3 are carried out within a single batch job. Each job corresponds to a different set of parameters. A job consists of 4 tasks. Each task is run in parallel among these three jobs.

+
+
+

An example of job description file is presented in the following. A job description file is in text-based YAML format. It contains parameters concerning the platform and the batch job, task descriptions, and pre/post-processes.

+
name: testjob
+description: Sample task file
+
+platform:
+  system:  ohtaka
+  queue:   i8cpu
+  node:    1
+  elapsed: 00:10:00
+
+prologue:
+  code: |
+    module purge
+    module load oneapi_compiler/2023.0.0 openmpi/4.1.5-oneapi-2023.0.0-classic
+
+    ulimit -s unlimited
+
+    source /home/issp/materiapps/intel/parallel/parallelvars-20210622-1.sh
+
+jobs:
+  start:
+    parallel: false
+    run: |
+      echo "start..."
+
+  hello:
+    description: hello world
+    node: [1,1]
+    run: |
+      echo "hello world." > result.txt
+      sleep 2
+
+  hello_again:
+    description: hello world again
+    node: [1,1]
+    run: |
+      echo "hello world again." >> result.txt
+      sleep 2
+
+epilogue:
+  code: |
+    echo "done."
+    date
+
+
+

In the platform section, you can specify the type of platform on which to execute. In this case, settings for ISSP System B (ohtaka) are made.

+

The prologue section describes the preprocessing of the batch job. +It details the common command line to be executed before running the task.

+

In the jobs section, the content of the task processing is described. +The series of tasks to be executed in the job are described in a table format, +with the task name as the key and the processing content as the value.

+

In this example, a task that first outputs “start…” is defined with the task name “start”. +Here, it is set to parallel = false. +In this case, the content of run parameter is executed sequentially.

+

Next, a task that outputs “hello world.” is defined with the task name “hello”. Here, since parallel is not set, it is treated as parallel = true. In this case, parallel processing is performed on a per-job basis. Similarly, a task that outputs “hello world again.” is defined next with the task name “hello_again”.

+

Finally, in the epilogue section, the post-processing of the batch job is described. +It details the common command line to be executed after running the task.

+

For more details on the specifications, please refer to the chapter File Format.

+
+
+

Generate batch job script

+

moller is to be run with the job description file (input.yaml) as an input as follows:

+
$ moller -o job.sh input.yaml
+
+
+

A batch job script is generated and written to a file specified by the parameter in the job description file or by the command line option -o or --output. If both are specified, the command line option is used. If neither is specified, the result is written to the standard output.

+

The obtained batch job script is to be transferred to the target system as required. Note that the batch job script is written for bash; users may need to set the shell for job execution to bash. (Care is needed if the login shell is a csh variant.)

+
+
+

Create list file

+

A list of jobs is to be created. moller is designed so that each job is executed within a directory prepared for the job with the job name. The job list can be created, for example, by the following command:

+
$ /usr/bin/ls -1d * > list.dat
+
+
+

In this tutorial, a utility script make_inputs.sh is enclosed which generates datasets and a list file.

+
$ bash ./make_inputs.sh
+
+
+

By running the above command, a directory output and a set of subdirectories dataset-0001dataset-0020 that correspond to datasets, and a list file list.dat are created.

+
+
+

Run batch job

+

The batch job is to be submitted to the job scheduler with the batch job script. In this example, the job script and the input parameter files are copied into the output directory, and the current directory is changed to output as follows:

+
$ cp job.sh input.yaml output/
+$ cd output
+
+
+

In ohtaka, slurm is used for the job scheduling system. In order to submit a batch job, a command sbatch is invoked with the job script as an argument. +Parameters can be passed to the script as additional arguments; the name of list file is specified as a parameter.

+
$ sbatch job.sh list.dat
+
+
+

Files named result.txt will be generated in each directory listed in list.dat. You can confirm that result.txt contains the strings “hello world.” and “hello world again.” as the job results.

+
+
+

Check status

+

The status of execution of the tasks are written to log files. A tool named moller_status is provided to generate a summary of the status of each job from the log files. It is invoked by the following command in the directory where the batch job is executed:

+
$ moller_status input.yaml list.dat
+
+
+

The command takes the job description file input.yaml and the list file list.dat as arguments. The list file may be omitted; in this case, the information of the jobs are extracted from the log files.

+

An example of the output is shown below:

+
| job          | hello   | hello_again   |
+|--------------|---------|---------------|
+| dataset-0001 | o       | o             |
+| dataset-0002 | o       | o             |
+| dataset-0003 | o       | o             |
+| dataset-0004 | o       | o             |
+| dataset-0005 | o       | o             |
+| dataset-0006 | o       | o             |
+| dataset-0007 | o       | o             |
+| dataset-0008 | o       | o             |
+| dataset-0009 | o       | o             |
+| dataset-0010 | o       | o             |
+| dataset-0011 | o       | o             |
+| dataset-0012 | o       | o             |
+| dataset-0013 | o       | o             |
+| dataset-0014 | o       | o             |
+| dataset-0015 | o       | o             |
+| dataset-0016 | o       | o             |
+| dataset-0017 | o       | o             |
+| dataset-0018 | o       | o             |
+| dataset-0019 | o       | o             |
+| dataset-0020 | o       | o             |
+
+
+

where “o” corresponds to a task that has been completed successfully, “x” corresponds to a failed task, “-” corresponds to a task skipped because the previous task terminated with errors, and “.” corresponds to a task not yet executed. In the above example, all tasks have been completed successfully.

+
+
+

Rerun failed tasks

+

If a task fails, the subsequent tasks within the job will not be executed. The following is an example of job status in which each task fails with a 10% chance.

+
| job          | task1   | task2   | task3   |
+|--------------|---------|---------|---------|
+| dataset_0001 | o       | o       | o       |
+| dataset_0002 | o       | x       | -       |
+| dataset_0003 | x       | -       | -       |
+| dataset_0004 | x       | -       | -       |
+| dataset_0005 | o       | o       | o       |
+| dataset_0006 | o       | o       | o       |
+| dataset_0007 | o       | x       | -       |
+| dataset_0008 | o       | o       | o       |
+| dataset_0009 | o       | o       | x       |
+| dataset_0010 | o       | o       | o       |
+| dataset_0011 | o       | o       | o       |
+| dataset_0012 | o       | o       | o       |
+| dataset_0013 | o       | x       | -       |
+| dataset_0014 | o       | o       | o       |
+| dataset_0015 | o       | o       | o       |
+| dataset_0016 | o       | o       | o       |
+| dataset_0017 | o       | o       | o       |
+| dataset_0018 | o       | o       | o       |
+| dataset_0019 | o       | o       | o       |
+| dataset_0020 | o       | o       | o       |
+
+
+

Here, the jobs of dataset_0003 and dataset_0004 failed at task1, and the subsequent task2 and task3 were not executed. The other jobs were successful at task1 and proceeded to task2. In this way, each job is executed independently of the other jobs.

+

Users can rerun the failed tasks by submitting the batch job with the retry option. +For SLURM job scheduler (e.g. used in ISSP system B), resubmit the job as follows:

+
$ sbatch job.sh --retry list.dat
+
+
+

For PBS job scheduler (e.g. used in ISSP system C), edit the job script so that the line retry=0 is replaced by retry=1, and resubmit the job.

+
| job          | task1   | task2   | task3   |
+|--------------|---------|---------|---------|
+| dataset_0001 | o       | o       | o       |
+| dataset_0002 | o       | o       | x       |
+| dataset_0003 | o       | x       | -       |
+| dataset_0004 | o       | o       | o       |
+| dataset_0005 | o       | o       | o       |
+| dataset_0006 | o       | o       | o       |
+| dataset_0007 | o       | o       | o       |
+| dataset_0008 | o       | o       | o       |
+| dataset_0009 | o       | o       | o       |
+| dataset_0010 | o       | o       | o       |
+| dataset_0011 | o       | o       | o       |
+| dataset_0012 | o       | o       | o       |
+| dataset_0013 | o       | o       | o       |
+| dataset_0014 | o       | o       | o       |
+| dataset_0015 | o       | o       | o       |
+| dataset_0016 | o       | o       | o       |
+| dataset_0017 | o       | o       | o       |
+| dataset_0018 | o       | o       | o       |
+| dataset_0019 | o       | o       | o       |
+| dataset_0020 | o       | o       | o       |
+
+
+

The tasks that have failed will be executed in the second run. In the above example, task1 for dataset_0003 succeeded in the second run, but task2 failed. For dataset_0004, task1, task2, and task3 were all executed successfully. For the jobs whose tasks had already finished successfully, the second run does not do anything.

+

N.B. the list file must not be modified on the rerun. The jobs are managed according to the order of entries in the list file, and therefore, if the order is changed, the jobs will not be executed properly.

+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/tutorial/dsqss.html b/manual/v1.0.0/en/html/moller/tutorial/dsqss.html new file mode 100644 index 0000000..2457b43 --- /dev/null +++ b/manual/v1.0.0/en/html/moller/tutorial/dsqss.html @@ -0,0 +1,203 @@ + + + + + + + + 3.3. Example for moller calculation with DSQSS — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

3.3. Example for moller calculation with DSQSS

+
+

What’s this sample?

+

This is an example of moller with DSQSS, which is an open-source software package for performing the path-integral Monte Carlo method for quantum many-body problems. In this example, we will calculate the temperature dependence of the magnetic susceptibilities \(\chi\) of the \(S=1/2\) (\(M=1\) in DSQSS terms) and \(S=1\) (\(M=2\)) antiferromagnetic Heisenberg chains under the periodic boundary condition with several lengths. By using moller, calculations with different parameters (\(M, L, T\)) are performed in parallel.

+

This example corresponds to one of the official tutorials.

+
+
+

Preparation

+

Make sure that the moller (HTP-tools) package and DSQSS are installed. In this tutorial, the calculation will be performed using the supercomputer system ohtaka at ISSP.

+
+
+

How to run

+
    +
  1. Prepare dataset

    +

    Run the script make_inputs.sh enclosed within this package.

    +
    $ bash ./make_inputs.sh
    +
    +
    +

    This makes an output directory (if it already exists, it is first removed and then created again). Under output, working directories for each parameter set, such as L_8__M_1__T_1.0, will be generated. A list of the directories is written to a file list.dat.

    +
  2. +
  3. Generate job script using moller

    +

    Generate a job script from the job description file using moller, +and store the script as a file named job.sh.

    +
    $ moller -o job.sh input.yaml
    +
    +
    +

    Then, copy job.sh into the output directory, and change the working directory to output.

    +
  4. +
  5. Run batch job

    +

    Submit a batch job with the job list as an argument.

    +
    $ sbatch job.sh list.dat
    +
    +
    +
  6. +
  7. Check status

    +

    The status of task execution will be summarized by moller_status +program.

    +
    $ moller_status input.yaml list.dat
    +
    +
    +
  8. +
  9. Gather results

    +

    After the calculation finishes, gather the results by

    +
    $ python3 ../extract_result.py list.dat
    +
    +
    +

    This script writes the results into a text file result.dat which has 5 columns: \(M\), \(L\), \(T\), the mean of \(\chi\), and the standard error of \(\chi\).

    +

    To visualize the results, GNUPLOT files plot_M1.plt and +plot_M2.plt are available.

    +
    $ gnuplot --persist plot_M1.plt
    +$ gnuplot --persist plot_M2.plt
    +
    +
    +

    Magnetic susceptibilities for \(S=1/2\) and for \(S=1\).

    +

    The main difference between the \(S=1/2\) and \(S=1\) AFH chains is whether the excitation gap vanishes (\(S=1/2\)) or remains open (\(S=1\)). Reflecting this, the magnetic susceptibility in the very low temperature region remains finite (\(S=1/2\)) or vanishes (\(S=1\)). Note that for the \(S=1/2\) case, the finite-size effect opens a spin gap and therefore the magnetic susceptibility of small chains drops.

    +
  10. +
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/tutorial/hphi.html b/manual/v1.0.0/en/html/moller/tutorial/hphi.html new file mode 100644 index 0000000..dac4df3 --- /dev/null +++ b/manual/v1.0.0/en/html/moller/tutorial/hphi.html @@ -0,0 +1,209 @@ + + + + + + + + 3.2. Example for moller calculation with HPhi — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

3.2. Example for moller calculation with HPhi

+
+

What’s this sample?

+

This is an example of moller with HPhi, which is an open-source software package for performing the exact diagonalization method for quantum many-body problems. In this example, we will calculate the system size dependence of the excitation gap \(\Delta\) of the \(S=1/2\) (2S_1 directory) and \(S=1\) (2S_2) antiferromagnetic Heisenberg chains under the periodic boundary condition. By using moller, calculations with different system sizes are performed in parallel. This corresponds to section 1.4 of HPhi’s official tutorial.

+
+
+

Preparation

+

Make sure that the moller (HTP-tools) package and HPhi are installed. In this tutorial, the calculation will be performed using the supercomputer system ohtaka at ISSP.

+
+
+

How to run

+
    +
  1. Prepare dataset

    +

    Run the script make_inputs.sh enclosed within this package.

    +
    $ bash ./make_inputs.sh
    +
    +
    +

    Working directories L_8, L_10, …, L_24 (up to L_18 for 2S_2) will be generated. A list of the directories is written to a file list.dat. Additionally, a shell script, extract_gap.sh, to gather energy gaps from the working directories is generated.

    +
  2. +
  3. Generate job script using moller

    +

    Generate a job script from the job description file using moller, +and store the script as a file named job.sh.

    +
    $ moller -o job.sh input.yaml
    +
    +
    +
  4. +
  5. Run batch job

    +

    Submit a batch job with the job list as an argument.

    +
    $ sbatch job.sh list.dat
    +
    +
    +
  6. +
  7. Check status

    +

    The status of task execution will be summarized by moller_status +program.

    +
    $ moller_status input.yaml list.dat
    +
    +
    +
  8. +
  9. Gather results

    +

    Once the calculation finishes, gather energy gaps from jobs as

    +
    $ bash extract_gap.sh
    +
    +
    +

    This script writes pairs of the length \(L\) and the gap +\(\Delta\) into a text file, gap.dat.

    +

    To visualize the results, a Gnuplot file gap.plt is available. In +this file, the obtained gap data are fitted by the expected curves,

    +
    +(3.1)\[\Delta(L; S=1/2) = \Delta_\infty + A/L\]
    +

    and

    +
    +(3.2)\[\Delta(L; S=1) = \Delta_\infty + B\exp(-CL).\]
    +

    The result is plotted as follows:

    +
    $ gnuplot --persist gap.plt
    +
    +
    +
    +Finite size effect of spin gap +
    +

    Fig. 3.3 Finite size effect of spin gap

    +
    +
    +

    Note that due to the logarithmic correction, the extrapolated spin gap for \(S=1/2\) remains finite even though the true gap vanishes. On the other hand, for \(S=1\), the extrapolated value \(\Delta_\infty = 0.417(1)\) is consistent with previous results, e.g., \(\Delta_\infty = 0.41048(6)\) by QMC (Todo and Kato, PRL 87, 047203 (2001)).

    +
  10. +
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/moller/tutorial/index.html b/manual/v1.0.0/en/html/moller/tutorial/index.html new file mode 100644 index 0000000..330382a --- /dev/null +++ b/manual/v1.0.0/en/html/moller/tutorial/index.html @@ -0,0 +1,144 @@ + + + + + + + + 3. Tutorial — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/objects.inv b/manual/v1.0.0/en/html/objects.inv new file mode 100644 index 0000000..b7ee2c3 --- /dev/null +++ b/manual/v1.0.0/en/html/objects.inv @@ -0,0 +1,5 @@ +# Sphinx inventory version 2 +# Project: Moller Users Guide +# Version: 1.0 +# The remainder of this file is compressed using zlib. +xڍN Auգ߃IM`aP@R֒72F%8$١!z}#5W2ioZJuhy}/RXb {wHpke$31!- \ No newline at end of file diff --git a/manual/v1.0.0/en/html/search.html b/manual/v1.0.0/en/html/search.html new file mode 100644 index 0000000..34f2ebe --- /dev/null +++ b/manual/v1.0.0/en/html/search.html @@ -0,0 +1,123 @@ + + + + + + + Search — Moller Users Guide 1.0.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +

Search

+ + + + +

+ Searching for multiple words only shows matches that contain + all words. +

+ + +
+ + + +
+ + + +
+ +
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/en/html/searchindex.js b/manual/v1.0.0/en/html/searchindex.js new file mode 100644 index 0000000..232b656 --- /dev/null +++ b/manual/v1.0.0/en/html/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"docnames": ["index", "moller/about/index", "moller/appendix/index", "moller/basic-usage", "moller/command/index", "moller/filespec/index", "moller/index", "moller/tutorial/basic", "moller/tutorial/dsqss", "moller/tutorial/hphi", "moller/tutorial/index"], "filenames": ["index.rst", "moller/about/index.rst", "moller/appendix/index.rst", "moller/basic-usage.rst", "moller/command/index.rst", "moller/filespec/index.rst", "moller/index.rst", "moller/tutorial/basic.rst", "moller/tutorial/dsqss.rst", "moller/tutorial/hphi.rst", "moller/tutorial/index.rst"], "titles": ["Moller Users Guide", "1. Introduction", "6. Extension guide", "2. Installation and basic usage", "4. Command reference", "5. File format", "Comprehensive Calculation Utility (moller)", "3.1. Basic usage", "3.3. Example for moller calculation with DSQSS", "3.2. Example for moller calculation with HPhi", "3. Tutorial"], "terms": {"comprehens": [0, 3, 4], "calcul": [0, 2, 3, 4, 7, 10], "util": [0, 3, 7], "introduct": [0, 6], "instal": [0, 6, 8, 9], "basic": [0, 6, 10], "usag": [0, 2, 6, 10], "tutori": [0, 3, 6, 7, 8, 9], "command": [0, 2, 3, 5, 6, 7], "refer": [0, 2, 3, 6, 7], "file": [0, 2, 3, 4, 6, 8, 9, 10], "format": [0, 2, 3, 4, 6, 7], "extens": [0, 6], "In": [1, 2, 3, 5, 7, 8, 9], "recent": 1, "year": 1, "us": [1, 2, 3, 4, 5, 7, 8, 9], "machin": 1, "learn": 1, "predict": 1, "materi": 1, "properti": 1, "design": [1, 7], "substanc": 1, "known": [1, 2], "informat": 1, "ha": [1, 2, 3, 7, 8], "gain": 1, "consider": 1, "attent": 1, "The": [1, 2, 3, 4, 5, 7, 8, 9], "accuraci": 1, "depend": [1, 2, 3, 5, 8, 9], "heavili": 1, "prepar": [1, 3, 5, 10], "appropri": [1, 2], "train": 1, "data": [1, 9], "therefor": [1, 2, 7, 8], "develop": [1, 2], "tool": [1, 2, 3, 7, 8, 9], "rapid": 1, "gener": [1, 2, 3, 4, 8, 9, 10], "expect": [1, 9], "contribut": 1, "significantli": 1, "advanc": 1, "research": 1, "provid": [1, 2, 4, 5, 7], "part": [1, 2, 5], "htp": [1, 3, 8, 9], "packag": [1, 8, 9], "support": 1, "high": 1, "throughput": 1, "comput": [1, 2, 3], "It": [1, 2, 3, 4, 5, 7], "batch": [1, 2, 3, 4, 5, 8, 9, 10], "job": [1, 3, 4, 6, 8, 9, 10], "script": [1, 3, 4, 5, 8, 9, 10], "supercomput": [1, 2, 3, 7, 8, 9], "cluster": 1, "allow": 1, "parallel": [1, 2, 3, 5, 7, 8, 9], "execut": [1, 3, 4, 5, 6, 7, 8, 9], "program": [1, 2, 3, 4, 5, 7, 8, 9], "under": [1, 8, 9], "seri": [1, 7], "condit": [1, 3, 8, 9], "paramet": [1, 2, 3, 4, 5, 7, 8], "current": [1, 7], "ohtaka": [1, 2, 3, 5, 7, 8, 9], "slurm": [1, 3, 5, 7], "schedul": [1, 3, 7], "kugui": [1, 2, 3, 5], "pb": [1, 3, 5, 7], "institut": 1, "solid": 1, "state": 1, "physic": 1, "univers": 1, "tokyo": 1, "distribut": [1, 2], "sourc": [1, 3, 7, 8, 9], "code": [1, 2, 5, 7], "follow": [1, 2, 3, 4, 5, 7, 9], "gnu": [1, 2, 3], "public": 1, "version": [1, 2], "3": [1, 2, 3, 7], "gpl": 1, "v3": 1, "later": 1, "thi": [1, 2, 4, 5, 7, 10], "softwar": [1, 8, 9], "wa": [1, 7], "ver": 1, "1": [1, 2, 3, 7, 8, 9], "0": [1, 3, 7, 8, 9], "releas": 1, "2024": 1, "03": 1, "06": 1, "beta": 1, "2023": [1, 7], "12": 1, "28": 1, "kazuyoshi": 1, "yoshimi": 1, "instutit": 1, "tatsumi": 1, "aoyama": 1, "yuichi": 1, "motoyama": 1, "masahiro": 1, "fukuda": 1, "kota": 1, "ido": 1, "tetsuya": 1, "fukushima": 1, "nation": 1, "industri": 1, "scienc": 1, "technologi": 1, "aist": 
1, "shusuk": 1, "kasamatsu": 1, "yamagata": 1, "takashi": 1, "koretsun": 1, "tohoku": 1, "project": 1, "corrdin": 1, "taisuk": 1, "ozaki": 1, "all": [1, 2, 4, 5, 7], "right": 1, "reserv": 1, "usabl": 1, "test": 1, "platform": [1, 2, 3, 7], "ubuntu": 1, "linux": 1, "python3": [1, 3, 8], "n": [2, 7], "b": [2, 3, 7, 9], "content": [2, 5, 7], "section": [2, 3, 5, 7, 9], "mai": [2, 3, 5, 7], "vari": 2, "A": [2, 5, 7, 8, 9], "mean": [2, 8], "set": [2, 3, 7], "small": [2, 8], "task": [2, 3, 4, 5, 8, 9, 10], "ar": [2, 3, 4, 5, 7, 8, 9], "within": [2, 5, 7, 8, 9], "singl": [2, 5, 7], "submit": [2, 3, 5, 7, 8, 9], "larg": 2, "queue": [2, 5, 7], "i": [2, 3, 4, 5, 6, 7, 8, 9], "schemat": 2, "shown": [2, 7], "which": [2, 3, 5, 7, 8, 9], "launch": 2, "background": 2, "process": [2, 5, 7], "wait": 2, "statement": 2, "invok": [2, 3, 7], "complet": [2, 3, 4, 5, 7], "param_1": 2, "param_2": 2, "param_n": 2, "To": [2, 3, 8, 9], "manag": [2, 7], "requir": [2, 3, 5, 7], "node": [2, 3, 5, 7], "core": [2, 5], "alloc": [2, 5], "over": 2, "so": [2, 3, 7], "thei": [2, 5], "distinct": 2, "also": [2, 3], "need": [2, 3, 7], "arrang": 2, "where": [2, 3, 5, 7], "most": 2, "run": [2, 3, 5, 10], "simultan": 2, "accord": [2, 7], "resourc": 2, "hereaft": 2, "denot": 2, "concurr": [2, 3, 4, 7], "control": 2, "take": [2, 4, 7], "list": [2, 3, 4, 6, 8, 9, 10], "hold": 2, "item": [2, 5], "each": [2, 3, 5, 7, 8], "an": [2, 3, 5, 7, 8, 9], "exampl": [2, 5, 6, 7, 10], "given": [2, 3, 5], "dat": [2, 3, 4, 7, 8, 9], "contain": [2, 3, 4, 5, 7], "line": [2, 3, 4, 5, 7], "cat": 2, "j": 2, "number": [2, 5], "determin": 2, "runtim": 2, "from": [2, 4, 7, 8, 9], "obtain": [2, 3, 4, 7, 9], "environ": [2, 5, 6], "degre": [2, 5], "thread": [2, 5], "specifi": [2, 3, 4, 5, 7], "wai": [2, 3, 7], "assign": [2, 5], "For": [2, 3, 7], "call": [2, 7], "srun": [2, 5], "exploit": 2, "option": [2, 3, 4, 5, 7], "exclus": [2, 5], "explicit": 2, "On": [2, 7, 9], "hand": [2, 7, 9], "do": [2, 7], "have": [2, 5, 7], "handl": [2, 7], "divid": 2, "slot": 2, "divis": 2, "kept": 2, "form": [2, 5], "tabl": [2, 5, 7], "variabl": [2, 5], "host": 2, "pin": 2, "through": [2, 4], "mpirun": [2, 5], "mpiexec": [2, 5], "mpi": [2, 5], "implement": 2, "o": [2, 3, 4, 7, 8, 9], "tang": [2, 3], "power": [2, 3], "login": [2, 3, 7], "usenix": [2, 3], "magazin": [2, 3], "februari": [2, 3], "2011": [2, 3], "42": [2, 3], "47": [2, 3], "read": [2, 4], "input": [2, 3, 7, 8, 9], "yaml": [2, 3, 5, 7, 8, 9], "describ": [2, 3, 5, 7], "header": 2, "prologu": [2, 7], "correspond": [2, 7, 8, 9], "block": 2, "written": [2, 4, 5, 7, 8, 9], "definit": 2, "next": [2, 7], "accept": [2, 5], "addit": [2, 3, 5, 7], "argument": [2, 3, 7, 8, 9], "submiss": 2, "sbatch": [2, 3, 5, 7, 8, 9], "pass": [2, 3, 7], "name": [2, 3, 4, 5, 7, 8, 9], "retri": [2, 3, 7], "can": [2, 3, 7], "ignor": [2, 5], "fix": [2, 3], "default": [2, 3, 4, 5], "enabl": 2, "modifi": [2, 7], "when": [2, 4, 5], "more": [2, 4, 5, 7], "than": [2, 4, 5], "one": [2, 4, 5, 7, 8], "procedur": [2, 7], "appli": 2, "fals": [2, 5, 7], "true": [2, 5, 7], "creat": [2, 3, 10], "task_": 2, "pre": [2, 7], "keyword": [2, 5], "substitut": 2, "epilogu": [2, 7], "main": [2, 3, 8], "briefli": 2, "below": [2, 7], "run_parallel": 2, "perform": [2, 4, 7, 8, 9], "statu": [2, 3, 4, 8, 9, 10], "_find_multipl": 2, "find": 2, "actual": [2, 3, 5], "wrap": 2, "_run_parallel_task": 2, "deal": 2, "nest": 2, "separ": [2, 4], "out": [2, 5, 7], "_setup_run_parallel": 2, "account": 2, "inform": [2, 7], "summar": [2, 4, 8, 9], "_nnode": 2, "slurm_nnod": 2, "_ncore": 2, 
"slurm_cpus_on_nod": 2, "_node": 2, "uniqu": 2, "pbs_nodefil": 2, "entri": [2, 7], "search": 2, "order": [2, 3, 7], "examin": 2, "ncpu": 2, "profession": 2, "omp_num_thread": 2, "moller_cor": 2, "ppn": 2, "supplement": 2, "some": 2, "befor": [2, 7], "export": 2, "noth": 2, "2": [2, 7, 8, 9], "directori": [2, 3, 4, 5, 7, 8, 9], "id": 2, "_setup_taskenv": 2, "up": [2, 9], "base": [2, 3, 5, 7], "_is_readi": 2, "check": [2, 3, 8, 9, 10], "preced": 2, "been": [2, 3, 5, 7], "successfulli": [2, 3, 4, 7], "If": [2, 3, 4, 5, 7], "remain": [2, 8, 9], "otherwis": [2, 5], "termin": [2, 3, 4, 7], "latest": 2, "profil": 2, "issp": [2, 3, 7, 8, 9], "place": 2, "Their": 2, "depict": 2, "factori": 2, "select": 2, "import": 2, "__init__": [2, 3], "py": [2, 3, 8], "regist": 2, "register_platform": 2, "system_nam": 2, "class_nam": 2, "becom": 2, "avail": [2, 8, 9], "specif": [2, 7], "should": [2, 3, 7], "deriv": 2, "baseslurm": 2, "string": [2, 5, 7], "return": 2, "valu": [2, 4, 5, 7, 9], "parallel_command": 2, "method": [2, 8, 9], "see": 2, "openpb": 2, "torqu": 2, "basepb": 2, "There": [2, 3, 7], "two": 2, "while": 2, "self": 2, "pbs_use_old_format": 2, "latter": 2, "per": [2, 5, 7], "128": 2, "further": 2, "overridden": 2, "relev": 2, "setup": 2, "extract": [2, 4, 7], "generate_head": 2, "generate_funct": 2, "bodi": [2, 8, 9], "generate_vari": 2, "generate_function_bodi": 2, "embed": [2, 5], "multipl": [2, 5], "intern": 2, "acquir": 2, "e": [2, 5, 7, 9], "g": [2, 5, 7, 9], "printenv": 2, "_debug": 2, "debug": 2, "output": [2, 4, 5, 7, 8], "print": 2, "dure": [2, 3], "doe": 2, "well": [2, 3], "recommend": 2, "turn": 2, "defin": [2, 7], "prerequisit": 3, "moller": [3, 5, 7, 10], "includ": [3, 5], "librari": [3, 5], "python": 3, "x": [3, 7], "ruamel": 3, "modul": [3, 7], "tabul": 3, "must": [3, 7], "server": 3, "offici": [3, 8, 9], "page": 3, "github": 3, "repositori": 3, "download": 3, "git": 3, "clone": 3, "http": 3, "com": [3, 5], "center": 3, "dev": 3, "onc": [3, 9], "you": [3, 7], "automat": 3, "same": 3, "time": [3, 5], "cd": [3, 7], "m": [3, 5, 8], "pip": 3, "moller_statu": [3, 6, 7, 8, 9], "structur": 3, "licens": [3, 6], "readm": 3, "md": 3, "pyproject": 3, "toml": 3, "doc": [3, 7], "ja": 3, "en": 3, "src": 3, "base_slurm": 3, "base_pb": 3, "base_default": 3, "function": 3, "sampl": [3, 5, 7, 10], "featur": 3, "descript": [3, 4, 6, 8, 9, 10], "first": [3, 7, 8], "detail": [3, 7], "manual": 3, "sh": [3, 7, 8, 9], "transfer": [3, 7], "note": [3, 7, 8, 9], "rel": 3, "path": [3, 5, 8], "absolut": 3, "readi": 3, "system": [3, 5, 6, 7, 8, 9], "case": [3, 7, 8], "c": [3, 7], "qsub": 3, "thu": 3, "after": [3, 5, 7, 8], "finish": [3, 7, 8, 9], "report": [3, 4], "whether": [3, 8], "resum": 3, "again": [3, 7, 8], "yet": [3, 4, 7], "unexecut": [3, 7], "unfinish": 3, "fail": [3, 4, 5, 10], "edit": [3, 7], "chang": [3, 7, 8], "Then": [3, 8], "abov": [3, 7], "synopsi": 4, "job_script": 4, "input_yaml": 4, "supersed": 4, "output_fil": [4, 5], "result": [4, 5, 7, 8, 9], "standard": [4, 5, 7], "h": 4, "displai": 4, "help": 4, "exit": 4, "text": [4, 5, 7, 8, 9], "csv": 4, "html": 4, "ok": 4, "skip": [4, 7], "collaps": 4, "list_fil": 4, "log": [4, 7], "error": [4, 7], "comma": 4, "logfil": 4, "stat_": 4, "omit": [4, 7], "filter": 4, "onli": 4, "whose": [4, 7], "ani": 4, "make": [4, 8, 9], "summari": [4, 7], "configur": 5, "consist": [5, 7, 9], "initi": 5, "final": [5, 7], "carri": [5, 7], "betch": 5, "left": 5, "unspecifi": 5, "usual": 5, "regard": 5, "comment": 5, "none": 5, "them": 5, "target": [5, 7], "At": 5, 
"present": [5, 7], "either": 5, "integ": 5, "rang": 5, "both": [5, 7], "second": [5, 7], "elaps": [5, 7], "hh": 5, "mm": 5, "ss": 5, "other": [5, 6, 7, 9], "head": 5, "direct": 5, "mail": 5, "type": [5, 7], "begin": 5, "end": [5, 7], "user": [5, 7], "requeu": 5, "bea": 5, "r": 5, "y": 5, "prior": 5, "shell": [5, 7, 9], "sequenc": 5, "kei": [5, 7], "smaller": 5, "differ": [5, 7, 8, 9], "sequenti": [5, 7], "openmpi": [5, 7], "hybrid": 5, "prog": 5, "arg1": 5, "replac": [5, 7], "associ": 5, "assum": 5, "These": 5, "suppos": 5, "locat": 5, "what": [6, 10], "contributor": 6, "copyright": 6, "oper": 6, "hphi": [6, 10], "dsqss": [6, 10], "guid": 6, "bulk": 6, "how": [6, 10], "work": [6, 8, 9], "extend": 6, "step": 7, "we": [7, 8, 9], "explain": 7, "along": 7, "here": 7, "instruct": 7, "sever": [7, 8], "organ": 7, "synchron": 7, "between": [7, 8], "taken": 7, "everi": 7, "start": 7, "three": 7, "4": [7, 9], "among": 7, "concern": 7, "post": 7, "testjob": 7, "i8cpu": 7, "00": 7, "10": 7, "purg": 7, "load": 7, "oneapi_compil": 7, "5": [7, 8], "oneapi": 7, "classic": 7, "ulimit": 7, "": [7, 10], "unlimit": 7, "home": 7, "materiapp": 7, "intel": 7, "parallelvar": 7, "20210622": 7, "echo": 7, "hello": 7, "world": 7, "txt": 7, "sleep": 7, "hello_again": 7, "done": 7, "date": 7, "being": 7, "made": 7, "preprocess": 7, "common": 7, "hello_world": 7, "sinc": 7, "treat": 7, "basi": 7, "similarli": 7, "pleas": 7, "chapter": 7, "neither": 7, "bash": [7, 8, 9], "care": 7, "csh": 7, "usr": 7, "bin": 7, "l": [7, 8, 9], "1d": 7, "make_input": [7, 8, 9], "enclos": [7, 8, 9], "dataset": [7, 8, 9], "By": [7, 8, 9], "subdirectori": 7, "0001": 7, "0020": 7, "copi": [7, 8], "cp": 7, "confirm": 7, "0002": 7, "0003": 7, "0004": 7, "0005": 7, "0006": 7, "0007": 7, "0008": 7, "0009": 7, "0010": 7, "0011": 7, "0012": 7, "0013": 7, "0014": 7, "0015": 7, "0016": 7, "0017": 7, "0018": 7, "0019": 7, "becaus": 7, "previou": [7, 9], "subsequ": 7, "task1": 7, "task2": 7, "task3": 7, "dataset_0001": 7, "dataset_0002": 7, "dataset_0003": 7, "dataset_0004": 7, "dataset_0005": 7, "dataset_0006": 7, "dataset_0007": 7, "dataset_0008": 7, "dataset_0009": 7, "dataset_0010": 7, "dataset_0011": 7, "dataset_0012": 7, "dataset_0013": 7, "dataset_0014": 7, "dataset_0015": 7, "dataset_0016": 7, "dataset_0017": 7, "dataset_0018": 7, "dataset_0019": 7, "dataset_0020": 7, "were": 7, "success": 7, "proceed": 7, "independ": 7, "resubmit": 7, "alreadi": [7, 8], "anyth": 7, "properli": 7, "open": [8, 9], "integr": 8, "mont": 8, "calro": 8, "quantum": [8, 9], "mani": [8, 9], "problem": [8, 9], "temperatur": 8, "magnet": 8, "suscept": 8, "chi": 8, "term": 8, "antiferromagnet": [8, 9], "heisenberg": [8, 9], "chain": [8, 9], "period": [8, 9], "boundari": [8, 9], "length": [8, 9], "t": 8, "sure": [8, 9], "exist": 8, "remov": 8, "like": 8, "l_8__m_1__t_1": 8, "store": [8, 9], "gather": [8, 9], "extract_result": 8, "write": [8, 9], "column": 8, "stderr": 8, "visual": [8, 9], "gnuplot": [8, 9], "plot_m1": 8, "plt": [8, 9], "plot_m2": 8, "persist": [8, 9], "afh": 8, "excit": [8, 9], "gap": [8, 9], "vanish": 8, "reflect": 8, "veri": 8, "low": 8, "region": 8, "finit": [8, 9], "size": [8, 9], "effect": [8, 9], "spin": [8, 9], "drop": 8, "exact": 9, "diagon": 9, "delta": 9, "2s_1": 9, "2s_2": 9, "l_8": 9, "l_10": 9, "l_24": 9, "l_18": 9, "addition": 9, "extract_gap": 9, "energi": 9, "pair": 9, "fit": 9, "curv": 9, "delta_": 9, "infti": 9, "exp": 9, "cl": 9, "plot": 9, "logarithm": 9, "correct": 9, "caus": 9, "extrapol": 9, "417": 9, "41048": 9, "6": 9, "qmc": 9, 
"todo": 9, "kato": 9, "prl": 9, "87": 9, "047203": 9, "2001": 9, "rerun": 10}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"moller": [0, 1, 2, 4, 6, 8, 9], "user": 0, "guid": [0, 2], "content": 0, "introduct": 1, "what": [1, 8, 9], "i": 1, "licens": 1, "contributor": 1, "copyright": 1, "oper": 1, "environ": 1, "extens": 2, "bulk": 2, "job": [2, 5, 7], "execut": 2, "how": [2, 8, 9], "work": 2, "structur": 2, "script": [2, 7], "brief": 2, "descript": [2, 5, 7], "function": 2, "extend": 2, "other": 2, "system": 2, "class": 2, "slurm": 2, "schedul": 2, "variant": 2, "pb": 2, "custom": 2, "featur": 2, "port": 2, "new": 2, "type": 2, "troubl": 2, "shoot": 2, "instal": 3, "basic": [3, 7], "usag": [3, 7], "command": 4, "refer": 4, "moller_statu": 4, "file": [5, 7], "format": 5, "gener": [5, 7], "set": 5, "platform": 5, "prologu": 5, "epilogu": 5, "list": [5, 7], "comprehens": 6, "calcul": [6, 8, 9], "util": 6, "prepar": [7, 8, 9], "batch": 7, "creat": 7, "run": [7, 8, 9], "check": 7, "statu": 7, "rerun": 7, "fail": 7, "task": 7, "exampl": [8, 9], "dsqss": 8, "": [8, 9], "thi": [8, 9], "sampl": [8, 9], "hphi": 9, "tutori": 10}, "envversion": {"sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx": 60}, "alltitles": {"Moller Users Guide": [[0, "moller-users-guide"]], "Contents:": [[0, null]], "Introduction": [[1, "introduction"]], "What is moller?": [[1, "what-is-moller"]], "License": [[1, "license"]], "Contributors": [[1, "contributors"]], "Copyright": [[1, "copyright"]], "Operating environment": [[1, "operating-environment"]], "Extension guide": [[2, "extension-guide"]], "Bulk job execution by moller": [[2, "bulk-job-execution-by-moller"]], "How moller works": [[2, "how-moller-works"]], "Structure of moller script": [[2, "structure-of-moller-script"]], "Brief description of moller script functions": [[2, "brief-description-of-moller-script-functions"]], "How to extend moller for other systems": [[2, "how-to-extend-moller-for-other-systems"]], "Class structure": [[2, "class-structure"]], "SLURM job scheduler variants": [[2, "slurm-job-scheduler-variants"]], "PBS job scheduler variants": [[2, "pbs-job-scheduler-variants"]], "Customizing features": [[2, "customizing-features"]], "Porting to new type of job scheduler": [[2, "porting-to-new-type-of-job-scheduler"]], "Trouble shooting": [[2, "trouble-shooting"]], "Installation and basic usage": [[3, "installation-and-basic-usage"]], "Command reference": [[4, "command-reference"]], "moller": [[4, "moller"]], "moller_status": [[4, "moller-status"]], "File format": [[5, "file-format"]], "Job description file": [[5, "job-description-file"]], "General settings": [[5, "general-settings"]], "platform": [[5, "platform"]], "prologue, epilogue": [[5, "prologue-epilogue"]], "jobs": [[5, "jobs"]], "List file": [[5, "list-file"]], "Comprehensive Calculation Utility (moller)": [[6, "comprehensive-calculation-utility-moller"]], "Basic usage": [[7, "basic-usage"]], "Prepare job description file": [[7, "prepare-job-description-file"]], "Generate batch job script": [[7, "generate-batch-job-script"]], "Create list file": [[7, "create-list-file"]], "Run batch job": [[7, "run-batch-job"]], "Check status": [[7, "check-status"]], "Rerun failed tasks": [[7, "rerun-failed-tasks"]], "Example for moller calculation with DSQSS": [[8, 
"example-for-moller-calculation-with-dsqss"]], "What\u2019s this sample?": [[8, "whats-this-sample"], [9, "whats-this-sample"]], "Preparation": [[8, "preparation"], [9, "preparation"]], "How to run": [[8, "how-to-run"], [9, "how-to-run"]], "Example for moller calculation with HPhi": [[9, "example-for-moller-calculation-with-hphi"]], "Tutorial": [[10, "tutorial"]]}, "indexentries": {}}) \ No newline at end of file diff --git a/manual/v1.0.0/en/moller-usersguide.pdf b/manual/v1.0.0/en/moller-usersguide.pdf new file mode 100644 index 0000000..00265e4 Binary files /dev/null and b/manual/v1.0.0/en/moller-usersguide.pdf differ diff --git a/manual/v1.0.0/ja/html/.buildinfo b/manual/v1.0.0/ja/html/.buildinfo new file mode 100644 index 0000000..301b0f7 --- /dev/null +++ b/manual/v1.0.0/ja/html/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: 6f62da9bc64b3eae3b24ac05fef20db8 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/manual/v1.0.0/ja/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png b/manual/v1.0.0/ja/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png new file mode 100644 index 0000000..60ec48e Binary files /dev/null and b/manual/v1.0.0/ja/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png differ diff --git a/manual/v1.0.0/ja/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png.map b/manual/v1.0.0/ja/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png.map new file mode 100644 index 0000000..138a9c8 --- /dev/null +++ b/manual/v1.0.0/ja/html/_images/graphviz-06368183531b8fc81c8d91cef6b17e1dc3c86493.png.map @@ -0,0 +1,2 @@ + + diff --git a/manual/v1.0.0/ja/html/_images/task_view.png b/manual/v1.0.0/ja/html/_images/task_view.png new file mode 100644 index 0000000..44d038a Binary files /dev/null and b/manual/v1.0.0/ja/html/_images/task_view.png differ diff --git a/manual/v1.0.0/ja/html/_images/tutorial_dsqss_M1.png b/manual/v1.0.0/ja/html/_images/tutorial_dsqss_M1.png new file mode 100644 index 0000000..cd47bf4 Binary files /dev/null and b/manual/v1.0.0/ja/html/_images/tutorial_dsqss_M1.png differ diff --git a/manual/v1.0.0/ja/html/_images/tutorial_dsqss_M2.png b/manual/v1.0.0/ja/html/_images/tutorial_dsqss_M2.png new file mode 100644 index 0000000..95e2a6a Binary files /dev/null and b/manual/v1.0.0/ja/html/_images/tutorial_dsqss_M2.png differ diff --git a/manual/v1.0.0/ja/html/_images/tutorial_hphi_gap.png b/manual/v1.0.0/ja/html/_images/tutorial_hphi_gap.png new file mode 100644 index 0000000..65d612c Binary files /dev/null and b/manual/v1.0.0/ja/html/_images/tutorial_hphi_gap.png differ diff --git a/manual/v1.0.0/ja/html/_sources/index.rst.txt b/manual/v1.0.0/ja/html/_sources/index.rst.txt new file mode 100644 index 0000000..3b2de1d --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/index.rst.txt @@ -0,0 +1,20 @@ +.. HTP-tools documentation master file, created by + sphinx-quickstart on Fri Jun 30 11:02:31 2023. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Moller Users Guide +===================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + moller/index + +.. Indices and tables +.. ================== + +.. * :ref:`genindex` +.. * :ref:`modindex` +.. 
* :ref:`search` diff --git a/manual/v1.0.0/ja/html/_sources/moller/about/index.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/about/index.rst.txt new file mode 100644 index 0000000..8f65b2a --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/about/index.rst.txt @@ -0,0 +1,76 @@ +**************************************************************** +概要 +**************************************************************** + +mollerとは? +---------------------------------------------------------------- + +近年、機械学習を活用した物性予測や物質設計(マテリアルズインフォマティクス)が注目されています。 +機械学習の精度は、適切な教師データの準備に大きく依存しています。 +そのため、迅速に教師データを生成するためのツールや環境の整備は、 +マテリアルズインフォマティクスの研究進展に大きく貢献すると期待されます。 + +mollerは、ハイスループット計算を支援するためのパッケージHTP-Toolsの一つとして提供されています。 +mollerはスーパーコンピュータやクラスタ向けにバッチジョブスクリプトを生成するツールであり、 +多重実行の機能を利用し、パラメータ並列など一連の計算条件について並列にプログラムを実行することができます。 +現状では、東京大学 物性研究所の提供するスーパーコンピュータ ohtaka (slurmジョブスケジューラ) と kugui (PBSジョブスケジューラ)がサポートされています。 + +ライセンス +---------------------------------------------------------------- + +本ソフトウェアのプログラムパッケージおよびソースコード一式はGNU General Public License version 3 (GPL v3) に準じて配布されています。 + +開発貢献者 +---------------------------------------------------------------- + +本ソフトウェアは以下の開発貢献者により開発されています。 + +- ver.1.0.0 (2024/03/06リリース) + +- ver.1.0-beta (2023/12/28リリース) + + - 開発者 + + - 吉見 一慶 (東京大学 物性研究所) + + - 青山 龍美 (東京大学 物性研究所) + + - 本山 裕一 (東京大学 物性研究所) + + - 福田 将大 (東京大学 物性研究所) + + - 井戸 康太 (東京大学 物性研究所) + + - 福島 鉄也 (産業技術総合研究所) + + - 笠松 秀輔 (山形大学 学術研究院(理学部主担当)) + + - 是常 隆  (東北大学大学院理学研究科) + + - プロジェクトコーディネーター + + - 尾崎 泰助 (東京大学 物性研究所) + + +コピーライト +---------------------------------------------------------------- + +.. only:: html + + |copy| *2023- The University of Tokyo. All rights reserved.* + + .. |copy| unicode:: 0xA9 .. copyright sign + +.. only:: latex + + :math:`\copyright` *2023- The University of Tokyo. All rights reserved.* + +本ソフトウェアは2023年度 東京大学物性研究所 ソフトウェア高度化プロジェクトの支援を受け開発されており、その著作権は東京大学が所持しています。 + +動作環境 +---------------------------------------------------------------- + +以下の環境で動作することを確認しています。 + +- Ubuntu Linux + python3 + diff --git a/manual/v1.0.0/ja/html/_sources/moller/appendix/index.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/appendix/index.rst.txt new file mode 100644 index 0000000..fdfa16b --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/appendix/index.rst.txt @@ -0,0 +1,235 @@ +================================================================ +拡張ガイド +================================================================ + +(註: 以下の内容は moller のバージョンによって変わる可能性があります。) + +mollerによるバルク実行 +---------------------------------------------------------------- + +バルク実行とは、大型のバッチキューに投入した一つのバッチジョブの中で、複数の小さいタスクを並行して実行するというものです。動作のイメージとしては、次のようにN個のタスクをバックグラウンドで実行し同時に処理させ、wait文によりすべてのタスクが終了するまで待ちます。 + +.. code-block:: bash + + task param_1 & + task param_2 & + ... + task param_N & + wait + +このとき、バッチジョブに割り当てられたノード・コアを適宜分配し、param_1〜param_N のタスクがそれぞれ別のノード・コアで実行されるように配置する必要があります。また、多数のタスクがある時に、割当てリソースに応じて最大N個のタスクが実行されるよう実行を調整することも必要です。 + +moller で生成したジョブスクリプトを以下では moller scriptと呼ぶことにします。 +moller script では、タスクの並行実行と制御には GNU parallel [1]を利用します。GNU parallel は param_1〜param_N のリストを受取り、これらを引数としてコマンドを並行して実行するツールです。 +以下は GNU parallel を使った実行イメージで、list.dat の各行に param_1〜param_N を列挙しておきます。 + +.. 
code-block:: bash + + cat list.dat | parallel -j N task + +同時実行数については、バッチジョブに割当てられたノード数・コア数を実行時に環境変数等から取得し、各タスクの並列度(ノード数・プロセス数・スレッド数)の指定(nodeパラメータ)を元に計算します。 + +ノード・コアへのタスクの配置についてはジョブスケジューラによって方法が異なります。SLURM系のジョブスケジューラでは、リソースの排他利用のオプションを使うことで、バッチジョブ内部で発行された複数の srun をジョブスケジューラが適宜配置して実行します。具体的な指定方法はプラットフォームの設定に依存します。 + +一方、PBS系のジョブスケジューラはそのような仕組みがなく、リソースの配分をmoller script内部で処理する必要があります。moller scriptでは、バッチジョブに割り当てられた計算ノードとコアをスロットに分割し、GNU parallel で並行処理されるタスクに分配します。スロットへの分割は、実行時に取得される割当てノード・コアとタスクの並列度指定から計算し、テーブルの形で保持します。タスク内部では、mpirun (mpiexec) の引数や環境変数を通じて計算ノードの指定と割当コアのピン留めを行いプログラムを実行します。この処理は使用するMPI実装に依存します。 + +**参考文献** + +[1] `O. Tange, GNU Parallel - The command-Line Power Tool, ;login: The USENIX Magazine, February 2011:42-47. `_ + + +mollerの動作について +---------------------------------------------------------------- + +mollerで生成されるスクリプトの構成 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +mollerは、入力されたYAMLファイルの内容をもとに、バルク実行のためのジョブスクリプトを生成します。生成されるジョブスクリプトは先頭から順に次のような構成になっています。 + +#. ヘッダ + + ジョブスケジューラへの指示が記述されます。platform セクションに指定した内容が、ジョブスケジューラの種類に応じた形式に整形されて出力されます。この処理はプラットフォーム依存です。 + +#. プロローグ + + prologue セクションに指定した内容です。code ブロックの中身がそのまま転記されます。 + +#. 関数の定義 + + ジョブスクリプト内部で使う関数および変数の定義が出力されます。関数の概要については次節で説明します。この箇所はプラットフォーム依存です。 + +#. コマンドライン引数の処理 + + SLURM系のジョブスケジューラでは、リストファイルの指定やタスクの再実行などのオプション指定を sbatch コマンドの引数として与えることができます。 + + PBS系のジョブスケジューラでは引数の指定は無視されるため、オプション指定はジョブスクリプトを編集してパラメータをセットする必要があります。リストファイルのファイル名はデフォルトで ``list.dat`` です。また、タスクのリトライを行うには ``retry`` 変数を ``1`` にセットします。 + +#. タスクの記述 + + jobs セクションに記述されるタスクの内容を出力します。タスクが複数ある場合はタスクごとに以下の処理を実行します。 + + parallel = false の場合は run ブロックの中身がそのまま転記されます。 + + parallel = true (デフォルト) の場合、task_タスク名 という関数が生成され、並列実行のための前処理と run ブロックの内容が出力されます。並列計算のためのキーワード(``srun`` 、 ``mpiexec`` または ``mpirun``)はプラットフォームに応じたコマンドに置き換えられます。関数定義に続いて並列実行のコマンドが書き出されます。 + +#. 
エピローグ + + epilogue セクションに指定した内容です。code ブロックの中身がそのまま転記されます。 + + +moller scriptの関数の概要 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +moller script の内部で使用する主な関数の概要を以下に説明します。 + +- ``run_parallel`` + + タスクの内容を記述した関数(タスク関数)を並行実行する関数です。並列度、タスク関数、ステータスファイル名を引数に取ります。内部では ``_find_multiplicity`` 関数を呼んで多重度を計算し、GNU parallel を起動してタスクを並行実行します。GNU parallel の多段処理に対応するために、タスク関数は ``_run_parallel_task`` 関数でラップされます。 + + プラットフォーム依存性は ``_find_multiplicity`` および ``_setup_run_parallel`` 関数としてくくり出しています。 + +- ``_find_multiplicity`` + + 並列実行の多重度を、割当てリソース(ノード数・コア数)とタスクの並列度指定から計算します。PBS系のジョブスケジューラでは、さらに計算ノード・コアをスロットに分割し、テーブルで保持します。 + 実行時に環境から取得する情報は次の通りです。 + + - SLURM系 + + 割当てノード数 _nnodes + ``SLURM_NNODES`` + + 割当てコア数 _ncores + ``SLURM_CPUS_ON_NODE`` + + - PBS系 + + 割当てノード _nodes[] + ``PBS_NODEFILE`` で指定されるファイルから計算ノードのリストを取得 + + ノード数 _nnodes + _nodes[] の項目数 + + 割当てコア数 _ncores + 以下の順に検索されます。 + - ``NCPUS`` (PBS Professional) + - ``OMP_NUM_THREADS`` + - platform セクションの core 指定(スクリプト中に moller_core変数として書き込まれる) + - ヘッダの ncpus または ppn パラメータ + +- ``_setup_run_parallel`` + + GNU parallel による並行実行を開始する前にいくつか処理を追加するために呼ばれます。PBS系ではスロットに分割されたノード・コアのテーブルをタスク関数から参照できるよう export します。SLURM系では実行する内容はありません。 + + +各タスクに対応するタスク関数の構成については次の通りです。 + +- タスク関数の引数は 1) 並列度指定(ノード数・プロセス数・スレッド数) 2) 実行ディレクトリ 3) GNU parallel のスロットID です。 + +- ``_setup_taskenv`` で実行環境の設定を行います。この関数はプラットフォーム依存です。PBS系ではスロットIDに基づいて計算ノード・コアをテーブルから取得します。SLURM系では実行する内容はありません。 + +- 直前に実行するタスクが正常終了したかどうかを ``_is_ready`` 関数を呼んでチェックします。正常終了している場合はタスクの処理を継続します。それ以外の場合は -1 のステータスでタスクの処理を中断します。 + +- code ブロックの内容を転記します。その際に、並列計算のためのキーワード(``srun`` 、 ``mpiexec`` または ``mpirun``)はプラットフォームに応じたコマンドに置き換えられます。 + + +mollerを他のシステムで使うには +---------------------------------------------------------------- + +mollerには現在、物性研スーパーコンピュータシステム ohtaka および kugui 向けの設定が用意されています。mollerを他のシステムで使うための拡張ガイドを以下で説明します。 + + +クラス構成 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +mollerの構成のうちプラットフォーム依存の部分は ``platform/`` ディレクトリにまとめています。 +クラス構成は次のとおりです。 + +.. 
graphviz:: + + digraph class_diagram { + size="5,5" + node[shape=record,style=filled,fillcolor=gray95] + edge[dir=back,arrowtail=empty] + + Platform[label="{Platform (base.py)}"] + BaseSlurm[label="{BaseSlurm (base_slurm.py)}"] + BasePBS[label="{BasePBS (base_pbs.py)}"] + BaseDefault[label="{BaseDefault (base_default.py)}"] + + Ohtaka[label="{Ohtaka (ohtaka.py)}"] + Kugui[label="{Kugui (kugui.py)}"] + Pbs[label="{Pbs (pbs.py)}"] + Default[label="{DefaultPlatform (default.py)}"] + + Platform->BaseSlurm + Platform->BasePBS + Platform->BaseDefault + + BaseSlurm->Ohtaka + BasePBS->Kugui + BasePBS->Pbs + BaseDefault->Default + } + +プラットフォームの選択についてはファクトリが用意されています。``register_platform(登録名, クラス名)`` でクラスをファクトリに登録し、 ``platform/__init__.py`` にクラスを import しておくと、入力パラメータファイル中で platform セクションの system パラメータに指定できるようになります。 + + +SLURM系ジョブスケジューラ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +SLURM系のジョブスケジューラを利用している場合、BaseSlurm クラスを元にシステム固有の設定を行います。 +並列計算を実行するキーワードを置き換える文字列は ``parallel_command()`` メソッドの戻り値で与えます。リソースの排他利用を行うための srun のパラメータをここに指定します。 +具体例は ohtaka.py を参照してください。 + +PBS系ジョブスケジューラ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +PBS系のジョブスケジューラ (PBS Professional, OpenPBS, Torque など)を利用している場合、BasePBS クラスを元にシステム固有の設定を行います。 + +PBS系ではバッチジョブのノード数の指定の仕方に2通りあり、PBS Professional は select=N:ncpus=n という書式で指定しますが、Torque などは node=N:ppn=n と記述します。後者の指定を用いる場合は ``self.pbs_use_old_format = True`` をセットします。 + +計算ノードのコア数は node パラメータで指定できますが、対象システムを限定してコア数のデフォルト値を設定しておくこともできます。kugui.py ではノードあたり128コアを設定しています。 + +細かいカスタマイズが必要な場合 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +基底クラスを参照して必要なメソッドを再定義します。メソッド構成は次のようになっています。 + +- ``setup`` + + platform セクションのパラメータの取り出しなどを行います。 + +- ``parallel_command`` + + 並列計算のキーワード (``srun``, ``mpiexec``, ``mpirun``) を置き換える文字列を返します。 + +- ``generate_header`` + + ジョブスケジューラオプションの指定を記述したヘッダを生成します。 + +- ``generate_function`` + + moller script 内部で使用する関数の定義を生成します。変数および関数の実体はそれぞれ以下のメソッドで作られます。 + + - ``generate_variable`` + - ``generate_function_body`` + + それぞれの関数は埋め込み文字列としてクラス内で定義されています。 + +新しいタイプのジョブスケジューラに対応させるには +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +moller scriptの動作のうちプラットフォーム依存な箇所は、並行実行の多重度の計算、リソース配置に関する部分、並列計算のコマンドです。 + +- 割当てノード・ノード数・ノードあたりのコア数を実行時に環境変数等から取得する方法 + +- 並列計算を実行するコマンド (mpiexec等) と、実行ホストやコア割当の指定のしかた + +これらをもとにmoller script内で使う関数を作成します。 +``printenv`` コマンドでジョブスクリプト内で有効な環境変数の一覧を取得できます。 + + +トラブルシューティング +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +moller script内の ``_debug`` 変数を 1 にセットすると、バッチジョブ実行時にデバッグ出力が書き出されます。もしジョブがうまく実行されないときは、デバッグ出力を有効にして、内部パラメータが正しくセットされているかを確認してみてください。 + diff --git a/manual/v1.0.0/ja/html/_sources/moller/basic-usage.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/basic-usage.rst.txt new file mode 100644 index 0000000..5407f61 --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/basic-usage.rst.txt @@ -0,0 +1,154 @@ +インストールと基本的な使い方 +================================================================ + +**必要なライブラリ・環境** + + HTP-tools に含まれる網羅計算ツール moller を利用するには、以下のプログラムとライブラリが必要です。 + + - Python 3.x + - ruamel.yaml モジュール + - tabulate モジュール + - GNU Parallel (ジョブスクリプトを実行するサーバ・計算ノード上にインストールされていること) + +**ソースコード配布サイト** + + - `GitHubリポジトリ `_ + +**ダウンロード方法** + + gitを利用できる場合は、以下のコマンドでmollerをダウンロードできます。 + + .. code-block:: bash + + $ git clone https://github.com/issp-center-dev/Moller.git + +**インストール方法** + + mollerをダウンロード後、以下のコマンドを実行してインストールします。mollerが利用するライブラリも必要に応じてインストールされます。 + + .. code-block:: bash + + $ cd ./Moller + $ python3 -m pip install . 
+ + 実行プログラム ``moller`` および ``moller_status`` がインストールされます。 + +**ディレクトリ構成** + + :: + + . + |-- LICENSE + |-- README.md + |-- pyproject.toml + |-- docs/ + | |-- ja/ + | |-- en/ + | |-- tutorial/ + |-- src/ + | |-- moller/ + | |-- __init__.py + | |-- main.py + | |-- platform/ + | | |-- __init__.py + | | |-- base.py + | | |-- base_slurm.py + | | |-- base_pbs.py + | | |-- base_default.py + | | |-- ohtaka.py + | | |-- kugui.py + | | |-- pbs.py + | | |-- default.py + | | |-- function.py + | | |-- utils.py + | |-- moller_status.py + |-- sample/ + +**基本的な使用方法** + +mollerはスーパーコンピュータ向けにバッチジョブスクリプトを生成するツールです。多重実行の機能を利用して、パラメータ並列など一連の計算条件について並列にプログラムを実行します。 + +#. 構成定義ファイルの作成 + + mollerを使用するには、まず、計算内容を記述した構成定義ファイルをYAML形式で作成します。詳細についてはファイルフォーマットの章を参照してください。 + +#. コマンドの実行 + + 作成した構成定義ファイルを入力としてmollerプログラムを実行します。バッチジョブスクリプトが生成されます。 + + .. code-block:: bash + + $ moller -o job.sh input.yaml + +#. バッチジョブの実行 + + 生成されたバッチジョブスクリプトを対象となるスーパーコンピュータシステムに転送します。 + 並列実行する各パラメータごとにディレクトリを用意し、 ``list.dat`` にディレクトリ名を列挙します。 + ``list.dat`` には、ジョブを実行するディレクトリからの相対パスまたは絶対パスを記述します。 + + リストファイルが用意できたらバッチジョブを投入します。 + 以下では、物性研システムB(ohtaka)およびシステムC(kugui)で実行するケースをそれぞれ示します。 + + - 物性研システムB(ohtaka)の場合 + + ohtaka では slurm ジョブスケジューラが使用されています。バッチジョブを投入するには、バッチジョブスクリプトを引数として sbatch コマンドを実行します。ジョブスクリプト名に続けてスクリプトのパラメータを渡すことができます。パラメータとしてリストファイルを指定します。 + + .. code-block:: bash + + $ sbatch job.sh list.dat + + リストファイルの指定がない場合は list.dat がデフォルトとして使われます。 + + + - 物性研システムC(kugui)の場合 + + kugui では PBS ジョブスケジューラが使用されています。バッチジョブを投入するには、バッチジョブスクリプトを引数として qsub コマンドを実行します。スクリプトのパラメータの指定はできないので、リストファイルは list.dat として用意する必要があります。 + + .. code-block:: bash + + $ qsub job.sh + +#. 結果の確認 + + バッチジョブ終了後に、 + + .. code-block:: bash + + $ moller_status input.yaml list.dat + + を実行すると、各パラメータセットについて計算が正常に終了したかどうかを集計したレポートが出力されます。 + + +#. ジョブの再開・再実行 + + ジョブが途中で終わった場合、続けて実行するには、同じリストファイルを指定してもう一度バッチジョブを投入します。 + 未実行(未完了を含む)のタスクから実行が継続されます。 + + - 物性研システムB(ohtaka)の場合 + + 以下のように、リストファイルを指定して sbatch コマンドを実行します。 + + .. code-block:: bash + + $ sbatch job.sh list.dat + + エラーで終了したタスクを再実行するには、--retry オプションを付けてバッチジョブを投入します。 + + .. code-block:: bash + + $ sbatch job.sh --retry list.dat + + - 物性研システムC(kugui)の場合 + + job.sh を編集して retry=0 の行を retry=1 に書き換えた後、 + + .. code-block:: bash + + $ qsub job.sh + + を実行します。 + + +**参考文献** + +[1] `O. Tange, GNU Parallel - The command-Line Power Tool, ;login: The USENIX Magazine, February 2011:42-47. `_ diff --git a/manual/v1.0.0/ja/html/_sources/moller/command/index.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/command/index.rst.txt new file mode 100644 index 0000000..87cc10b --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/command/index.rst.txt @@ -0,0 +1,96 @@ +コマンドリファレンス +================================================================ + +moller +---------------------------------------------------------------- + + 網羅計算のためのバッチジョブスクリプトを生成する + +書式: + + .. code-block:: bash + + moller [-o job_script] input_yaml + +説明: + + input_yaml に指定した構成定義ファイルを読み込み、バッチジョブスクリプトを生成します。 + 以下のオプションを受け付けます。 + + - -o, --output job_script + + 出力先のファイル名を指定します。構成定義ファイル内の output_file パラメータより優先されます。ファイル名の指定がない場合は標準出力に書き出されます。 + + - -h + + ヘルプを表示します。 + +moller_status +---------------------------------------------------------------- + + 網羅計算ジョブの実行状況をレポートする + +書式: + + .. 
code-block:: bash + + moller_status [-h] [--text|--csv|--html] [--ok|--failed|--skipped|--collapsed|--yet] [-o output_file] input_yaml [list_file] + +説明: + + mollerで生成したジョブスクリプトを実行した際に、ジョブごとの各タスクが完了したかどうかを集計してレポートを作成します。input_yaml に指定した構成定義ファイルからタスクの内容を読み込みます。ジョブのリストは list_file に指定したファイルから取得します。list_file が指定されていないときは、実行時ログファイルから収集します。 + 出力形式をオプションで指定できます。デフォルトはテキスト形式です。出力先を -o または --output オプションで指定します。指定がない場合は標準出力に書き出されます。 + + - 出力モード + + 出力形式を指定します。以下のいずれかを指定できます。複数同時に指定した場合はエラーになります。デフォルトはテキスト形式です。 + + - --text + テキスト形式で出力します。 + - --csv + CSV (カンマ区切りテキスト) 形式で出力します。 + - --html + HTML形式で出力します。 + + - input_yaml + + mollerの構成定義ファイルを指定します。 + + - list_file + + ジョブのリストを格納したファイルを指定します。指定がない場合は、バッチジョブから出力されるログファイル stat_{task}.dat から収集します。 + + - -o, --output output_file + + 出力先のファイル名を指定します。指定がない場合は標準出力に書き出されます。 + + - フィルタ + + 出力内容を指定します。以下のいずれかを指定できます。指定がない場合は全てのジョブの情報が出力されます。 + + - --ok + 全てのタスクが完了したジョブのみを表示します。 + + - --failed + エラー、スキップまたは未実行のタスクがあるジョブを表示します。 + + - --skipped + 実行をスキップしたタスクがあるジョブを表示します。 + + - --yet + 未実行のタスクがあるジョブを表示します。 + + - --collapsed + エラー終了したタスクがあるジョブを表示します。 + + - --all + 全てのジョブを表示します。(デフォルト) + + - -h + + ヘルプを表示します。 + +ファイル: + + mollerで生成したジョブスクリプトを用いてプログラムを並列実行すると、実行状況がログファイル stat_{task}.dat に出力されます。moller_status はこのファイルを集計し、読みやすい形式に整形します。 + diff --git a/manual/v1.0.0/ja/html/_sources/moller/filespec/index.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/filespec/index.rst.txt new file mode 100644 index 0000000..0043c6d --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/filespec/index.rst.txt @@ -0,0 +1,133 @@ +.. _sec-fileformat: + +ファイルフォーマット +================================================================ + +構成定義ファイル +---------------------------------------------------------------- + +構成定義ファイルでは、moller でバッチジョブスクリプトを生成するための設定情報を YAML形式で記述します。本ファイルは以下の部分から構成されます。 + + 1. 全般的な記述: ジョブ名や出力ファイル名などを設定します。 + + 2. platformセクション: バッチジョブを実行するシステムやバッチジョブに関する設定を記述します。 + + 3. prologue, epilogue セクション: バッチジョブ内で行う環境設定や終了処理などを記述します。 + + 4. jobsセクション: タスクを記述します。 + +全体 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + ``name`` + + バッチジョブのジョブ名を指定します。指定がない場合は空欄となります。(通常はジョブスクリプトのファイル名がジョブ名になります) + + ``description`` + + バッチジョブの説明を記述します。コメントとして扱われます + + ``output_file`` + + moller の出力先ファイル名を指定します。コマンドライン引数の指定がある場合はコマンドライン引数の指定を優先します。いずれも指定がない場合は標準出力に出力されます。 + +platform +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ``system`` + + 対象となるシステムを指定します。現状では ohtaka と kugui が指定できます。 + + ``queue`` + + 使用するバッチキューの名称を指定します。キューの名称はシステムに依存します。 + + ``node`` + + 使用するノード数を指定します。指定方法は ノード数(整数値) または [ノード数, ノードあたりのコア数] (整数値のリスト) です。数値の範囲はシステムとキューの指定に依存します。(ノードあたりのコア数の指定はkugui,defaultのみ有効。ohtakaの場合は使われません。) + + ``core`` + + 1ノードあたり使用するコア数を指定します。数値の範囲はシステムとキューの指定に依存します。 ``node`` パラメータに同時にノードあたりのコア数が指定されている場合、 ``core`` の指定が優先します。(kugui,defaultのみ) + + ``elapsed`` + + バッチジョブの実行時間を指定します。書式は HH:MM:SS です。 + + ``options`` + + その他のバッチジョブオプションを指定します。書式は、ジョブスクリプトのオプション行の内容をリスト形式または複数行からなる文字列で記述したものです。各行の冒頭の指示語(``#PBS`` や ``#SBATCH`` など)は含めません。以下に例を示します。 + + - SLURMの場合 (文字列で指定する例) + + .. code-block:: yaml + + options: | + --mail-type=BEGIN,END,FAIL + --mail-user=user@sample.com + --requeue + + - PBSの場合 (リストで指定する例) + + .. 
code-block:: yaml + + options: + - -m bea + - -M user@sample.com + - -r y + + +prologue, epilogue +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +prologueセクションはタスク開始前に実行する内容を記述します。ライブラリやパスなど環境変数の設定等を行うのに利用できます。epilogueセクションは全タスク終了後に実行する内容を記述します。 + + ``code`` + + 処理内容をシェルスクリプトの記法で記述します。記述内容はバッチジョブスクリプト中に埋め込まれてバッチジョブ内で実行されます。 + +jobs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +ジョブで実行する一連のタスクを、タスク名をキー、処理内容を値として記述するテーブルの形式で記述します。 + + キー + + タスク名 + + 値 + + 以下の項目からなるテーブル: + + ``description`` + + タスクの説明を記述します。コメントとして扱われます。 + + ``node`` + + 並列度を指定します。指定方法は以下のいずれかです。 + + - [ プロセス数, プロセスあたりのスレッド数 ] + - [ ノード数, プロセス数, プロセスあたりのスレッド数 ] + - ノード数 + + ノード数を指定した場合、その数のノードが排他的にジョブに割り当てられます。ノード数を指定しない1番目の形式の場合、使用コア数が1ノードに満たないときは複数のジョブがノードに詰めて割当られます。1ノード以上を使う場合は必要ノード数を占有して実行されます。 + + ``parallel`` + + ジョブ間で多重実行する場合は ``true``, 逐次実行する場合は ``false`` を指定します。デフォルトは ``true`` です。 + + ``run`` + + タスクの処理内容をシェルスクリプトの記法で記述します。MPIプログラムまたは MPI/OpenMP ハイブリッドプログラムを実行する箇所は + + .. code-block:: bash + + srun prog [arg1, ...] + + と記述します。 ``srun`` の他に ``mpirun``, ``mpiexec`` のキーワードが有効です。このキーワードは、実際のバッチジョブスクリプト中では、並列実行のためのコマンド (``srun`` や ``mpirun``) と ``node`` パラメータで指定した並列度の設定に置き換えて記述されます。 + +リストファイル +---------------------------------------------------------------- + +ジョブのリストを指定します。ファイルはテキスト形式で、一行に一つのジョブ名を記述します(ディレクトリ名がジョブ名となります)。 + +mollerでは、ジョブごとにディレクトリを用意し、ジョブ内の各タスクはディレクトリに移動して実行されます。ディレクトリはバッチジョブを実行するディレクトリの直下に配置されているものと仮定します。 + diff --git a/manual/v1.0.0/ja/html/_sources/moller/index.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/index.rst.txt new file mode 100644 index 0000000..35373a4 --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/index.rst.txt @@ -0,0 +1,15 @@ +**************************************************************** +網羅計算ツール (moller) +**************************************************************** + + +.. toctree:: + :maxdepth: 2 + :numbered: 2 + + about/index + basic-usage + tutorial/index + command/index + filespec/index + appendix/index diff --git a/manual/v1.0.0/ja/html/_sources/moller/tutorial/basic.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/tutorial/basic.rst.txt new file mode 100644 index 0000000..093dc68 --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/tutorial/basic.rst.txt @@ -0,0 +1,141 @@ +.. _sec-tutorial: + +基本的な使い方 +--------------- + +網羅計算のためのバッチジョブスクリプト生成ツール moller を使うには、入力ファイルとして実行内容を記述する構成定義ファイルを用意した後、プログラム moller を実行します。生成されたバッチジョブスクリプトを対象とするスーパーコンピュータシステムに転送し、バッチジョブを投入して計算を行います。 +以下では、 ``docs/tutorial/moller`` ディレクトリにあるサンプルを例にチュートリアルを実施します。 + +構成定義ファイルを作成する +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +構成定義ファイルにはバッチジョブで実行する処理の内容を記述します。 +ここで、バッチジョブとはスーパーコンピュータシステム等のジョブスケジューラに投入する実行内容を指します。それに対し、moller が対象とするプログラムの多重実行において、多重実行される一つのパラメータセットでの実行内容をジョブと呼ぶことにします。一つのジョブはいくつかの処理単位からなり、その処理単位をタスクと呼びます。moller ではタスクごとに多重実行し、タスクの前後で同期がとられます。 + +.. only:: html + + .. figure:: ../../_static/task_view.png + :alt: タスクとジョブ + + 例: 一つのバッチジョブ内で job #1〜#3 の 3つのジョブを実行する。ジョブはそれぞれ異なるパラメータセットなどに対応する。ジョブの実行内容は task 1〜4 の一連のタスクからなる。タスクごとに job #1〜#3 の処理を並列に行う。 + +.. only:: latex + + .. figure:: ../../_static/task_view.pdf + :scale: 100% + :alt: タスクとジョブ + + 例: 一つのバッチジョブ内で job #1〜#3 の 3つのジョブを実行する。ジョブはそれぞれ異なるパラメータセットなどに対応する。ジョブの実行内容は task 1〜4 の一連のタスクからなる。タスクごとに job #1〜#3 の処理を並列に行う。 + +以下に構成定義ファイルのサンプルを記載します。構成定義ファイルは YAMLフォーマットのテキストファイルで、実行するプラットフォームやバッチジョブのパラメータと、タスクの処理内容、前処理・後処理を記述します。 + +.. 
literalinclude:: ../../../../tutorial/moller/input.yaml + +platformセクションでは、実行するプラットフォームの種類を指定します。この場合は、物性研システムB(ohtaka)での設定をしています。 + +prologueセクションでは、バッチジョブの前処理を記述します。タスクを実行する前に実行する共通のコマンドラインを記述します。 + +jobsセクションでは、タスクの処理内容を記述します。ジョブで実行する一連のタスクを、タスク名をキー、処理内容を値として記述するテーブルの形式で記述します。 + +この例では、最初に"start..."を出力するタスクを start というタスク名で定義しています。 +ここでは ``parallel = false`` に設定しています。この場合、ジョブ単位での並列は行われず、``run`` に記述した内容が逐次的に実行されます。 + +次に、"hello world."を出力するタスクを hello_world というタスク名で定義しています。 +ここでは ``parallel`` が設定されていないので、 ``parallel = true`` として扱われます。この場合、ジョブ単位での並列が行われます。 +同様に、次に "hello world again." を出力するタスクを hello_again というタスク名で定義しています。 + +最後に、epilogueセクションでは、バッチジョブの後処理を記述します。タスクを実行した後に実行する共通のコマンドラインを記述します。 + +仕様の詳細については :ref:`ファイルフォーマット ` の章を参照してください。 + + +バッチジョブスクリプトを生成する +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +構成定義ファイル(input.yaml)を入力として moller を実行します。 + +.. code-block:: bash + + $ moller -o job.sh input.yaml + +バッチジョブスクリプトが生成され出力されます。出力先は構成定義ファイル内のパラメータ、または、コマンドラインの -o または --output オプションで指定するファイルです。 +両方指定されている場合はコマンドラインパラメータが優先されます。いずれも指定がない場合は標準出力に書き出されます。 + +必要に応じて mollerで生成したバッチジョブスクリプトを対象のシステムに転送します。 +なお、スクリプトの種類は bash スクリプトです。ジョブ実行時に使用するシェルを bash に設定しておく必要があります。(ログインシェルを csh系などにしている場合は注意) + + +リストファイルを作成する +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +実行するジョブのリストを作成します。moller では、ジョブごとに個別のディレクトリを用意し、そのディレクトリ内で各ジョブを実行する仕様になっています。 +対象となるディレクトリのリストを格納したファイルを、たとえば以下のコマンドで、リストファイルとして作成します。 + +.. code-block:: bash + + $ /usr/bin/ls -1d * > list.dat + +チュートリアルには、データセットとリストファイルを作成するユーティリティープログラムが付属しています。 + +.. code-block:: bash + + $ bash ./make_inputs.sh + +を実行すると、 ``output`` ディレクトリの下にデータセットに相当する ``dataset_0001`` 〜 ``dataset_0020`` のディレクトリと、リストファイル ``list.dat`` が作成されます。 + + +網羅計算を実行する +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +mollerで生成したバッチジョブスクリプトをジョブスケジューラに投入します。この例ではジョブスクリプトと入力ファイルを ``output`` ディレクトリにコピーし、 ``output`` に移動してジョブを投入しています。 + + .. code-block:: bash + + $ cp job.sh input.yaml output/ + $ cd output + $ sbatch job.sh list.dat + +ジョブが実行されると、リストに記載されたディレクトリにそれぞれ "result.txt" というファイルが生成されます。 +"result.txt" には、ジョブ実行結果の "hello world.", "hello world again." という文字列が出力されていることが確認できます。 + +実行状況を確認する +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +タスクの実行状況はログファイルに出力されます。ログを収集してジョブごとに実行状況を一覧するツール moller_status が用意されています。ジョブを実行するディレクトリで以下を実行します。 + +.. code-block:: bash + + $ moller_status input.yaml list.dat + +引数には構成定義ファイル input.yaml とリストファイル list.dat を指定します。リストファイルは省略可能で、その場合はログファイルからジョブの情報を収集します。 + +出力サンプルを以下に示します。 + +.. literalinclude:: ../../../../tutorial/moller/reference/status.txt + + +「o」は正常終了したタスク、「x」はエラーになったタスク、「-」は前のタスクがエラーになったためスキップされたタスク、「.」は未実行のタスクを示します。 +今回は全て正常終了していることがわかります。 + +失敗したタスクを再実行する +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +タスクが失敗した場合、そのジョブ内の後続のタスクは実行されません。以下は、各タスクが 10% の確率で失敗するケースの実行例です。 + +.. literalinclude:: ../../../../tutorial/moller/reference/status_failed.txt + +dataset_0003, dataset_0004 は task1 が失敗し、後続の task2, task3 は実行されていません。その他の dataset は task1 が成功し、次の task2 が実行されています。このように、各ジョブは他のジョブとは独立に実行されます。 + +失敗したタスクを再実行するには、バッチジョブに retry のオプションをつけて再実行します。 +SLURMジョブスケジューラ (例: 物性研システムB) の場合は次のようにバッチジョブを投入します。 + +.. code-block:: bash + + $ sbatch job.sh --retry list.dat + +PBSジョブスケジューラ (例: 物性研システムC) の場合はジョブスクリプトを編集し、 ``retry=0`` の行を ``retry=1`` に変更して、バッチジョブを再投入します。 + +.. 
literalinclude:: ../../../../tutorial/moller/reference/status_retry.txt + +エラーになったタスクのみ再実行されます。上記の例では、dataset_0003 は task1 が再実行され正常終了し、次の task2 の実行に失敗しています。dataset_0004 は task1, task2, task3 が正常に実行されています。task3 まで全て正常終了しているデータセットに対しては何も実行しません。 + +なお、再実行の際にリストファイルは変更しないでください。リストファイル内の順番でジョブを管理しているため、変更すると正しく再実行されません。 diff --git a/manual/v1.0.0/ja/html/_sources/moller/tutorial/dsqss.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/tutorial/dsqss.rst.txt new file mode 100644 index 0000000..3aa491b --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/tutorial/dsqss.rst.txt @@ -0,0 +1,79 @@ +DSQSS による *moller* 計算の例 +--------------------------------------------- + +このチュートリアルについて +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +これは、量子多体問題の経路積分モンテカルロ法を実行するためのオープンソースソフトウェアパッケージである `DSQSS `__ を用いた ``moller`` の例です。この例では、周期境界条件下の :math:`S=1/2` (DSQSSの用語では :math:`M=1`) および :math:`S=1` (:math:`M=2`) 反強磁性ハイゼンベルク鎖の磁気感受率 :math:`\chi` の温度依存性を計算します。 ``moller`` を使用することで、異なるパラメーター (:math:`M, L, T`) の計算を並列に実行します。 + +この例は `公式チュートリアルの一つ `__ に対応しています。 + +準備 +~~~~~ + +``moller`` (HTP-tools)パッケージと ``DSQSS`` がインストールされていることを確認してください。このチュートリアルでは、ISSP のスーパーコンピュータシステム ``ohtaka`` を使用して計算を実行します。 + +実行方法 +~~~~~~~~ + +1. データセットを準備する + + このパッケージに含まれるスクリプト ``make_inputs.sh`` を実行します。 + + .. code:: bash + + $ bash ./make_inputs.sh + + これにより、 ``output`` ディレクトリが作成されます(すでに存在する場合は、まず削除し、再度作成します)。 ``output`` の下には、各パラメーター用の作業ディレクトリ(例: ``L_8__M_1__T_1.0``)が生成されます。ディレクトリのリストは ``list.dat`` ファイルに書き込まれます。 + +2. ``moller`` を使用してジョブスクリプトを生成する + + ジョブ記述ファイルを使用してジョブスクリプトを生成し、 ``job.sh`` というファイル名で保存します。 + + .. code:: bash + + $ moller -o job.sh input.yaml + + 次に、``job.sh`` を ``output`` ディレクトリにコピーし、 ``output`` ディレクトリに移動します。 + +3. バッチジョブを実行する + + ジョブリストを引数としてバッチジョブを送信します。 + + .. code:: bash + + $ sbatch job.sh list.dat + +4. 状態を確認する + + タスク実行の状態は ``moller_status`` プログラムによってまとめられます。 + + .. code:: bash + + $ moller_status input.yaml list.dat + +5. 結果を集める + + 計算が終了した後、結果を以下のようにして集めます。 + + .. code:: bash + + $ python3 ../extract_result.py list.dat + + このスクリプトは、:math:`M`, :math:`L`, :math:`T`, :math:`\chi` の平均、および :math:`\chi` の標準誤差を含む 5 列のテキストファイル ``result.dat`` に結果を書き込みます。 + + 結果を視覚化するために、GNUPLOT ファイル ``plot_M1.plt`` および ``plot_M2.plt`` が利用可能です。 + + .. code:: bash + + $ gnuplot --persist plot_M1.plt + $ gnuplot --persist plot_M2.plt + + |S=1/2 の磁気感受率| |S=1 の磁気感受率| + + :math:`S=1/2` と :math:`S=1` AFH 鎖の主な違いは、励起ギャップが消失するか (:math:`S=1/2`)、残るか (:math:`S=1`) のどちらかです。 + これを反映して、非常に低温領域での磁気感受率は、有限になる (:math:`S=1/2`) か、消失する (:math:`S=1`) かのどちらかです。 + :math:`S=1/2` の場合には、有限サイズ効果によりスピンギャップが開き、そのため小さいチェーンの磁気感受率が低下します。 + +.. |S=1/2 の磁気感受率| image:: ../../../../images/tutorial_dsqss_M1.* +.. 
|S=1 の磁気感受率| image:: ../../../../images/tutorial_dsqss_M2.* diff --git a/manual/v1.0.0/ja/html/_sources/moller/tutorial/hphi.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/tutorial/hphi.rst.txt new file mode 100644 index 0000000..9b02a64 --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/tutorial/hphi.rst.txt @@ -0,0 +1,86 @@ +HPhi による *moller* 計算の例 +------------------------------------------ + +このチュートリアルについて +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +これは、量子多体問題の厳密対角化を実行するためのオープンソースソフトウェアパッケージである `HPhi `__ を用いた ``moller`` の例です。 +この例では、周期境界条件下の :math:`S=1/2` (``2S_1`` ディレクトリ) と :math:`S=1` (``2S_2``) 反強磁性ハイゼンベルク鎖の励起ギャップ :math:`\Delta` のシステムサイズ依存性を計算します。 +``moller`` を使用することで、異なるシステムサイズの計算を並列に実行します。 +これはHPhi 公式チュートリアルの `セクション 1.4 `__ に対応しています。 + +準備 +~~~~~ + +``moller`` (HTP-tools)パッケージと ``HPhi`` がインストールされていることを確認してください。このチュートリアルでは、ISSP のスーパーコンピュータシステム ``ohtaka`` を使用して計算を実行します。 + +実行方法 +~~~~~~~~ + +1. データセットを準備する + + ``2S_1``, ``2S_2`` に含まれるスクリプト ``make_inputs.sh`` を実行します。 + + .. code:: bash + + $ bash ./make_inputs.sh + + ``L_8``, ``L_10``, ..., ``L_24`` (``2S_2`` の場合は ``L_18`` まで) の作業ディレクトリが生成されます。 + ディレクトリのリストは ``list.dat`` ファイルに書き込まれます。 + さらに、作業ディレクトリからエネルギーギャップを集めるためのシェルスクリプト、 ``extract_gap.sh`` が生成されます。 + +2. ``moller`` を使用してジョブスクリプトを生成する + + ``input.yaml`` からジョブスクリプトを生成し、 ``job.sh`` というファイル名で保存します。 + + .. code:: bash + + $ moller -o job.sh input.yaml + +3. バッチジョブを実行する + + ジョブリストを引数としてバッチジョブを送信します。 + + .. code:: bash + + $ sbatch job.sh list.dat + +4. 状態を確認する + + タスク実行の状態は ``moller_status`` プログラムによって確認できます。 + + .. code:: bash + + $ moller_status input.yaml list.dat + +5. 結果を集める + + 計算が終了した後、ジョブからエネルギーギャップを以下のようにして集めます。 + + .. code:: bash + + $ bash extract_gap.sh + + このスクリプトは、長さ :math:`L` とギャップ :math:`\Delta` のペアをテキストファイル ``gap.dat`` に書き込みます。 + + 結果を視覚化するために、Gnuplot ファイル ``gap.plt`` が利用可能です。 + このファイルでは、得られたギャップデータが予想される曲線によってフィットされます。 + + .. math:: \Delta(L; S=1/2) = \Delta_\infty + A/L + + および + + .. math:: \Delta(L; S=1) = \Delta_\infty + B\exp(-CL). + + グラフは次のコマンドで描画できます。 + + .. code:: bash + + $ gnuplot --persist gap.plt + + .. figure:: ../../../../images/tutorial_hphi_gap.* + :alt: スピンギャップの有限サイズ効果 + + スピンギャップの有限サイズ効果 + + :math:`S=1/2` の場合、対数補正によりスピンギャップは有限のままです。一方で、:math:`S=1` の場合、外挿値 :math:`\Delta_\infty = 0.417(1)` は以前の結果(例えば、QMC による :math:`\Delta_\infty = 0.41048(6)` (Todo and Kato, PRL **87**, 047203 (2001)))とよく一致しています。 diff --git a/manual/v1.0.0/ja/html/_sources/moller/tutorial/index.rst.txt b/manual/v1.0.0/ja/html/_sources/moller/tutorial/index.rst.txt new file mode 100644 index 0000000..4fbc667 --- /dev/null +++ b/manual/v1.0.0/ja/html/_sources/moller/tutorial/index.rst.txt @@ -0,0 +1,11 @@ +.. _sec-tutorial: + +チュートリアル +================================================================ + +.. 
toctree:: + :maxdepth: 2 + + basic + hphi + dsqss diff --git a/manual/v1.0.0/ja/html/_static/alabaster.css b/manual/v1.0.0/ja/html/_static/alabaster.css new file mode 100644 index 0000000..55f9cb1 --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/alabaster.css @@ -0,0 +1,708 @@ +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: Georgia; + font-size: 17px; + background-color: #fff; + color: #000; + margin: 0; + padding: 0; +} + + +div.document { + width: 940px; + margin: 30px auto 0 auto; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 220px; +} + +div.sphinxsidebar { + width: 220px; + font-size: 14px; + line-height: 1.5; +} + +hr { + border: 1px solid #B1B4B6; +} + +div.body { + background-color: #fff; + color: #3E4349; + padding: 0 30px 0 30px; +} + +div.body > .section { + text-align: left; +} + +div.footer { + width: 940px; + margin: 20px auto 30px auto; + font-size: 14px; + color: #888; + text-align: right; +} + +div.footer a { + color: #888; +} + +p.caption { + font-family: inherit; + font-size: inherit; +} + + +div.relations { + display: none; +} + + +div.sphinxsidebar { + max-height: 100%; + overflow-y: auto; +} + +div.sphinxsidebar a { + color: #444; + text-decoration: none; + border-bottom: 1px dotted #999; +} + +div.sphinxsidebar a:hover { + border-bottom: 1px solid #999; +} + +div.sphinxsidebarwrapper { + padding: 18px 10px; +} + +div.sphinxsidebarwrapper p.logo { + padding: 0; + margin: -10px 0 0 0px; + text-align: center; +} + +div.sphinxsidebarwrapper h1.logo { + margin-top: -10px; + text-align: center; + margin-bottom: 5px; + text-align: left; +} + +div.sphinxsidebarwrapper h1.logo-name { + margin-top: 0px; +} + +div.sphinxsidebarwrapper p.blurb { + margin-top: 0; + font-style: normal; +} + +div.sphinxsidebar h3, +div.sphinxsidebar h4 { + font-family: Georgia; + color: #444; + font-size: 24px; + font-weight: normal; + margin: 0 0 5px 0; + padding: 0; +} + +div.sphinxsidebar h4 { + font-size: 20px; +} + +div.sphinxsidebar h3 a { + color: #444; +} + +div.sphinxsidebar p.logo a, +div.sphinxsidebar h3 a, +div.sphinxsidebar p.logo a:hover, +div.sphinxsidebar h3 a:hover { + border: none; +} + +div.sphinxsidebar p { + color: #555; + margin: 10px 0; +} + +div.sphinxsidebar ul { + margin: 10px 0; + padding: 0; + color: #000; +} + +div.sphinxsidebar ul li.toctree-l1 > a { + font-size: 120%; +} + +div.sphinxsidebar ul li.toctree-l2 > a { + font-size: 110%; +} + +div.sphinxsidebar input { + border: 1px solid #CCC; + font-family: Georgia; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 160px; +} + +div.sphinxsidebar .search > div { + display: table-cell; +} + +div.sphinxsidebar hr { + border: none; + height: 1px; + color: #AAA; + background: #AAA; + + text-align: left; + margin-left: 0; + width: 50%; +} + +div.sphinxsidebar .badge { + border-bottom: none; +} + +div.sphinxsidebar .badge:hover { + border-bottom: none; +} + +/* To address an issue with donation coming after search */ +div.sphinxsidebar h3.donation { + margin-top: 10px; +} + +/* -- body styles ----------------------------------------------------------- */ + +a { + color: #004B6B; + text-decoration: underline; +} + +a:hover { + color: #6D4100; + text-decoration: underline; +} + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: Georgia; + font-weight: normal; + margin: 30px 0px 10px 0px; + padding: 0; +} + 
+div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } +div.body h2 { font-size: 180%; } +div.body h3 { font-size: 150%; } +div.body h4 { font-size: 130%; } +div.body h5 { font-size: 100%; } +div.body h6 { font-size: 100%; } + +a.headerlink { + color: #DDD; + padding: 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + color: #444; + background: #EAEAEA; +} + +div.body p, div.body dd, div.body li { + line-height: 1.4em; +} + +div.admonition { + margin: 20px 0px; + padding: 10px 30px; + background-color: #EEE; + border: 1px solid #CCC; +} + +div.admonition tt.xref, div.admonition code.xref, div.admonition a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fafafa; +} + +div.admonition p.admonition-title { + font-family: Georgia; + font-weight: normal; + font-size: 24px; + margin: 0 0 10px 0; + padding: 0; + line-height: 1; +} + +div.admonition p.last { + margin-bottom: 0; +} + +div.highlight { + background-color: #fff; +} + +dt:target, .highlight { + background: #FAF3E8; +} + +div.warning { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.danger { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.error { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.caution { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.attention { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.important { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.note { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.tip { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.hint { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.seealso { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.topic { + background-color: #EEE; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre, tt, code { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; + font-size: 0.9em; +} + +.hll { + background-color: #FFC; + margin: 0 -12px; + padding: 0 12px; + display: block; +} + +img.screenshot { +} + +tt.descname, tt.descclassname, code.descname, code.descclassname { + font-size: 0.95em; +} + +tt.descname, code.descname { + padding-right: 0.08em; +} + +img.screenshot { + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils { + border: 1px solid #888; + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils td, table.docutils th { + border: 1px solid #888; + padding: 0.25em 0.7em; +} + +table.field-list, table.footnote { + border: none; + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} + +table.footnote { + margin: 15px 0; + width: 100%; + border: 1px solid #EEE; + background: #FDFDFD; + font-size: 0.9em; +} + +table.footnote + table.footnote { + margin-top: -15px; + border-top: none; +} + +table.field-list th { + padding: 0 0.8em 0 0; +} + +table.field-list td { + padding: 0; +} + +table.field-list p { + margin-bottom: 0.8em; +} + +/* Cloned from + * https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68 + */ +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; 
+ -webkit-hyphens: manual; + hyphens: manual; +} + +table.footnote td.label { + width: .1px; + padding: 0.3em 0 0.3em 0.5em; +} + +table.footnote td { + padding: 0.3em 0.5em; +} + +dl { + margin-left: 0; + margin-right: 0; + margin-top: 0; + padding: 0; +} + +dl dd { + margin-left: 30px; +} + +blockquote { + margin: 0 0 0 30px; + padding: 0; +} + +ul, ol { + /* Matches the 30px from the narrow-screen "li > ul" selector below */ + margin: 10px 0 10px 30px; + padding: 0; +} + +pre { + background: #EEE; + padding: 7px 30px; + margin: 15px 0px; + line-height: 1.3em; +} + +div.viewcode-block:target { + background: #ffd; +} + +dl pre, blockquote pre, li pre { + margin-left: 0; + padding-left: 30px; +} + +tt, code { + background-color: #ecf0f3; + color: #222; + /* padding: 1px 2px; */ +} + +tt.xref, code.xref, a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fff; +} + +a.reference { + text-decoration: none; + border-bottom: 1px dotted #004B6B; +} + +/* Don't put an underline on images */ +a.image-reference, a.image-reference:hover { + border-bottom: none; +} + +a.reference:hover { + border-bottom: 1px solid #6D4100; +} + +a.footnote-reference { + text-decoration: none; + font-size: 0.7em; + vertical-align: top; + border-bottom: 1px dotted #004B6B; +} + +a.footnote-reference:hover { + border-bottom: 1px solid #6D4100; +} + +a:hover tt, a:hover code { + background: #EEE; +} + + +@media screen and (max-width: 870px) { + + div.sphinxsidebar { + display: none; + } + + div.document { + width: 100%; + + } + + div.documentwrapper { + margin-left: 0; + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + } + + div.bodywrapper { + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + margin-left: 0; + } + + ul { + margin-left: 0; + } + + li > ul { + /* Matches the 30px from the "ul, ol" selector above */ + margin-left: 30px; + } + + .document { + width: auto; + } + + .footer { + width: auto; + } + + .bodywrapper { + margin: 0; + } + + .footer { + width: auto; + } + + .github { + display: none; + } + + + +} + + + +@media screen and (max-width: 875px) { + + body { + margin: 0; + padding: 20px 30px; + } + + div.documentwrapper { + float: none; + background: #fff; + } + + div.sphinxsidebar { + display: block; + float: none; + width: 102.5%; + margin: 50px -30px -20px -30px; + padding: 10px 20px; + background: #333; + color: #FFF; + } + + div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, + div.sphinxsidebar h3 a { + color: #fff; + } + + div.sphinxsidebar a { + color: #AAA; + } + + div.sphinxsidebar p.logo { + display: none; + } + + div.document { + width: 100%; + margin: 0; + } + + div.footer { + display: none; + } + + div.bodywrapper { + margin: 0; + } + + div.body { + min-height: 0; + padding: 0; + } + + .rtd_doc_footer { + display: none; + } + + .document { + width: auto; + } + + .footer { + width: auto; + } + + .footer { + width: auto; + } + + .github { + display: none; + } +} + + +/* misc. 
*/ + +.revsys-inline { + display: none!important; +} + +/* Hide ugly table cell borders in ..bibliography:: directive output */ +table.docutils.citation, table.docutils.citation td, table.docutils.citation th { + border: none; + /* Below needed in some edge cases; if not applied, bottom shadows appear */ + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} + + +/* relbar */ + +.related { + line-height: 30px; + width: 100%; + font-size: 0.9rem; +} + +.related.top { + border-bottom: 1px solid #EEE; + margin-bottom: 20px; +} + +.related.bottom { + border-top: 1px solid #EEE; +} + +.related ul { + padding: 0; + margin: 0; + list-style: none; +} + +.related li { + display: inline; +} + +nav#rellinks { + float: right; +} + +nav#rellinks li+li:before { + content: "|"; +} + +nav#breadcrumbs li+li:before { + content: "\00BB"; +} + +/* Hide certain items when printing */ +@media print { + div.related { + display: none; + } +} \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/_static/basic.css b/manual/v1.0.0/ja/html/_static/basic.css new file mode 100644 index 0000000..4157edf --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page 
------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: inherit; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title 
{ + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, 
.sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + 
+table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/_static/custom.css b/manual/v1.0.0/ja/html/_static/custom.css new file mode 100644 index 0000000..2a924f1 --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/custom.css @@ -0,0 +1 @@ +/* This file intentionally left blank. */ diff --git a/manual/v1.0.0/ja/html/_static/doctools.js b/manual/v1.0.0/ja/html/_static/doctools.js new file mode 100644 index 0000000..d06a71d --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 
0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/manual/v1.0.0/ja/html/_static/documentation_options.js b/manual/v1.0.0/ja/html/_static/documentation_options.js new file mode 100644 index 0000000..026c2e5 --- /dev/null +++ 
b/manual/v1.0.0/ja/html/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '1.0.0', + LANGUAGE: 'ja', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/_static/file.png b/manual/v1.0.0/ja/html/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/manual/v1.0.0/ja/html/_static/file.png differ diff --git a/manual/v1.0.0/ja/html/_static/graphviz.css b/manual/v1.0.0/ja/html/_static/graphviz.css new file mode 100644 index 0000000..8d81c02 --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/graphviz.css @@ -0,0 +1,19 @@ +/* + * graphviz.css + * ~~~~~~~~~~~~ + * + * Sphinx stylesheet -- graphviz extension. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +img.graphviz { + border: 0; + max-width: 100%; +} + +object.graphviz { + max-width: 100%; +} diff --git a/manual/v1.0.0/ja/html/_static/language_data.js b/manual/v1.0.0/ja/html/_static/language_data.js new file mode 100644 index 0000000..b368127 --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/language_data.js @@ -0,0 +1,26 @@ +/* + * language_data.js + * ~~~~~~~~~~~~~~~~ + * + * This script contains the language-specific data used by searchtools.js, + * namely the list of stopwords, stemmer, scorer and splitter. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +var stopwords = []; + + +/* Non-minified version is copied as a separate JS file, is available */ + +/** + * Dummy stemmer for languages without stemming rules. 
+ */ +var Stemmer = function() { + this.stemWord = function(w) { + return w; + } +} + diff --git a/manual/v1.0.0/ja/html/_static/minus.png b/manual/v1.0.0/ja/html/_static/minus.png new file mode 100644 index 0000000..d96755f Binary files /dev/null and b/manual/v1.0.0/ja/html/_static/minus.png differ diff --git a/manual/v1.0.0/ja/html/_static/plus.png b/manual/v1.0.0/ja/html/_static/plus.png new file mode 100644 index 0000000..7107cec Binary files /dev/null and b/manual/v1.0.0/ja/html/_static/plus.png differ diff --git a/manual/v1.0.0/ja/html/_static/pygments.css b/manual/v1.0.0/ja/html/_static/pygments.css new file mode 100644 index 0000000..0d49244 --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { 
color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/_static/searchtools.js b/manual/v1.0.0/ja/html/_static/searchtools.js new file mode 100644 index 0000000..7918c3f --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/searchtools.js @@ -0,0 +1,574 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + `Search finished, found ${resultCount} page(s) matching the search query.` + ); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. 
+ * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent !== undefined) return docContent.textContent; + console.warn( + "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." + ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + /** + * execute search (requires search index to be loaded) + */ + query: (query) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + // array of [docname, title, anchor, descr, score, filename] + let results = []; + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + let score = Math.round(100 * queryLower.length / title.length) + results.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id] of foundEntries) { + let score = Math.round(100 * queryLower.length / entry.length) + results.push([ + docNames[file], + titles[file], + id ? "#" + id : "", + null, + score, + filenames[file], + ]); + } + } + } + + // lookup as object + objectTerms.forEach((term) => + results.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); + + // now sort the results by score (in opposite order of appearance, since the + // display function below uses pop() to retrieve items) and then + // alphabetically + results.sort((a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 
1 : -1; + }); + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + results = results.reverse(); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord) && 
!terms[word]) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord) && !titleTerms[word]) + arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); + }); + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) + fileMap.get(file).push(word); + else fileMap.set(file, [word]); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. + const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords) => { + const text = Search.htmlToText(htmlText); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/manual/v1.0.0/ja/html/_static/sphinx_highlight.js b/manual/v1.0.0/ja/html/_static/sphinx_highlight.js new file mode 100644 index 0000000..8a96c69 --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. 
+ */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? 
divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/manual/v1.0.0/ja/html/_static/task_view.pdf b/manual/v1.0.0/ja/html/_static/task_view.pdf new file mode 100644 index 0000000..3af0603 Binary files /dev/null and b/manual/v1.0.0/ja/html/_static/task_view.pdf differ diff --git a/manual/v1.0.0/ja/html/_static/task_view.png b/manual/v1.0.0/ja/html/_static/task_view.png new file mode 100644 index 0000000..44d038a Binary files /dev/null and b/manual/v1.0.0/ja/html/_static/task_view.png differ diff --git a/manual/v1.0.0/ja/html/_static/translations.js b/manual/v1.0.0/ja/html/_static/translations.js new file mode 100644 index 0000000..817b128 --- /dev/null +++ b/manual/v1.0.0/ja/html/_static/translations.js @@ -0,0 +1,60 @@ +Documentation.addTranslations({ + "locale": "ja", + "messages": { + "%(filename)s — %(docstitle)s": "%(filename)s — %(docstitle)s", + "© %(copyright_prefix)s %(copyright)s.": "", + ", in ": ", in ", + "About these documents": "\u3053\u306e\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8\u306b\u3064\u3044\u3066", + "Automatically generated list of changes in version %(version)s": "\u30d0\u30fc\u30b8\u30e7\u30f3 %(version)s \u306e\u5909\u66f4\u70b9\uff08\u3053\u306e\u30ea\u30b9\u30c8\u306f\u81ea\u52d5\u751f\u6210\u3055\u308c\u3066\u3044\u307e\u3059\uff09", + "C API changes": "C API \u306b\u95a2\u3059\u308b\u5909\u66f4", + "Changes in Version %(version)s — %(docstitle)s": "\u30d0\u30fc\u30b8\u30e7\u30f3 %(version)s \u306e\u5909\u66f4\u70b9 — %(docstitle)s", + "Collapse sidebar": "\u30b5\u30a4\u30c9\u30d0\u30fc\u3092\u305f\u305f\u3080", + "Complete Table of Contents": "\u7dcf\u5408\u76ee\u6b21", + "Contents": "\u30b3\u30f3\u30c6\u30f3\u30c4", + "Copyright": "\u8457\u4f5c\u6a29", + "Created using Sphinx %(sphinx_version)s.": "", + "Expand sidebar": "\u30b5\u30a4\u30c9\u30d0\u30fc\u3092\u5c55\u958b", + "Full index on one page": "\u7dcf\u7d22\u5f15", + "General Index": "\u7dcf\u5408\u7d22\u5f15", + "Global Module Index": "\u30e2\u30b8\u30e5\u30fc\u30eb\u7dcf\u7d22\u5f15", + "Go": "\u691c\u7d22", + 
"Hide Search Matches": "\u691c\u7d22\u7d50\u679c\u3092\u96a0\u3059", + "Index": "\u7d22\u5f15", + "Index – %(key)s": "\u7d22\u5f15 – %(key)s", + "Index pages by letter": "\u982d\u6587\u5b57\u5225\u7d22\u5f15", + "Indices and tables:": "\u7d22\u5f15\u3068\u8868\u4e00\u89a7:", + "Last updated on %(last_updated)s.": "\u6700\u7d42\u66f4\u65b0: %(last_updated)s", + "Library changes": "\u30e9\u30a4\u30d6\u30e9\u30ea\u306b\u95a2\u3059\u308b\u5909\u66f4", + "Navigation": "\u30ca\u30d3\u30b2\u30fc\u30b7\u30e7\u30f3", + "Next topic": "\u6b21\u306e\u30c8\u30d4\u30c3\u30af\u3078", + "Other changes": "\u305d\u306e\u4ed6\u306e\u5909\u66f4", + "Overview": "\u6982\u8981", + "Please activate JavaScript to enable the search\n functionality.": "\u691c\u7d22\u6a5f\u80fd\u3092\u4f7f\u3046\u306b\u306f JavaScript \u3092\u6709\u52b9\u306b\u3057\u3066\u304f\u3060\u3055\u3044\u3002", + "Preparing search...": "\u691c\u7d22\u3092\u6e96\u5099\u3057\u3066\u3044\u307e\u3059...", + "Previous topic": "\u524d\u306e\u30c8\u30d4\u30c3\u30af\u3078", + "Quick search": "\u30af\u30a4\u30c3\u30af\u691c\u7d22", + "Search": "\u691c\u7d22", + "Search Page": "\u691c\u7d22\u30da\u30fc\u30b8", + "Search Results": "\u691c\u7d22\u7d50\u679c", + "Search finished, found ${resultCount} page(s) matching the search query.": "", + "Search within %(docstitle)s": "%(docstitle)s \u5185\u3092\u691c\u7d22", + "Searching": "\u691c\u7d22\u4e2d", + "Searching for multiple words only shows matches that contain\n all words.": "\u8907\u6570\u306e\u5358\u8a9e\u3092\u691c\u7d22\u3059\u308b\u3068\u3001\u6b21\u3092\u542b\u3080\u4e00\u81f4\u306e\u307f\u304c\u8868\u793a\u3055\u308c\u307e\u3059\n \u00a0\u00a0\u00a0 \u3059\u3079\u3066\u306e\u7528\u8a9e\u3002", + "Show Source": "\u30bd\u30fc\u30b9\u30b3\u30fc\u30c9\u3092\u8868\u793a", + "Table of Contents": "\u76ee\u6b21", + "This Page": "\u3053\u306e\u30da\u30fc\u30b8", + "Welcome! This is": "Welcome! This is", + "Your search did not match any documents. 
Please make sure that all words are spelled correctly and that you've selected enough categories.": "\u691c\u7d22\u3057\u305f\u6587\u5b57\u5217\u306f\u3069\u306e\u6587\u66f8\u306b\u3082\u898b\u3064\u304b\u308a\u307e\u305b\u3093\u3067\u3057\u305f\u3002\u3059\u3079\u3066\u306e\u5358\u8a9e\u304c\u6b63\u78ba\u306b\u8a18\u8ff0\u3055\u308c\u3066\u3044\u308b\u304b\u3001\u3042\u308b\u3044\u306f\u3001\u5341\u5206\u306a\u30ab\u30c6\u30b4\u30ea\u30fc\u304c\u9078\u629e\u3055\u308c\u3066\u3044\u308b\u304b\u78ba\u8a8d\u3057\u3066\u304f\u3060\u3055\u3044\u3002", + "all functions, classes, terms": "\u95a2\u6570\u3001\u30af\u30e9\u30b9\u304a\u3088\u3073\u7528\u8a9e\u7dcf\u89a7", + "can be huge": "\u5927\u304d\u3044\u5834\u5408\u304c\u3042\u308b\u306e\u3067\u6ce8\u610f", + "last updated": "\u6700\u7d42\u66f4\u65b0", + "lists all sections and subsections": "\u7ae0\uff0f\u7bc0\u4e00\u89a7", + "next chapter": "\u6b21\u306e\u7ae0\u3078", + "previous chapter": "\u524d\u306e\u7ae0\u3078", + "quick access to all modules": "\u5168\u30e2\u30b8\u30e5\u30fc\u30eb\u65e9\u898b\u8868", + "search": "\u691c\u7d22", + "search this documentation": "\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8\u3092\u691c\u7d22", + "the documentation for": "the documentation for" + }, + "plural_expr": "0" +}); \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/genindex.html b/manual/v1.0.0/ja/html/genindex.html new file mode 100644 index 0000000..802a9e2 --- /dev/null +++ b/manual/v1.0.0/ja/html/genindex.html @@ -0,0 +1,105 @@ + + + + + + + 索引 — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ + +

索引

+ +
+ +
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/index.html b/manual/v1.0.0/ja/html/index.html new file mode 100644 index 0000000..28afb83 --- /dev/null +++ b/manual/v1.0.0/ja/html/index.html @@ -0,0 +1,122 @@ + + + + + + + + Moller Users Guide — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + +
+ + +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/about/index.html b/manual/v1.0.0/ja/html/moller/about/index.html new file mode 100644 index 0000000..fe37809 --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/about/index.html @@ -0,0 +1,176 @@ + + + + + + + + 1. 概要 — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

1. Overview

+
+

1.1. What is moller?

+

In recent years, the use of machine learning to predict material properties and to design materials (so-called materials informatics) has been attracting attention. The accuracy of machine learning depends strongly on preparing appropriate training data, so tools and environments that allow training data to be generated quickly are expected to contribute greatly to progress in materials informatics research.

+

moller is provided as one of the HTP-Tools, a package for supporting high-throughput calculations. It is a tool that generates batch job scripts for supercomputers and clusters; using its bulk-execution feature, a program can be run in parallel over a series of computational conditions, such as a parameter sweep. At present, the supercomputers ohtaka (slurm job scheduler) and kugui (PBS job scheduler) provided by the Institute for Solid State Physics, The University of Tokyo are supported.

+
+
+

1.2. License

+

The program package and the complete source code of this software are distributed under the GNU General Public License version 3 (GPL v3).

+
+
+

1.3. Contributors

+

This software has been developed by the following contributors.

+
    +
  • ver.1.0.0 (released 2024/03/06)

  • ver.1.0-beta (released 2023/12/28)

    • Developers

      • Kazuyoshi Yoshimi (The Institute for Solid State Physics, The University of Tokyo)

      • Tatsumi Aoyama (The Institute for Solid State Physics, The University of Tokyo)

      • Yuichi Motoyama (The Institute for Solid State Physics, The University of Tokyo)

      • Masahiro Fukuda (The Institute for Solid State Physics, The University of Tokyo)

      • Kota Ido (The Institute for Solid State Physics, The University of Tokyo)

      • Tetsuya Fukushima (The National Institute of Advanced Industrial Science and Technology (AIST))

      • Shusuke Kasamatsu (Faculty of Science, Yamagata University)

      • Takashi Koretsune (Graduate School of Science, Tohoku University)

    • Project Coordinator

      • Taisuke Ozaki (The Institute for Solid State Physics, The University of Tokyo)
+
+
+

1.4. Copyright

+

© 2023- The University of Tokyo. All rights reserved.

+

This software was developed with the support of the FY2023 "Project for Advancement of Software Usability in Materials Science" of the Institute for Solid State Physics, The University of Tokyo; its copyright is held by the University of Tokyo.

+
+
+

1.5. Operating environment

+

It has been confirmed to work in the following environment.

+
    +
  • Ubuntu Linux + python3

  • +
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/appendix/index.html b/manual/v1.0.0/ja/html/moller/appendix/index.html new file mode 100644 index 0000000..5651561 --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/appendix/index.html @@ -0,0 +1,313 @@ + + + + + + + + 6. 拡張ガイド — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

6. Extension guide

+

(Note: the content of this section may change depending on the version of moller.)

+
+

6.1. Bulk job execution by moller

+

Bulk execution means that many small tasks are run concurrently inside a single batch job submitted to a large batch queue. The basic idea is to launch N tasks in the background as shown below, let them run at the same time, and wait for all of them to finish with a wait statement.

+
task param_1 &
+task param_2 &
+     ...
+task param_N &
+wait
+
+
+

To do this, the nodes and cores allocated to the batch job have to be divided up so that the tasks for param_1 through param_N are placed on separate nodes and cores. When there are many tasks, their execution also has to be scheduled so that at most N tasks run at a time, according to the allocated resources.
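As a purely illustrative aside (this is not how a moller script is implemented), plain bash can already cap the number of concurrently running background tasks; task, list.dat, and the limit N are placeholders here, and wait -n requires bash 4.3 or later.

#!/bin/bash
# Hypothetical sketch: run the tasks listed in list.dat, at most N at a time.
N=4
while read -r param; do
  task "$param" &                          # launch one task in the background
  while [ "$(jobs -rp | wc -l)" -ge "$N" ]; do
    wait -n                                # wait until any one running task finishes
  done
done < list.dat
wait                                       # wait for the remaining tasks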

+

In the following, a job script generated by moller is called a moller script. A moller script uses GNU parallel [1] to run and control tasks concurrently. GNU parallel is a tool that receives the list param_1 through param_N and runs a command concurrently with these entries as its arguments. The following shows the basic idea of an execution with GNU parallel, where param_1 through param_N are listed one per line in list.dat.

+
cat list.dat | parallel -j N task
+
+
+

The number of tasks run at the same time is computed at run time: the numbers of nodes and cores allocated to the batch job are obtained from environment variables and similar sources, and combined with the parallelism specified for each task (the node parameter: numbers of nodes, processes, and threads).
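The computation itself amounts to simple integer arithmetic. A minimal sketch, assuming a SLURM-type environment and illustrative variable names (these are not moller's internal names):

# Hypothetical sketch: how many tasks fit into the allocation at the same time.
nnodes=${SLURM_NNODES:-1}                # allocated nodes
ncores=${SLURM_CPUS_ON_NODE:-1}          # cores per node
task_nodes=1                             # per-task parallelism (node parameter)
task_procs=4                             # MPI processes per task
task_threads=2                           # threads per process

cores_per_task=$(( task_procs * task_threads ))
if [ "$task_nodes" -gt 1 ]; then
  multiplicity=$(( nnodes / task_nodes ))                 # each task takes whole nodes
else
  multiplicity=$(( nnodes * (ncores / cores_per_task) ))  # several tasks share one node
fi
echo "up to ${multiplicity} tasks will run concurrently"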

+

How tasks are placed onto nodes and cores depends on the job scheduler. With SLURM-type job schedulers, the options for exclusive use of resources are used so that the scheduler itself places the multiple srun invocations issued inside the batch job. The concrete options depend on the platform settings.
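For example, on a SLURM-type system the pattern could look roughly like the following; the --exclusive flag and the resource numbers are assumptions for the sake of illustration, and the exact options differ from site to site:

#!/bin/bash
#SBATCH -N 4
# Hypothetical sketch: let SLURM place several concurrent job steps.
for param in param_1 param_2 param_3 param_4; do
  # --exclusive makes each step claim only its own share of the allocation
  srun --exclusive -N 1 -n 4 -c 2 ./task "$param" &
done
wait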

+

PBS-type job schedulers, on the other hand, have no such mechanism, and the distribution of resources has to be handled inside the moller script. The moller script divides the compute nodes and cores allocated to the batch job into slots and assigns them to the tasks processed concurrently by GNU parallel. The division into slots is computed at run time from the allocated nodes and cores and the per-task parallelism, and is kept in a table. Inside a task, the program is executed with the compute nodes specified and the allocated cores pinned through the arguments and environment variables of mpirun (mpiexec). This processing depends on the MPI implementation in use.
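A rough sketch of this idea for a PBS-type scheduler is shown below; the slot assignment is reduced to one node per slot, and Open MPI-style mpirun options are assumed (the real handling depends on the MPI implementation, as noted above):

#!/bin/bash
# Hypothetical sketch: pin one task to the node assigned to its slot.
mapfile -t _nodes < <(sort -u "$PBS_NODEFILE")   # allocated compute nodes
slot_id=$1                                       # slot number handed over by the caller
node=${_nodes[$(( (slot_id - 1) % ${#_nodes[@]} ))]}

# Run 4 MPI processes on that node, binding each rank to a core (Open MPI syntax).
mpirun --host "${node}:4" -np 4 --bind-to core ./my_program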

+
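参考として、Open MPI を想定した場合の実行イメージを示します(ノード名・コア範囲・プログラム名は仮の値で、オプション名は使用する MPI 実装によって異なります)。

# スロットテーブルから取得したノードとコアを指定してプログラムを実行するイメージ (Open MPI を想定)
node=c01n001                          # 割り当てられた計算ノード名 (仮)
cores=0-15                            # 割り当てられたコア範囲 (仮)
mpirun -np 16 --host ${node}:16 --cpu-set ${cores} --bind-to core ./a.out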

参考文献

+

[1] O. Tange, GNU Parallel - The command-Line Power Tool, ;login: The USENIX Magazine, February 2011:42-47.

+
+
+

6.2. mollerの動作について

+
+

mollerで生成されるスクリプトの構成

+

mollerは、入力されたYAMLファイルの内容をもとに、バルク実行のためのジョブスクリプトを生成します。生成されるジョブスクリプトは先頭から順に次のような構成になっています。

+
    +
  1. ヘッダ

     ジョブスケジューラへの指示が記述されます。platform セクションに指定した内容が、ジョブスケジューラの種類に応じた形式に整形されて出力されます。この処理はプラットフォーム依存です。

  2. プロローグ

     prologue セクションに指定した内容です。code ブロックの中身がそのまま転記されます。

  3. 関数の定義

     ジョブスクリプト内部で使う関数および変数の定義が出力されます。関数の概要については次節で説明します。この箇所はプラットフォーム依存です。

  4. コマンドライン引数の処理

     SLURM系のジョブスケジューラでは、リストファイルの指定やタスクの再実行などのオプション指定を sbatch コマンドの引数として与えることができます。

     PBS系のジョブスケジューラでは引数の指定は無視されるため、オプション指定はジョブスクリプトを編集してパラメータをセットする必要があります。リストファイルのファイル名はデフォルトで list.dat です。また、タスクのリトライを行うには retry 変数を 1 にセットします。

  5. タスクの記述

     jobs セクションに記述されるタスクの内容を出力します。タスクが複数ある場合はタスクごとに以下の処理を実行します。

     parallel = false の場合は run ブロックの中身がそのまま転記されます。

     parallel = true (デフォルト) の場合、task_タスク名 という関数が生成され、並列実行のための前処理と run ブロックの内容が出力されます。並列計算のためのキーワード(srun、mpiexec、または mpirun)はプラットフォームに応じたコマンドに置き換えられます。関数定義に続いて並列実行のコマンドが書き出されます。

  6. エピローグ

     epilogue セクションに指定した内容です。code ブロックの中身がそのまま転記されます。
+
+
+

moller scriptの関数の概要

+

moller script の内部で使用する主な関数の概要を以下に説明します。

+
    +
  • run_parallel

    +

    タスクの内容を記述した関数(タスク関数)を並行実行する関数です。並列度、タスク関数、ステータスファイル名を引数に取ります。内部では _find_multiplicity 関数を呼んで多重度を計算し、GNU parallel を起動してタスクを並行実行します。GNU parallel の多段処理に対応するために、タスク関数は _run_parallel_task 関数でラップされます。

    +

    プラットフォーム依存性は _find_multiplicity および _setup_run_parallel 関数としてくくり出しています。

    +
  • +
  • _find_multiplicity

    +

    並列実行の多重度を、割当てリソース(ノード数・コア数)とタスクの並列度指定から計算します。PBS系のジョブスケジューラでは、さらに計算ノード・コアをスロットに分割し、テーブルで保持します。 +実行時に環境から取得する情報は次の通りです。

    +
      +
    • SLURM系
      • 割当てノード数 _nnodes : 環境変数 SLURM_NNODES から取得
      • 割当てコア数 _ncores : 環境変数 SLURM_CPUS_ON_NODE から取得
    • PBS系
      • 割当てノード _nodes[] : PBS_NODEFILE で指定されるファイルから計算ノードのリストを取得
      • ノード数 _nnodes : _nodes[] の項目数
      • 割当てコア数 _ncores : 以下の順に検索されます。
        • NCPUS (PBS Professional)
        • OMP_NUM_THREADS
        • platform セクションの core 指定(スクリプト中に moller_core 変数として書き込まれる)
        • ヘッダの ncpus または ppn パラメータ
    +
  • +
  • _setup_run_parallel

    +

    GNU parallel による並行実行を開始する前にいくつか処理を追加するために呼ばれます。PBS系ではスロットに分割されたノード・コアのテーブルをタスク関数から参照できるよう export します。SLURM系では実行する内容はありません。

    +
  • +
+

各タスクに対応するタスク関数の構成は次の通りです(リストの後に簡単なスケッチを示します)。

+
    +
  • タスク関数の引数は 1) 並列度指定(ノード数・プロセス数・スレッド数) 2) 実行ディレクトリ 3) GNU parallel のスロットID です。

  • +
  • _setup_taskenv で実行環境の設定を行います。この関数はプラットフォーム依存です。PBS系ではスロットIDに基づいて計算ノード・コアをテーブルから取得します。SLURM系では実行する内容はありません。

  • +
  • 直前に実行するタスクが正常終了したかどうかを _is_ready 関数を呼んでチェックします。正常終了している場合はタスクの処理を継続します。それ以外の場合は -1 のステータスでタスクの処理を中断します。

  • +
  • code ブロックの内容を転記します。その際に、並列計算のためのキーワード(srun、mpiexec、または mpirun)はプラットフォームに応じたコマンドに置き換えられます。

  • +
+
+
+
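参考として、タスク関数の構成を簡略化したスケッチを示します(実際に moller が生成するコードとは異なります。タスク名 solve とプログラム名 ./a.out は仮のものです)。

# タスク関数のスケッチ (簡略版)
task_solve() {
    local nodespec=$1 dir=$2 slot=$3      # 1) 並列度指定 2) 実行ディレクトリ 3) GNU parallel のスロットID
    _setup_taskenv "$slot"                # PBS系: スロットIDから計算ノード・コアを取得
    _is_ready "$dir" || return 255        # 直前のタスクが失敗していれば中断 (-1 相当)
    cd "$dir" || return 255
    mpirun ./a.out > stdout.log 2>&1      # run ブロックの内容 (並列キーワードは置換される)
}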
+

6.3. mollerを他のシステムで使うには

+

mollerには現在、物性研スーパーコンピュータシステム ohtaka および kugui 向けの設定が用意されています。mollerを他のシステムで使うための拡張ガイドを以下で説明します。

+
+

クラス構成

+

mollerの構成のうちプラットフォーム依存の部分は platform/ ディレクトリにまとめています。 +クラス構成は次のとおりです。

+
digraph class_diagram {
+size="5,5"
+node[shape=record,style=filled,fillcolor=gray95]
+edge[dir=back,arrowtail=empty]
+
+Platform[label="{Platform (base.py)}"]
+BaseSlurm[label="{BaseSlurm (base_slurm.py)}"]
+BasePBS[label="{BasePBS (base_pbs.py)}"]
+BaseDefault[label="{BaseDefault (base_default.py)}"]
+
+Ohtaka[label="{Ohtaka (ohtaka.py)}"]
+Kugui[label="{Kugui (kugui.py)}"]
+Pbs[label="{Pbs (pbs.py)}"]
+Default[label="{DefaultPlatform (default.py)}"]
+
+Platform->BaseSlurm
+Platform->BasePBS
+Platform->BaseDefault
+
+BaseSlurm->Ohtaka
+BasePBS->Kugui
+BasePBS->Pbs
+BaseDefault->Default
+}
+

プラットフォームの選択についてはファクトリが用意されています。register_platform(登録名, クラス名) でクラスをファクトリに登録し、 platform/__init__.py にクラスを import しておくと、入力パラメータファイル中で platform セクションの system パラメータに指定できるようになります。

+
+
+

SLURM系ジョブスケジューラ

+

SLURM系のジョブスケジューラを利用している場合、BaseSlurm クラスを元にシステム固有の設定を行います。 +並列計算を実行するキーワードを置き換える文字列は parallel_command() メソッドの戻り値で与えます。リソースの排他利用を行うための srun のパラメータをここに指定します。 +具体例は ohtaka.py を参照してください。

+
+
+

PBS系ジョブスケジューラ

+

PBS系のジョブスケジューラ (PBS Professional, OpenPBS, Torque など)を利用している場合、BasePBS クラスを元にシステム固有の設定を行います。

+

PBS系ではバッチジョブのノード数の指定の仕方に2通りあり、PBS Professional は select=N:ncpus=n という書式で指定しますが、Torque などは nodes=N:ppn=n と記述します。後者の指定を用いる場合は self.pbs_use_old_format = True をセットします。

+
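参考として、両者のヘッダ行のイメージを示します(ノード数・コア数は仮の値です)。

#PBS -l select=2:ncpus=128      # PBS Professional の場合
#PBS -l nodes=2:ppn=128         # Torque などの場合 (pbs_use_old_format = True)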

計算ノードのコア数は node パラメータで指定できますが、対象システムを限定してコア数のデフォルト値を設定しておくこともできます。kugui.py ではノードあたり128コアを設定しています。

+
+
+

細かいカスタマイズが必要な場合

+

基底クラスを参照して必要なメソッドを再定義します。メソッド構成は次のようになっています。

+
    +
  • setup

    +

    platform セクションのパラメータの取り出しなどを行います。

    +
  • +
  • parallel_command

    +

    並列計算のキーワード (srun, mpiexec, mpirun) を置き換える文字列を返します。

    +
  • +
  • generate_header

    +

    ジョブスケジューラオプションの指定を記述したヘッダを生成します。

    +
  • +
  • generate_function

    +

    moller script 内部で使用する関数の定義を生成します。変数および関数の実体はそれぞれ以下のメソッドで作られます。

    +
      +
    • generate_variable

    • +
    • generate_function_body

    • +
    +

    それぞれの関数は埋め込み文字列としてクラス内で定義されています。

    +
  • +
+
+
+

新しいタイプのジョブスケジューラに対応させるには

+

moller scriptの動作のうちプラットフォーム依存な箇所は、並行実行の多重度の計算、リソース配置に関する部分、並列計算のコマンドです。

+
    +
  • 割当てノード・ノード数・ノードあたりのコア数を実行時に環境変数等から取得する方法

  • +
  • 並列計算を実行するコマンド (mpiexec等) と、実行ホストやコア割当の指定のしかた

  • +
+

これらをもとにmoller script内で使う関数を作成します。 +printenv コマンドでジョブスクリプト内で有効な環境変数の一覧を取得できます。

+
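例えば、次の1行を含むテストジョブを対象システムに投入すると、ジョブスクリプト内で参照できる環境変数の一覧をファイルに書き出せます(出力ファイル名は任意です)。

# ジョブ内で有効な環境変数を一覧してファイルに保存する
printenv | sort > env_dump.txt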
+
+

トラブルシューティング

+

moller script内の _debug 変数を 1 にセットすると、バッチジョブ実行時にデバッグ出力が書き出されます。もしジョブがうまく実行されないときは、デバッグ出力を有効にして、内部パラメータが正しくセットされているかを確認してみてください。

+
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/basic-usage.html b/manual/v1.0.0/ja/html/moller/basic-usage.html new file mode 100644 index 0000000..4ff9f74 --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/basic-usage.html @@ -0,0 +1,260 @@ + + + + + + + + 2. インストールと基本的な使い方 — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

2. インストールと基本的な使い方

+

必要なライブラリ・環境

+
+

HTP-tools に含まれる網羅計算ツール moller を利用するには、以下のプログラムとライブラリが必要です。

+
    +
  • Python 3.x

  • +
  • ruamel.yaml モジュール

  • +
  • tabulate モジュール

  • +
  • GNU Parallel (ジョブスクリプトを実行するサーバ・計算ノード上にインストールされていること)

  • +
+
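ジョブを実行する計算ノード上で GNU parallel が利用できるかどうかは、例えば次のコマンドで確認できます。

$ parallel --version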
+

ソースコード配布サイト

+
+
+

ダウンロード方法

+
+

gitを利用できる場合は、以下のコマンドでmollerをダウンロードできます。

+
$ git clone https://github.com/issp-center-dev/Moller.git
+
+
+
+

インストール方法

+
+

mollerをダウンロード後、以下のコマンドを実行してインストールします。mollerが利用するライブラリも必要に応じてインストールされます。

+
$ cd ./Moller
+$ python3 -m pip install .
+
+
+

実行プログラム moller および moller_status がインストールされます。

+
+

ディレクトリ構成

+
+
.
+|-- LICENSE
+|-- README.md
+|-- pyproject.toml
+|-- docs/
+|   |-- ja/
+|   |-- en/
+|   |-- tutorial/
+|-- src/
+|   |-- moller/
+|       |-- __init__.py
+|       |-- main.py
+|       |-- platform/
+|       |   |-- __init__.py
+|       |   |-- base.py
+|       |   |-- base_slurm.py
+|       |   |-- base_pbs.py
+|       |   |-- base_default.py
+|       |   |-- ohtaka.py
+|       |   |-- kugui.py
+|       |   |-- pbs.py
+|       |   |-- default.py
+|       |   |-- function.py
+|       |   |-- utils.py
+|       |-- moller_status.py
+|-- sample/
+
+
+
+

基本的な使用方法

+

mollerはスーパーコンピュータ向けにバッチジョブスクリプトを生成するツールです。多重実行の機能を利用して、パラメータ並列など一連の計算条件について並列にプログラムを実行します。

+
    +
  1. 構成定義ファイルの作成

    +
    +

    mollerを使用するには、まず、計算内容を記述した構成定義ファイルをYAML形式で作成します。詳細についてはファイルフォーマットの章を参照してください。

    +
    +
  2. +
  3. コマンドの実行

    +
    +

    作成した構成定義ファイルを入力としてmollerプログラムを実行します。バッチジョブスクリプトが生成されます。

    +
    $ moller -o job.sh input.yaml
    +
    +
    +
    +
  4. +
  5. バッチジョブの実行

    +
    +

    生成されたバッチジョブスクリプトを対象となるスーパーコンピュータシステムに転送します。 +並列実行する各パラメータごとにディレクトリを用意し、 list.dat にディレクトリ名を列挙します。 +list.dat には、ジョブを実行するディレクトリからの相対パスまたは絶対パスを記述します。

    +

    リストファイルが用意できたらバッチジョブを投入します。 +以下では、物性研システムB(ohtaka)およびシステムC(kugui)で実行するケースをそれぞれ示します。

    +
      +
    • 物性研システムB(ohtaka)の場合

      +

      ohtaka では slurm ジョブスケジューラが使用されています。バッチジョブを投入するには、バッチジョブスクリプトを引数として sbatch コマンドを実行します。ジョブスクリプト名に続けてスクリプトのパラメータを渡すことができます。パラメータとしてリストファイルを指定します。

      +
      $ sbatch job.sh list.dat
      +
      +
      +

      リストファイルの指定がない場合は list.dat がデフォルトとして使われます。

      +
    • +
    • 物性研システムC(kugui)の場合

      +

      kugui では PBS ジョブスケジューラが使用されています。バッチジョブを投入するには、バッチジョブスクリプトを引数として qsub コマンドを実行します。スクリプトのパラメータの指定はできないので、リストファイルは list.dat として用意する必要があります。

      +
      $ qsub job.sh
      +
      +
      +
    • +
    +
    +
  6. +
  7. 結果の確認

    +
    +

    バッチジョブ終了後に、

    +
    $ moller_status input.yaml list.dat
    +
    +
    +

    を実行すると、各パラメータセットについて計算が正常に終了したかどうかを集計したレポートが出力されます。

    +
    +
  8. +
  9. ジョブの再開・再実行

    +
    +

    ジョブが途中で終わった場合、続けて実行するには、同じリストファイルを指定してもう一度バッチジョブを投入します。 +未実行(未完了を含む)のタスクから実行が継続されます。

    +
      +
    • 物性研システムB(ohtaka)の場合

      +

      以下のように、リストファイルを指定して sbatch コマンドを実行します。

      +
      $ sbatch job.sh list.dat
      +
      +
      +

      エラーで終了したタスクを再実行するには、--retry オプションを付けてバッチジョブを投入します。

      +
      $ sbatch job.sh --retry list.dat
      +
      +
      +
    • +
    • 物性研システムC(kugui)の場合

      +

      job.sh を編集して retry=0 の行を retry=1 に書き換えた後、

      +
      $ qsub job.sh
      +
      +
      +

      を実行します。

      +
    • +
    +
    +
  10. +
+

参考文献

+

[1] O. Tange, GNU Parallel - The command-Line Power Tool, ;login: The USENIX Magazine, February 2011:42-47.

+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/command/index.html b/manual/v1.0.0/ja/html/moller/command/index.html new file mode 100644 index 0000000..3ad70fd --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/command/index.html @@ -0,0 +1,208 @@ + + + + + + + + 4. コマンドリファレンス — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

4. コマンドリファレンス

+
+

4.1. moller

+
+

網羅計算のためのバッチジョブスクリプトを生成する

+
+

書式:

+
+
moller [-o job_script] input_yaml
+
+
+
+

説明:

+
+

input_yaml に指定した構成定義ファイルを読み込み、バッチジョブスクリプトを生成します。 +以下のオプションを受け付けます。

+
    +
  • -o, --output job_script

    +

    出力先のファイル名を指定します。構成定義ファイル内の output_file パラメータより優先されます。ファイル名の指定がない場合は標準出力に書き出されます。

    +
  • +
  • -h

    +

    ヘルプを表示します。

    +
  • +
+
+
+
+

4.2. moller_status

+
+

網羅計算ジョブの実行状況をレポートする

+
+

書式:

+
+
moller_status [-h] [--text|--csv|--html] [--ok|--failed|--skipped|--collapsed|--yet] [-o output_file] input_yaml [list_file]
+
+
+
+

説明:

+
+

mollerで生成したジョブスクリプトを実行した際に、ジョブごとの各タスクが完了したかどうかを集計してレポートを作成します。input_yaml に指定した構成定義ファイルからタスクの内容を読み込みます。ジョブのリストは list_file に指定したファイルから取得します。list_file が指定されていないときは、実行時ログファイルから収集します。 +出力形式をオプションで指定できます。デフォルトはテキスト形式です。出力先を -o または --output オプションで指定します。指定がない場合は標準出力に書き出されます。

+
    +
  • 出力モード

    +

    出力形式を指定します。以下のいずれかを指定できます。複数同時に指定した場合はエラーになります。デフォルトはテキスト形式です。

    +
      +
    • --text +テキスト形式で出力します。

    • +
    • --csv +CSV (カンマ区切りテキスト) 形式で出力します。

    • +
    • --html +HTML形式で出力します。

    • +
    +
  • +
  • input_yaml

    +

    mollerの構成定義ファイルを指定します。

    +
  • +
  • list_file

    +

    ジョブのリストを格納したファイルを指定します。指定がない場合は、バッチジョブから出力されるログファイル stat_{task}.dat から収集します。

    +
  • +
  • -o, --output output_file

    +

    出力先のファイル名を指定します。指定がない場合は標準出力に書き出されます。

    +
  • +
  • フィルタ

    +

    出力内容を指定します。以下のいずれかを指定できます。指定がない場合は全てのジョブの情報が出力されます。

    +
      +
    • --ok +全てのタスクが完了したジョブのみを表示します。

    • +
    • --failed +エラー、スキップまたは未実行のタスクがあるジョブを表示します。

    • +
    • --skipped +実行をスキップしたタスクがあるジョブを表示します。

    • +
    • --yet +未実行のタスクがあるジョブを表示します。

    • +
    • --collapsed +エラー終了したタスクがあるジョブを表示します。

    • +
    • --all +全てのジョブを表示します。(デフォルト)

    • +
    +
  • +
  • -h

    +

    ヘルプを表示します。

    +
  • +
+
+
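例えば、エラー終了したタスクがあるジョブのみを CSV 形式でファイルに書き出すには、次のように実行します(出力ファイル名 failed.csv は任意の例です)。

$ moller_status --csv --collapsed -o failed.csv input.yaml list.dat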

ファイル:

+
+

mollerで生成したジョブスクリプトを用いてプログラムを並列実行すると、実行状況がログファイル stat_{task}.dat に出力されます。moller_status はこのファイルを集計し、読みやすい形式に整形します。

+
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/filespec/index.html b/manual/v1.0.0/ja/html/moller/filespec/index.html new file mode 100644 index 0000000..68bb8cb --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/filespec/index.html @@ -0,0 +1,253 @@ + + + + + + + + 5. ファイルフォーマット — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

5. ファイルフォーマット

+
+

5.1. 構成定義ファイル

+

構成定義ファイルでは、moller でバッチジョブスクリプトを生成するための設定情報を YAML形式で記述します。本ファイルは以下の部分から構成されます。

+
+
    +
  1. 全般的な記述: ジョブ名や出力ファイル名などを設定します。
  2. platformセクション: バッチジョブを実行するシステムやバッチジョブに関する設定を記述します。
  3. prologue, epilogue セクション: バッチジョブ内で行う環境設定や終了処理などを記述します。
  4. jobsセクション: タスクを記述します。
+
+
+

全体

+
+

name

+
+

バッチジョブのジョブ名を指定します。指定がない場合は空欄となります。(通常はジョブスクリプトのファイル名がジョブ名になります)

+
+

description

+
+

バッチジョブの説明を記述します。コメントとして扱われます

+
+

output_file

+
+

moller の出力先ファイル名を指定します。コマンドライン引数の指定がある場合はコマンドライン引数の指定を優先します。いずれも指定がない場合は標準出力に出力されます。

+
+
+
+
+

platform

+
+

system

+
+

対象となるシステムを指定します。現状では ohtaka と kugui が指定できます。

+
+

queue

+
+

使用するバッチキューの名称を指定します。キューの名称はシステムに依存します。

+
+

node

+
+

使用するノード数を指定します。指定方法は ノード数(整数値) または [ノード数, ノードあたりのコア数] (整数値のリスト) です。数値の範囲はシステムとキューの指定に依存します。(ノードあたりのコア数の指定はkugui,defaultのみ有効。ohtakaの場合は使われません。)

+
+

core

+
+

1ノードあたり使用するコア数を指定します。数値の範囲はシステムとキューの指定に依存します。 node パラメータに同時にノードあたりのコア数が指定されている場合、 core の指定が優先します。(kugui,defaultのみ)

+
+

elapsed

+
+

バッチジョブの実行時間を指定します。書式は HH:MM:SS です。

+
+

options

+
+

その他のバッチジョブオプションを指定します。書式は、ジョブスクリプトのオプション行の内容をリスト形式または複数行からなる文字列で記述したものです。各行の冒頭の指示語(#PBS、#SBATCH など)は含めません。以下に例を示します。

+
    +
  • SLURMの場合 (文字列で指定する例)

    +
    options: |
    +  --mail-type=BEGIN,END,FAIL
    +  --mail-user=user@sample.com
    +  --requeue
    +
    +
    +
  • +
  • PBSの場合 (リストで指定する例)

    +
    options:
    +  - -m bea
    +  - -M user@sample.com
    +  - -r y
    +
    +
    +
  • +
+
+
+
+
+

prologue, epilogue

+

prologueセクションはタスク開始前に実行する内容を記述します。ライブラリやパスなど環境変数の設定等を行うのに利用できます。epilogueセクションは全タスク終了後に実行する内容を記述します。

+
+

code

+
+

処理内容をシェルスクリプトの記法で記述します。記述内容はバッチジョブスクリプト中に埋め込まれてバッチジョブ内で実行されます。

+
+
+
+
+

jobs

+

ジョブで実行する一連のタスクを、タスク名をキー、処理内容を値として記述するテーブルの形式で記述します。

+
+

キー

+
+

タスク名

+
+

+
+

以下の項目からなるテーブル:

+
+

description

+
+

タスクの説明を記述します。コメントとして扱われます。

+
+

node

+
+

並列度を指定します。指定方法は以下のいずれかです。

+
    +
  • [ プロセス数, プロセスあたりのスレッド数 ]

  • +
  • [ ノード数, プロセス数, プロセスあたりのスレッド数 ]

  • +
  • ノード数

  • +
+

ノード数を指定した場合、その数のノードが排他的にジョブに割り当てられます。ノード数を指定しない1番目の形式の場合、使用コア数が1ノードに満たないときは複数のジョブがノードに詰めて割り当てられます。1ノード以上を使う場合は必要ノード数を占有して実行されます。

+
+

parallel

+
+

ジョブ間で多重実行する場合は true, 逐次実行する場合は false を指定します。デフォルトは true です。

+
+

run

+
+

タスクの処理内容をシェルスクリプトの記法で記述します。MPIプログラムまたは MPI/OpenMP ハイブリッドプログラムを実行する箇所は

+
srun prog [arg1, ...]
+
+
+

と記述します。srun の他に mpirun, mpiexec のキーワードが有効です。このキーワードは、実際のバッチジョブスクリプト中では、並列実行のためのコマンド (srun や mpirun) と node パラメータで指定した並列度の設定に置き換えて記述されます。

+
+
+
+
+
+
+
+

5.2. リストファイル

+

ジョブのリストを指定します。ファイルはテキスト形式で、一行に一つのジョブ名を記述します(ディレクトリ名がジョブ名となります)。

+
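例えば、次のような内容のテキストファイルになります(ディレクトリ名は任意の例です)。

dataset-0001
dataset-0002
dataset-0003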

mollerでは、ジョブごとにディレクトリを用意し、ジョブ内の各タスクはディレクトリに移動して実行されます。ディレクトリはバッチジョブを実行するディレクトリの直下に配置されているものと仮定します。

+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/index.html b/manual/v1.0.0/ja/html/moller/index.html new file mode 100644 index 0000000..19d5a79 --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/index.html @@ -0,0 +1,153 @@ + + + + + + + + 網羅計算ツール (moller) — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/tutorial/basic.html b/manual/v1.0.0/ja/html/moller/tutorial/basic.html new file mode 100644 index 0000000..459d8e9 --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/tutorial/basic.html @@ -0,0 +1,323 @@ + + + + + + + + 3.1. 基本的な使い方 — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

3.1. 基本的な使い方

+

網羅計算のためのバッチジョブスクリプト生成ツール moller を使うには、入力ファイルとして実行内容を記述する構成定義ファイルを用意した後、プログラム moller を実行します。生成されたバッチジョブスクリプトを対象とするスーパーコンピュータシステムに転送し、バッチジョブを投入して計算を行います。 +以下では、 docs/tutorial/moller ディレクトリにあるサンプルを例にチュートリアルを実施します。

+
+

構成定義ファイルを作成する

+

構成定義ファイルにはバッチジョブで実行する処理の内容を記述します。 +ここで、バッチジョブとはスーパーコンピュータシステム等のジョブスケジューラに投入する実行内容を指します。それに対し、moller が対象とするプログラムの多重実行において、多重実行される一つのパラメータセットでの実行内容をジョブと呼ぶことにします。一つのジョブはいくつかの処理単位からなり、その処理単位をタスクと呼びます。moller ではタスクごとに多重実行し、タスクの前後で同期がとられます。

+
+タスクとジョブ +
+

図 3.1 例: 一つのバッチジョブ内で job #1〜#3 の 3つのジョブを実行する。ジョブはそれぞれ異なるパラメータセットなどに対応する。ジョブの実行内容は task 1〜4 の一連のタスクからなる。タスクごとに job #1〜#3 の処理を並列に行う。

+
+
+

以下に構成定義ファイルのサンプルを記載します。構成定義ファイルは YAMLフォーマットのテキストファイルで、実行するプラットフォームやバッチジョブのパラメータと、タスクの処理内容、前処理・後処理を記述します。

+
name: testjob
+description: Sample task file
+
+platform:
+  system:  ohtaka
+  queue:   i8cpu
+  node:    1
+  elapsed: 00:10:00
+
+prologue:
+  code: |
+    module purge
+    module load oneapi_compiler/2023.0.0 openmpi/4.1.5-oneapi-2023.0.0-classic
+
+    ulimit -s unlimited
+
+    source /home/issp/materiapps/intel/parallel/parallelvars-20210622-1.sh
+
+jobs:
+  start:
+    parallel: false
+    run: |
+      echo "start..."
+
+  hello:
+    description: hello world
+    node: [1,1]
+    run: |
+      echo "hello world." > result.txt
+      sleep 2
+
+  hello_again:
+    description: hello world again
+    node: [1,1]
+    run: |
+      echo "hello world again." >> result.txt
+      sleep 2
+
+epilogue:
+  code: |
+    echo "done."
+    date
+
+
+

platformセクションでは、実行するプラットフォームの種類を指定します。この場合は、物性研システムB(ohtaka)での設定をしています。

+

prologueセクションでは、バッチジョブの前処理を記述します。タスクを実行する前に実行する共通のコマンドラインを記述します。

+

jobsセクションでは、タスクの処理内容を記述します。ジョブで実行する一連のタスクを、タスク名をキー、処理内容を値として記述するテーブルの形式で記述します。

+

この例では、最初に"start..."を出力するタスクを start というタスク名で定義しています。 +ここでは parallel = false に設定しています。この場合、ジョブ単位での並列は行われず、run に記述した内容が逐次的に実行されます。

+

次に、"hello world."を出力するタスクを hello world というタスク名で定義しています。 +ここでは parallel が設定されていないので、 paralle = true として扱われます。この場合、ジョブ単位での並列が行われます。 +同様に、次に "hello world again." を出力するタスクを hello_again というタスク名で定義しています。

+

最後に、epilogueセクションでは、バッチジョブの後処理を記述します。タスクを実行した後に実行する共通のコマンドラインを記述します。

+

仕様の詳細については ファイルフォーマット の章を参照してください。

+
+
+

バッチジョブスクリプトを生成する

+

構成定義ファイル(input.yaml)を入力として moller を実行します。

+
$ moller -o job.sh input.yaml
+
+
+

バッチジョブスクリプトが生成され出力されます。出力先は構成定義ファイル内のパラメータ、または、コマンドラインの -o または --output オプションで指定するファイルです。 +両方指定されている場合はコマンドラインパラメータが優先されます。いずれも指定がない場合は標準出力に書き出されます。

+

必要に応じて mollerで生成したバッチジョブスクリプトを対象のシステムに転送します。 +なお、スクリプトの種類は bash スクリプトです。ジョブ実行時に使用するシェルを bash に設定しておく必要があります。(ログインシェルを csh系などにしている場合は注意)

+
+
+

リストファイルを作成する

+

実行するジョブのリストを作成します。moller では、ジョブごとに個別のディレクトリを用意し、そのディレクトリ内で各ジョブを実行する仕様になっています。 +対象となるディレクトリのリストを格納したファイルを、たとえば以下のコマンドで、リストファイルとして作成します。

+
$ /usr/bin/ls -1d * > list.dat
+
+
+

チュートリアルには、データセットとリストファイルを作成するユーティリティープログラムが付属しています。

+
$ bash ./make_inputs.sh
+
+
+

を実行すると、 output ディレクトリの下にデータセットに相当する dataset-0001 〜 dataset-0020 のディレクトリと、リストファイル list.dat が作成されます。

+
+
+

網羅計算を実行する

+

mollerで生成したバッチジョブスクリプトをジョブスケジューラに投入します。この例ではジョブスクリプトと入力ファイルを output ディレクトリにコピーし、 output に移動してジョブを投入しています。

+
+
$ cp job.sh input.yaml output/
+$ cd output
+$ sbatch job.sh list.dat
+
+
+
+

ジョブが実行されると、リストに記載されたディレクトリにそれぞれ "result.txt" というファイルが生成されます。 +"result.txt" には、ジョブ実行結果の "hello world.", "hello world again." という文字列が出力されていることが確認できます。

+
+
+

実行状況を確認する

+

タスクの実行状況はログファイルに出力されます。ログを収集してジョブごとに実行状況を一覧するツール moller_status が用意されています。ジョブを実行するディレクトリで以下を実行します。

+
$ moller_status input.yaml list.dat
+
+
+

引数には構成定義ファイル input.yaml とリストファイル list.dat を指定します。リストファイルは省略可能で、その場合はログファイルからジョブの情報を収集します。

+

出力サンプルを以下に示します。

+
| job          | hello   | hello_again   |
+|--------------|---------|---------------|
+| dataset-0001 | o       | o             |
+| dataset-0002 | o       | o             |
+| dataset-0003 | o       | o             |
+| dataset-0004 | o       | o             |
+| dataset-0005 | o       | o             |
+| dataset-0006 | o       | o             |
+| dataset-0007 | o       | o             |
+| dataset-0008 | o       | o             |
+| dataset-0009 | o       | o             |
+| dataset-0010 | o       | o             |
+| dataset-0011 | o       | o             |
+| dataset-0012 | o       | o             |
+| dataset-0013 | o       | o             |
+| dataset-0014 | o       | o             |
+| dataset-0015 | o       | o             |
+| dataset-0016 | o       | o             |
+| dataset-0017 | o       | o             |
+| dataset-0018 | o       | o             |
+| dataset-0019 | o       | o             |
+| dataset-0020 | o       | o             |
+
+
+

「o」は正常終了したタスク、「x」はエラーになったタスク、「-」は前のタスクがエラーになったためスキップされたタスク、「.」は未実行のタスクを示します。 +今回は全て正常終了していることがわかります。

+
+
+

失敗したタスクを再実行する

+

タスクが失敗した場合、そのジョブ内の後続のタスクは実行されません。以下は、各タスクが 10% の確率で失敗するケースの実行例です。

+
| job          | task1   | task2   | task3   |
+|--------------|---------|---------|---------|
+| dataset_0001 | o       | o       | o       |
+| dataset_0002 | o       | x       | -       |
+| dataset_0003 | x       | -       | -       |
+| dataset_0004 | x       | -       | -       |
+| dataset_0005 | o       | o       | o       |
+| dataset_0006 | o       | o       | o       |
+| dataset_0007 | o       | x       | -       |
+| dataset_0008 | o       | o       | o       |
+| dataset_0009 | o       | o       | x       |
+| dataset_0010 | o       | o       | o       |
+| dataset_0011 | o       | o       | o       |
+| dataset_0012 | o       | o       | o       |
+| dataset_0013 | o       | x       | -       |
+| dataset_0014 | o       | o       | o       |
+| dataset_0015 | o       | o       | o       |
+| dataset_0016 | o       | o       | o       |
+| dataset_0017 | o       | o       | o       |
+| dataset_0018 | o       | o       | o       |
+| dataset_0019 | o       | o       | o       |
+| dataset_0020 | o       | o       | o       |
+
+
+

dataset_0003, dataset_0004 は task1 が失敗し、後続の task2, task3 は実行されていません。その他の dataset は task1 が成功し、次の task2 が実行されています。このように、各ジョブは他のジョブとは独立に実行されます。

+

失敗したタスクを再実行するには、バッチジョブに retry のオプションをつけて再実行します。 +SLURMジョブスケジューラ (例: 物性研システムB) の場合は次のようにバッチジョブを投入します。

+
$ sbatch job.sh --retry list.dat
+
+
+

PBSジョブスケジューラ (例: 物性研システムC) の場合はジョブスクリプトを編集し、 retry=0 の行を retry=1 に変更して、バッチジョブを再投入します。

+
| job          | task1   | task2   | task3   |
+|--------------|---------|---------|---------|
+| dataset_0001 | o       | o       | o       |
+| dataset_0002 | o       | o       | x       |
+| dataset_0003 | o       | x       | -       |
+| dataset_0004 | o       | o       | o       |
+| dataset_0005 | o       | o       | o       |
+| dataset_0006 | o       | o       | o       |
+| dataset_0007 | o       | o       | o       |
+| dataset_0008 | o       | o       | o       |
+| dataset_0009 | o       | o       | o       |
+| dataset_0010 | o       | o       | o       |
+| dataset_0011 | o       | o       | o       |
+| dataset_0012 | o       | o       | o       |
+| dataset_0013 | o       | o       | o       |
+| dataset_0014 | o       | o       | o       |
+| dataset_0015 | o       | o       | o       |
+| dataset_0016 | o       | o       | o       |
+| dataset_0017 | o       | o       | o       |
+| dataset_0018 | o       | o       | o       |
+| dataset_0019 | o       | o       | o       |
+| dataset_0020 | o       | o       | o       |
+
+
+

エラーになったタスクのみ再実行されます。上記の例では、dataset_0003 は task1 が再実行され正常終了し、次の task2 の実行に失敗しています。dataset_0004 は task1, task2, task3 が正常に実行されています。task3 まで全て正常終了しているデータ・セットに対しては何も実行しません。

+

なお、再実行の際にリストファイルは変更しないでください。リストファイル内の順番でジョブを管理しているため、変更すると正しく再実行されません。

+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/tutorial/dsqss.html b/manual/v1.0.0/ja/html/moller/tutorial/dsqss.html new file mode 100644 index 0000000..4a766c0 --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/tutorial/dsqss.html @@ -0,0 +1,179 @@ + + + + + + + + 3.3. DSQSS による moller 計算の例 — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

3.3. DSQSS による moller 計算の例

+
+

このチュートリアルについて

+

これは、量子多体問題の経路積分モンテカルロ法を実行するためのオープンソースソフトウェアパッケージである DSQSS を用いた moller の例です。この例では、周期境界条件下の \(S=1/2\) (DSQSSの用語では \(M=1\)) および \(S=1\) (\(M=2\)) 反強磁性ハイゼンベルク鎖の磁気感受率 \(\chi\) の温度依存性を計算します。 moller を使用することで、異なるパラメーター (\(M, L, T\)) の計算を並列に実行します。

+

この例は 公式チュートリアルの一つ に対応しています。

+
+
+

準備

+

moller (HTP-tools)パッケージと DSQSS がインストールされていることを確認してください。このチュートリアルでは、ISSP のスーパーコンピュータシステム ohtaka を使用して計算を実行します。

+
+
+

実行方法

+
    +
  1. データセットを準備する

    +

    このパッケージに含まれるスクリプト make_inputs.sh を実行します。

    +
    $ bash ./make_inputs.sh
    +
    +
    +

    これにより、 output ディレクトリが作成されます(すでに存在する場合は、まず削除し、再度作成します)。 output の下には、各パラメーター用の作業ディレクトリ(例: L_8__M_1__T_1.0)が生成されます。ディレクトリのリストは list.dat ファイルに書き込まれます。

    +
  2. +
  3. moller を使用してジョブスクリプトを生成する

    +

    ジョブ記述ファイルを使用してジョブスクリプトを生成し、 job.sh というファイル名で保存します。

    +
    $ moller -o job.sh input.yaml
    +
    +
    +

    次に、job.shoutput ディレクトリにコピーし、 output ディレクトリに移動します。

    +
  4. +
  5. バッチジョブを実行する

    +

    ジョブリストを引数としてバッチジョブを送信します。

    +
    $ sbatch job.sh list.dat
    +
    +
    +
  6. +
  7. 状態を確認する

    +

    タスク実行の状態は moller_status プログラムによってまとめられます。

    +
    $ moller_status input.yaml list.dat
    +
    +
    +
  8. +
  9. 結果を集める

    +

    計算が終了した後、結果を以下のようにして集めます。

    +
    $ python3 ../extract_result.py list.dat
    +
    +
    +

    このスクリプトは、\(M\), \(L\), \(T\), \(\chi\) の平均、および \(\chi\) の標準誤差を含む 5 列のテキストファイル result.dat に結果を書き込みます。

    +

    結果を視覚化するために、GNUPLOT ファイル plot_M1.plt および plot_M2.plt が利用可能です。

    +
    $ gnuplot --persist plot_M1.plt
    +$ gnuplot --persist plot_M2.plt
    +
    +
    +

(図: S=1/2 の磁気感受率、および S=1 の磁気感受率)

    +

    \(S=1/2\)\(S=1\) AFH 鎖の主な違いは、励起ギャップが消失するか (\(S=1/2\))、残るか (\(S=1\)) のどちらかです。 +これを反映して、非常に低温領域での磁気感受率は、有限になる (\(S=1/2\)) か、消失する (\(S=1\)) かのどちらかです。 +\(S=1/2\) の場合には、有限サイズ効果によりスピンギャップが開き、そのため小さいチェーンの磁気感受率が低下します。

    +
  10. +
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/tutorial/hphi.html b/manual/v1.0.0/ja/html/moller/tutorial/hphi.html new file mode 100644 index 0000000..c8344be --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/tutorial/hphi.html @@ -0,0 +1,191 @@ + + + + + + + + 3.2. HPhi による moller 計算の例 — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +
+

3.2. HPhi による moller 計算の例

+
+

このチュートリアルについて

+

これは、量子多体問題の正確な対角化方法を実行するためのオープンソースソフトウェアパッケージである HPhi を用いた moller の例です。 +この例では、周期境界条件下の \(S=1/2\) (2S_1 ディレクトリ) と \(S=1\) (2S_2) 反強磁性ハイゼンベルク鎖の励起ギャップ \(\Delta\) のシステムサイズ依存性を計算します。 +moller を使用することで、異なるシステムサイズの計算を並列に実行します。 +これはHPhi 公式チュートリアルの セクション 1.4 に対応しています。

+
+
+

準備

+

moller (HTP-tools)パッケージと HPhi がインストールされていることを確認してください。このチュートリアルでは、ISSP のスーパーコンピュータシステム ohtaka を使用して計算を実行します。

+
+
+

実行方法

+
    +
  1. データセットを準備する

    +

    2S_1, 2S_2 に含まれるスクリプト make_inputs.sh を実行します。

    +
    $ bash ./make_inputs.sh
    +
    +
    +

    L_8, L_10, ..., L_24 (2S_2 の場合は L_18 まで) の作業ディレクトリが生成されます。 +ディレクトリのリストは list.dat ファイルに書き込まれます。 +さらに、作業ディレクトリからエネルギーギャップを集めるためのシェルスクリプト、 extract_gap.sh が生成されます。

    +
  2. +
  3. moller を使用してジョブスクリプトを生成する

    +

    input.yaml からジョブスクリプトを生成し、 job.sh というファイル名で保存します。

    +
    $ moller -o job.sh input.yaml
    +
    +
    +
  4. +
  5. バッチジョブを実行する

    +

    ジョブリストを引数としてバッチジョブを送信します。

    +
    $ sbatch job.sh list.dat
    +
    +
    +
  6. +
  7. 状態を確認する

    +

    タスク実行の状態は moller_status プログラムによって確認できます。

    +
    $ moller_status input.yaml list.dat
    +
    +
    +
  8. +
  9. 結果を集める

    +

    計算が終了した後、ジョブからエネルギーギャップを以下のようにして集めます。

    +
    $ bash extract_gap.sh
    +
    +
    +

    このスクリプトは、長さ \(L\) とギャップ \(\Delta\) のペアをテキストファイル gap.dat に書き込みます。

    +

    結果を視覚化するために、Gnuplot ファイル gap.plt が利用可能です。 +このファイルでは、得られたギャップデータが予想される曲線によってフィットされます。

    +
    +(3.1)\[\Delta(L; S=1/2) = \Delta_\infty + A/L\]
    +

    および

    +
    +(3.2)\[\Delta(L; S=1) = \Delta_\infty + B\exp(-CL).\]
    +

    グラフは次のコマンドで描画できます。

    +
    $ gnuplot --persist gap.plt
    +
    +
    +
    +スピンギャップの有限サイズ効果 +
    +

    図 3.3 スピンギャップの有限サイズ効果

    +
    +
    +

    \(S=1/2\) の場合、対数補正によりスピンギャップは有限のままです。一方で、\(S=1\) の場合、外挿値 \(\Delta_\infty = 0.417(1)\) は以前の結果(例えば、QMC による \(\Delta_\infty = 0.41048(6)\) (Todo and Kato, PRL 87, 047203 (2001)))とよくあっています。

    +
  10. +
+
+
+ + +
+ +
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/moller/tutorial/index.html b/manual/v1.0.0/ja/html/moller/tutorial/index.html new file mode 100644 index 0000000..b9a4989 --- /dev/null +++ b/manual/v1.0.0/ja/html/moller/tutorial/index.html @@ -0,0 +1,145 @@ + + + + + + + + 3. チュートリアル — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/manual/v1.0.0/ja/html/objects.inv b/manual/v1.0.0/ja/html/objects.inv new file mode 100644 index 0000000..103f5bd Binary files /dev/null and b/manual/v1.0.0/ja/html/objects.inv differ diff --git a/manual/v1.0.0/ja/html/search.html b/manual/v1.0.0/ja/html/search.html new file mode 100644 index 0000000..b069925 --- /dev/null +++ b/manual/v1.0.0/ja/html/search.html @@ -0,0 +1,123 @@ + + + + + + + 検索 — Moller Users Guide 1.0.0 ドキュメント + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + +
+ +


+ + +
+ + + +
+ + + +
+ +
+ + +
+ +
+
+ +
+
diff --git a/manual/v1.0.0/ja/html/searchindex.js b/manual/v1.0.0/ja/html/searchindex.js new file mode 100644 index 0000000..5f57c38 --- /dev/null +++ b/manual/v1.0.0/ja/html/searchindex.js diff --git a/manual/v1.0.0/ja/moller-usersguide.pdf b/manual/v1.0.0/ja/moller-usersguide.pdf new file mode 100644 index
0000000..2df20cb Binary files /dev/null and b/manual/v1.0.0/ja/moller-usersguide.pdf differ