Commit: initialize sphinx-gallery

agoscinski committed Sep 12, 2024
1 parent db2e36a commit ddbc440
Showing 10 changed files with 1,250 additions and 1,592 deletions.
78 changes: 73 additions & 5 deletions docs/conf.py
@@ -42,6 +42,7 @@
"sphinx_copybutton",
"sphinx_panels",
"sphinx_tabs.tabs",
"sphinx_gallery.gen_gallery"
]

myst_enable_extensions = [
@@ -399,18 +400,39 @@ def setup(app):
app.add_css_file("css/custom.css")


# we don't want to run the notebook during a linkcheck

nb_execution_mode = "off" if os.getenv("SPHINX_LINKCHECK") is None else "auto"
print("nb_execution_mode:", nb_execution_mode)
nb_execution_excludepatterns = ["querying.ipynb"]
# we don't want to run the notebook
nb_execution_mode = "off"

# Intersphinx configuration
intersphinx_mapping = {
"aiida": ("https://aiida.readthedocs.io/projects/aiida-core/en/latest/", None),
"plumpy": ("https://plumpy.readthedocs.io/en/latest/", None),
}

gallery_src_relative_dir = (
"gallery" # relative path of the gallery src wrt. sphinx src
)
sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"]
# we mimic the structure of the sphinx src directory in the gallery src directory

# path of python scripts that should be executed
gallery_src_dirs = [
os.path.join(gallery_src_relative_dir, autogen_dir)
for autogen_dir in sphinx_src_autogen_dirs
]
sphinx_gallery_conf = {
"filename_pattern": "/*",
"examples_dirs": gallery_src_dirs, # in sphinx-gallery doc referred as gallery source
"gallery_dirs": sphinx_src_autogen_dirs, # path to where to gallery puts generated files
}
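
# The resulting layout, relative to the sphinx src directory (docs/), is:
#
#   gallery/sections/writing_workflows_with_workgraph/autogen/  <- example .py scripts (examples_dirs)
#   sections/writing_workflows_with_workgraph/autogen/          <- generated .rst/.ipynb files (gallery_dirs)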

# ignore the autogenerated .ipynb files to suppress warnings
exclude_patterns.extend(
[
os.path.join(sphinx_src_autogen_dir, "*ipynb")
for sphinx_src_autogen_dir in sphinx_src_autogen_dirs
]
)

# Compile all things needed before building the docs
# For instance, convert the notebook templates to actual tutorial and solution versions
@@ -420,3 +442,49 @@ def setup(app):
universal_newlines=True,
)
)

def copy_html_files(app, exception):
"""
Copy all .html files from source to build directory, maintaining the directory structure.
"""
print("Copying HTML files to build directory")
copy_print_info = "Copying HTML files to build directory"
print()
print(copy_print_info)
print(len(copy_print_info) * "=")
if exception is not None: # Only copy files if the build succeeded
print(
"Build failed, but we still try to copy the HTML files to the build directory"
)
try:
src_path = Path(app.builder.srcdir)
build_path = Path(app.builder.outdir)

copy_print_info = f"Copying html files from sphinx src directory {src_path}"
print()
print(copy_print_info)
print(len(copy_print_info) * "-")
for html_file in src_path.rglob("*.html"):
relative_path = html_file.relative_to(src_path)
destination_file = build_path / relative_path
destination_file.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(html_file, destination_file)
print(f"Copy {html_file} to {destination_file}")

gallery_src_path = Path(app.builder.srcdir) / gallery_src_relative_dir

copy_print_info = (
f"Copying html files from gallery src directory {gallery_src_path} to build"
)
print()
print(copy_print_info)
print(len(copy_print_info) * "-")
for html_file in gallery_src_path.rglob("*.html"):
relative_path = html_file.relative_to(gallery_src_path)
destination_file = build_path / relative_path
destination_file.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(html_file, destination_file)
print(f"Copy {html_file} to {destination_file}")
except Exception as e:
print(f"Failed to copy HTML files: {e}")

@@ -0,0 +1,3 @@
================================
Running workflows with WorkGraph
================================
283 changes: 283 additions & 0 deletions docs/gallery/sections/writing_workflows_with_workgraph/autogen/eos.py
@@ -0,0 +1,283 @@
"""
==================================
Equation of state (EOS) WorkGraph
==================================
"""

# %%
# To run this tutorial, you need to install aiida-workgraph and aiida-quantumespresso, and restart the daemon. Open a terminal and run:
#
# .. code-block:: console
#
# pip install aiida-workgraph[widget] aiida-quantumespresso
#
# Restart (or start) the AiiDA daemon if needed:
#
# .. code-block:: console
#
# verdi daemon restart
#
# Create the calcfunction task
# ============================
#


from aiida import orm
from aiida_workgraph import task

#
# explicitly define the output socket name to match the return value of the function
@task.calcfunction(outputs=[{"name": "structures"}])
def scale_structure(structure, scales):
"""Scale the structure by the given scales."""
atoms = structure.get_ase()
structures = {}
for i in range(len(scales)):
atoms1 = atoms.copy()
atoms1.set_cell(atoms.cell * scales[i], scale_atoms=True)
# avoid shadowing the ``structure`` input argument
structures[f"s_{i}"] = orm.StructureData(ase=atoms1)
return {"structures": structures}


#
# Output result from context to the output socket
@task.graph_builder(outputs=[{"name": "result", "from": "context.result"}])
def all_scf(structures, scf_inputs):
"""Run the scf calculation for each structure."""
from aiida_workgraph import WorkGraph
from aiida_quantumespresso.calculations.pw import PwCalculation

wg = WorkGraph()
for key, structure in structures.items():
pw1 = wg.add_task(PwCalculation, name=f"pw1_{key}", structure=structure)
pw1.set(scf_inputs)
# save the output parameters to the context
pw1.set_context({"output_parameters": f"result.{key}"})
return wg
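
# Note: pw1.set_context({"output_parameters": f"result.{key}"}) stores each
# calculation's output_parameters under context.result.<key>; the
# {"name": "result", "from": "context.result"} mapping in the decorator then
# exposes that collection as the graph builder's "result" output.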


#


# because this is a calcfunction and its inputs are dynamic, we need to use **datas
@task.calcfunction()
def eos(**datas):
"""Fit the EOS of the data."""
from ase.eos import EquationOfState

#
volumes = []
energies = []
for _, data in datas.items():
volumes.append(data.dict.volume)
energies.append(data.dict.energy)
unit = data.dict.energy_units
#
eos_fit = EquationOfState(volumes, energies)
v0, e0, B = eos_fit.fit()
result = orm.Dict({"unit": unit, "v0": v0, "e0": e0, "B": B})
return result


# %%
# Build the workgraph
# ===================
# Three steps:
#
# - create an empty WorkGraph
# - add tasks: scale_structure, all_scf and eos.
# - link the output and input sockets for the tasks.
#
# Visualize the workgraph
# -----------------------
# If you are running in a jupyter notebook, you can visualize the workgraph directly.
#

from aiida_workgraph import WorkGraph

#
wg = WorkGraph("eos")
scale_structure1 = wg.add_task(scale_structure, name="scale_structure1")
all_scf1 = wg.add_task(all_scf, name="all_scf1")
eos1 = wg.add_task(eos, name="eos1")
wg.add_link(scale_structure1.outputs["structures"], all_scf1.inputs["structures"])
wg.add_link(all_scf1.outputs["result"], eos1.inputs["datas"])
wg.to_html()
# visualize the workgraph in jupyter-notebook
# wg


# %%
# Prepare inputs and run
# ----------------------
#


from aiida import load_profile
from aiida.common.exceptions import NotExistent
from aiida.orm import (
Dict,
KpointsData,
StructureData,
load_code,
load_group,
InstalledCode,
load_computer,
)
from ase.build import bulk

#
load_profile()
# create pw code
try:
pw_code = load_code(
"qe-7.2-pw@localhost"
) # The computer label can also be omitted here
except NotExistent:
pw_code = InstalledCode(
computer=load_computer("localhost"),
filepath_executable="pw.x",
label="qe-7.2-pw",
default_calc_job_plugin="quantumespresso.pw",
).store()
#
si = orm.StructureData(ase=bulk("Si"))
pw_paras = Dict(
{
"CONTROL": {
"calculation": "scf",
},
"SYSTEM": {
"ecutwfc": 30,
"ecutrho": 240,
"occupations": "smearing",
"smearing": "gaussian",
"degauss": 0.1,
},
}
)
# Load the pseudopotential family.
pseudo_family = load_group("SSSP/1.3/PBEsol/efficiency")
pseudos = pseudo_family.get_pseudos(structure=si)
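# (this assumes the "SSSP/1.3/PBEsol/efficiency" family is already installed,
# e.g. with something like: aiida-pseudo install sssp -x PBEsol -p efficiency -v 1.3)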
#
metadata = {
"options": {
"resources": {
"num_machines": 1,
"num_mpiprocs_per_machine": 1,
},
}
}
#
kpoints = orm.KpointsData()
kpoints.set_kpoints_mesh([3, 3, 3])
pseudos = pseudo_family.get_pseudos(structure=si)
scf_inputs = {
"code": pw_code,
"parameters": pw_paras,
"kpoints": kpoints,
"pseudos": pseudos,
"metadata": metadata,
}
# -------------------------------------------------------
# set the input parameters for each task
wg.tasks["scale_structure1"].set({"structure": si, "scales": [0.95, 1.0, 1.05]})
wg.tasks["all_scf1"].set({"scf_inputs": scf_inputs})
print("Waiting for the workgraph to finish...")
wg.submit(wait=True, timeout=300)
# one can also run the workgraph directly
# wg.run()
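

# %%
# While the workgraph is running, its progress can also be followed from a
# terminal with the standard AiiDA CLI, e.g.:
#
# .. code-block:: console
#
#     verdi process list -a -p 1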


# %%
# Print out the results:
#


data = wg.tasks["eos1"].outputs["result"].value.get_dict()
print("B: {B}\nv0: {v0}\ne0: {e0}\nv0: {v0}".format(**data))

# %%
# Use the graph builder
# =====================
# The graph builder allows the user to create dynamic workflows based on the input values, as well as nested workflows.
#

from aiida_workgraph import WorkGraph, task

#
@task.graph_builder(outputs=[{"name": "result", "from": "eos1.result"}])
def eos_workgraph(structure=None, scales=None, scf_inputs=None):
wg = WorkGraph("eos")
scale_structure1 = wg.add_task(
scale_structure, name="scale_structure1", structure=structure, scales=scales
)
all_scf1 = wg.add_task(all_scf, name="all_scf1", scf_inputs=scf_inputs)
eos1 = wg.add_task(eos, name="eos1")
wg.add_link(scale_structure1.outputs["structures"], all_scf1.inputs["structures"])
wg.add_link(all_scf1.outputs["result"], eos1.inputs["datas"])
return wg


# %%
# Then we can use the `eos_workgraph` in two ways:
#
# - run the function directly to generate the workgraph, then submit it
# - use it as a task inside another workgraph to create a nested workflow
#
# Use the graph builder directly
# ------------------------------
#

wg = eos_workgraph(structure=si, scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs)
# One can submit the workgraph directly
# wg.submit(wait=True, timeout=300)
wg.to_html()
# visualize the workgraph in jupyter-notebook
# wg

# %%
# Use it inside another workgraph
# -------------------------------
# For example, we may want to combine a relax step with the EOS workflow.
#


from aiida_workgraph import WorkGraph
from copy import deepcopy
from aiida_quantumespresso.calculations.pw import PwCalculation

#
# -------------------------------------------------------
relax_pw_paras = deepcopy(pw_paras)
relax_pw_paras["CONTROL"]["calculation"] = "vc-relax"
relax_inputs = {
"structure": si,
"code": pw_code,
"parameters": relax_pw_paras,
"kpoints": kpoints,
"pseudos": pseudos,
"metadata": metadata,
}
# -------------------------------------------------------
wg = WorkGraph("relax_eos")
relax_task = wg.add_task(PwCalculation, name="relax1")
relax_task.set(relax_inputs)
eos_wg_task = wg.add_task(
eos_workgraph, name="eos1", scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs
)
wg.add_link(relax_task.outputs["output_structure"], eos_wg_task.inputs["structure"])
# -------------------------------------------------------
# One can submit the workgraph directly
# wg.submit(wait=True, timeout=300)

wg.to_html()
# visualize the workgraph in jupyter-notebook
# wg

# %%
# Summary
# =======
# There are many ways to create the workflow using the graph builder. For example, one can add the relax step inside the `eos_workgraph` and add a `run_relax` argument to control the logic, as sketched below.
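#
# The following is a minimal, illustrative sketch of that variant; the
# `run_relax` flag and the `relax_inputs` argument are hypothetical names,
# not used elsewhere in this tutorial.


@task.graph_builder(outputs=[{"name": "result", "from": "eos1.result"}])
def eos_workgraph_with_relax(
    structure=None, scales=None, scf_inputs=None, relax_inputs=None, run_relax=False
):
    """EOS workflow with an optional relax step, controlled by ``run_relax``."""
    wg = WorkGraph("eos_with_optional_relax")
    scale_structure1 = wg.add_task(scale_structure, name="scale_structure1", scales=scales)
    all_scf1 = wg.add_task(all_scf, name="all_scf1", scf_inputs=scf_inputs)
    eos1 = wg.add_task(eos, name="eos1")
    if run_relax:
        # relax first and feed the relaxed structure into the scaling task
        relax1 = wg.add_task(PwCalculation, name="relax1")
        relax1.set(relax_inputs)
        wg.add_link(relax1.outputs["output_structure"], scale_structure1.inputs["structure"])
    else:
        scale_structure1.set({"structure": structure})
    wg.add_link(scale_structure1.outputs["structures"], all_scf1.inputs["structures"])
    wg.add_link(all_scf1.outputs["result"], eos1.inputs["datas"])
    return wg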