diff --git a/.gitignore b/.gitignore index b6e4761..487a3d7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,8 @@ +# autogenerated by dephell +setup.py +# autogenerated by poetry2conda +py4vasp-env.yml + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..53f63ac --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,57 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = "py4vasp" +copyright = "2020, Vasp Software GmbH" +author = "Vasp Software GmbH" + +# The full version, including alpha/beta/rc tags +release = "0.1" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "nature" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# remove common py4vasp prefix from index +modindex_common_prefix = ["py4vasp."] diff --git a/docs/data_api.rst b/docs/data_api.rst new file mode 100644 index 0000000..79ad29f --- /dev/null +++ b/docs/data_api.rst @@ -0,0 +1,6 @@ +Refining the raw data +===================== + +.. 
automodule:: py4vasp.data
+   :members:
+   :undoc-members:
diff --git a/docs/exceptions_api.rst b/docs/exceptions_api.rst
new file mode 100644
index 0000000..6dfb89e
--- /dev/null
+++ b/docs/exceptions_api.rst
@@ -0,0 +1,6 @@
+Exceptions
+==========
+
+.. automodule:: py4vasp.exceptions
+   :members:
+   :undoc-members:
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..20d440c
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,29 @@
+py4vasp
+=======
+
+*py4vasp* is a python interface to extract data from Vasp calculations. It is
+intended mainly to get a quick look at the data and provide functionality to
+export it into common formats that can be used by other more sophisticated
+postprocessing tools. A second domain of application is for people who want to
+write python scripts based on the data calculated by Vasp. This tool interfaces
+directly with the new HDF5 file format and thereby avoids parsing issues
+associated with the XML or OUTCAR files.
+
+For these two groups of users, we provide different levels of access. The
+simple routines used in the tutorials will read the data from the file directly
+and then generate the requested plot. For script developers, we provide an
+expert interface where the data is lazily loaded as needed, with greater
+flexibility over when the data file is opened and closed.
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   modules
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..922152e
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/docs/modules.rst b/docs/modules.rst
new file mode 100644
index 0000000..d854578
--- /dev/null
+++ b/docs/modules.rst
@@ -0,0 +1,10 @@
+API documentation
+=================
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Modules:
+
+   raw_api
+   data_api
+   exceptions_api
diff --git a/docs/raw_api.rst b/docs/raw_api.rst
new file mode 100644
index 0000000..79c8c3c
--- /dev/null
+++ b/docs/raw_api.rst
@@ -0,0 +1,6 @@
+Extracting the raw data
+=======================
+
+.. automodule:: py4vasp.raw
+   :members:
+   :undoc-members:
diff --git a/poetry.lock b/poetry.lock
index 8280192..b82283d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,3 +1,11 @@
+[[package]]
+category = "dev"
+description = "A configurable sidebar-enabled Sphinx theme"
+name = "alabaster"
+optional = false
+python-versions = "*"
+version = "0.7.12"
+
 [[package]]
 category = "dev"
 description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
@@ -37,6 +45,17 @@ dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.int docs = ["sphinx", "zope.interface"] tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +[[package]] +category = "dev" +description = "Internationalization utilities" +name = "babel" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8.0" + +[package.dependencies] +pytz = ">=2015.7" + [[package]] category = "main" description = "Specifications for callback functions passed in to an API" @@ -175,6 +194,14 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" version = "0.6.0" +[[package]] +category = "dev" +description = "Docutils -- Python Documentation Utilities" +name = "docutils" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.16" + [[package]] category = "main" description = "Discover and load entry points from installed packages." @@ -203,6 +230,14 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" version = "2.8" +[[package]] +category = "dev" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +name = "imagesize" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.2.0" + [[package]] category = "main" description = "Read metadata from Python packages" @@ -482,6 +517,18 @@ optional = false python-versions = ">=3.5" version = "1.17.4" +[[package]] +category = "dev" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.4" + +[package.dependencies] +pyparsing = ">=2.0.2" +six = "*" + [[package]] category = "main" description = "Powerful data structures for data analysis, time series, and statistics" @@ -590,7 +637,7 @@ wcwidth = "*" [[package]] category = "main" description = "Run a subprocess in a pseudo terminal" -marker = "sys_platform != \"win32\" or os_name != \"nt\" or python_version >= \"3.3\" and sys_platform != \"win32\"" +marker = "python_version >= \"3.3\" and sys_platform != \"win32\" or sys_platform != \"win32\" or os_name != \"nt\" or python_version >= \"3.3\" and sys_platform != \"win32\" and (python_version >= \"3.3\" and sys_platform != \"win32\" or sys_platform != \"win32\")" name = "ptyprocess" optional = false python-versions = "*" @@ -612,6 +659,14 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" version = "2.5.2" +[[package]] +category = "dev" +description = "Python parsing module" +name = "pyparsing" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.7" + [[package]] category = "main" description = "Persistent/Functional/Immutable data structures" @@ -746,6 +801,117 @@ optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*" version = "1.13.0" +[[package]] +category = "dev" +description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." 
+name = "snowballstemmer" +optional = false +python-versions = "*" +version = "2.0.0" + +[[package]] +category = "dev" +description = "Python documentation generator" +name = "sphinx" +optional = false +python-versions = ">=3.5" +version = "3.1.1" + +[package.dependencies] +Jinja2 = ">=2.3" +Pygments = ">=2.0" +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = ">=0.3.5" +docutils = ">=0.12" +imagesize = "*" +packaging = "*" +requests = ">=2.5.0" +setuptools = "*" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = "*" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = "*" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-stubs"] +test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] + +[[package]] +category = "dev" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +name = "sphinxcontrib-applehelp" +optional = false +python-versions = ">=3.5" +version = "1.0.2" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +category = "dev" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +name = "sphinxcontrib-devhelp" +optional = false +python-versions = ">=3.5" +version = "1.0.2" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +category = "dev" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +name = "sphinxcontrib-htmlhelp" +optional = false +python-versions = ">=3.5" +version = "1.0.3" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] + +[[package]] +category = "dev" +description = "A sphinx extension which renders display math in HTML via JavaScript" +name = "sphinxcontrib-jsmath" +optional = false +python-versions = ">=3.5" +version = "1.0.1" + +[package.extras] +test = ["pytest", "flake8", "mypy"] + +[[package]] +category = "dev" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +name = "sphinxcontrib-qthelp" +optional = false +python-versions = ">=3.5" +version = "1.0.3" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +category = "dev" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+name = "sphinxcontrib-serializinghtml" +optional = false +python-versions = ">=3.5" +version = "1.1.4" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + [[package]] category = "main" description = "Terminals served to xterm.js using Tornado websockets" @@ -859,10 +1025,14 @@ docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] testing = ["pathlib2", "contextlib2", "unittest2"] [metadata] -content-hash = "95b1ff02604111b941a554ff224ffa69b9ed5c3e23c0da75c326b550b556b717" +content-hash = "291edd704643ec97bbc2ad0531fb5e36a590f28f17ac8685a9867938ba3caeca" python-versions = "^3.7" [metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] appdirs = [ {file = "appdirs-1.4.3-py2.py3-none-any.whl", hash = "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e"}, {file = "appdirs-1.4.3.tar.gz", hash = "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92"}, @@ -879,6 +1049,10 @@ attrs = [ {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"}, {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"}, ] +babel = [ + {file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"}, + {file = "Babel-2.8.0.tar.gz", hash = "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38"}, +] backcall = [ {file = "backcall-0.1.0.tar.gz", hash = "sha256:38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4"}, {file = "backcall-0.1.0.zip", hash = "sha256:bbbf4b1e5cd2bdb08f915895b51081c041bac22394fdfcfdfbe9f14b77c08bf2"}, @@ -889,6 +1063,7 @@ black = [ ] bleach = [ {file = "bleach-3.1.0-py2.py3-none-any.whl", hash = "sha256:213336e49e102af26d9cde77dd2d0397afabc5a6bf2fed985dc35b5d1e285a16"}, + {file = "bleach-3.1.0-py3.8.egg", hash = "sha256:4ca3ec10244c9f11ec129b054912e8bc9ecefc2e0b6bf0dab273f0e72cf381e4"}, {file = "bleach-3.1.0.tar.gz", hash = "sha256:3fdf7f77adcf649c9911387df51254b813185e32b2c6619f690b593a617e19fa"}, ] certifi = [ @@ -959,6 +1134,10 @@ defusedxml = [ {file = "defusedxml-0.6.0-py2.py3-none-any.whl", hash = "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93"}, {file = "defusedxml-0.6.0.tar.gz", hash = "sha256:f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5"}, ] +docutils = [ + {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, + {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, +] entrypoints = [ {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, @@ -998,6 +1177,10 @@ idna = [ {file = "idna-2.8-py2.py3-none-any.whl", hash = "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"}, {file = "idna-2.8.tar.gz", hash = "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"}, ] +imagesize = [ + {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = 
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, + {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, +] importlib-metadata = [ {file = "importlib_metadata-1.3.0-py2.py3-none-any.whl", hash = "sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"}, {file = "importlib_metadata-1.3.0.tar.gz", hash = "sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45"}, @@ -1066,6 +1249,11 @@ markupsafe = [ {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] mistune = [ @@ -1111,6 +1299,10 @@ numpy = [ {file = "numpy-1.17.4-cp38-cp38-win_amd64.whl", hash = "sha256:ada4805ed51f5bcaa3a06d3dd94939351869c095e30a2b54264f5a5004b52170"}, {file = "numpy-1.17.4.zip", hash = "sha256:f58913e9227400f1395c7b800503ebfdb0772f1c33ff8cb4d6451c06cabdf316"}, ] +packaging = [ + {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, + {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, +] pandas = [ {file = "pandas-0.25.3-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:df8864824b1fe488cf778c3650ee59c3a0d8f42e53707de167ba6b4f7d35f133"}, {file = "pandas-0.25.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7458c48e3d15b8aaa7d575be60e1e4dd70348efcd9376656b72fecd55c59a4c3"}, @@ -1174,6 +1366,10 @@ pygments = [ {file = "Pygments-2.5.2-py2.py3-none-any.whl", hash = "sha256:2a3fe295e54a20164a9df49c75fa58526d3be48e14aceba6d6b1e8ac0bfd6f1b"}, {file = "Pygments-2.5.2.tar.gz", hash = "sha256:98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe"}, ] +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] pyrsistent = [ {file = "pyrsistent-0.15.6.tar.gz", hash = "sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b"}, ] @@ -1264,6 +1460,38 @@ six = [ {file = "six-1.13.0-py2.py3-none-any.whl", hash = "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd"}, {file = "six-1.13.0.tar.gz", hash = 
"sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"}, ] +snowballstemmer = [ + {file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"}, + {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"}, +] +sphinx = [ + {file = "Sphinx-3.1.1-py3-none-any.whl", hash = "sha256:97c9e3bcce2f61d9f5edf131299ee9d1219630598d9f9a8791459a4d9e815be5"}, + {file = "Sphinx-3.1.1.tar.gz", hash = "sha256:74fbead182a611ce1444f50218a1c5fc70b6cc547f64948f5182fb30a2a20258"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"}, + {file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"}, + {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, +] terminado = [ {file = "terminado-0.8.3-py2.py3-none-any.whl", hash = "sha256:a43dcb3e353bc680dd0783b1d9c3fc28d529f190bc54ba9a229f72fe6e7a54d7"}, {file = "terminado-0.8.3.tar.gz", hash = "sha256:4804a774f802306a7d9af7322193c5390f1da0abb429e082a10ef1d46e6fb2c2"}, diff --git a/pyproject.toml b/pyproject.toml index 6d54eb4..a44664c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,24 @@ cufflinks = "^0.17.0" pytest = "^3.0" black = {version = "^18.3-alpha.0", allows-prereleases = true} pytest-cov = "^2.8.1" +sphinx = "^3.0.4" + +[tool.poetry2conda] +name = "py4vasp-env" + +[tool.poetry2conda.dependencies] +python = {channel = "anaconda"} +h5py = {channel = "anaconda"} +numpy = {channel = "anaconda"} +pandas = {channel = "anaconda"} +cufflinks = {channel = "conda-forge", name = "cufflinks-py"} +mdtraj = {channel = "conda-forge"} +sphinx = {channel = "anaconda"} + [build-system] requires = ["poetry>=1.0"] build-backend = "poetry.masonry.api" + +[tool.dephell.main] +from = {format = "poetry", path = "pyproject.toml"} +to = {format = "setuppy", 
path = "setup.py"} diff --git a/src/py4vasp/data/__init__.py b/src/py4vasp/data/__init__.py index 6a147f8..6ff7f33 100644 --- a/src/py4vasp/data/__init__.py +++ b/src/py4vasp/data/__init__.py @@ -1,12 +1,36 @@ +""" Refine the raw data produced by Vasp for plotting or analysis. + +Usually one is not directly interested in the raw data that is produced, but +wants to produce either a figure for a publication or some post processing of +the data. This module contains multiple classes that enable these kind of +workflows by extracting the relevant data from the HDF5 file and transforming +them into an accessible format. The classes also provide plotting functionality +to get a quick insight about the data, which can then be refined either within +python or a different tool to obtain publication quality figures. + +Generally, all classes provide a `read` function that extracts the data from the +HDF5 file and puts it into a python dictionary. Where it makes sense in addition +a `plot` function is available that converts the data into a figure for Jupyter +notebooks. In addition, data conversion routines `to_X` may be available +transforming the data into another format or file, which may be useful to +generate plots with tools other than python. For the specifics, please refer to +the documentation of the individual classes. +""" + from .band import Band -from .convergence import Convergence from .dos import Dos +from .energy import Energy from .kpoints import Kpoints from .projectors import Projectors import plotly.io as pio import cufflinks as cf +import inspect +import sys pio.templates.default = "ggplot2" cf.go_offline() cf.set_config_file(theme="ggplot") + +_this_mod = sys.modules[__name__] +__all__ = [name for name, _ in inspect.getmembers(_this_mod, inspect.isclass)] diff --git a/src/py4vasp/data/_util.py b/src/py4vasp/data/_util.py index 48f87e0..47778db 100644 --- a/src/py4vasp/data/_util.py +++ b/src/py4vasp/data/_util.py @@ -2,6 +2,28 @@ import py4vasp.raw as raw +def from_file_doc(doc): + return """Read the {} from the given file. + + Parameters + ---------- + file : str or raw.File + Filename from which the data is extracted, using {} if not present. + Alternatively, you can open the file yourself and pass the `File` + object. In that case, you need to take care the file is properly + closed again and be aware the generated instance of this class + becomes unusable after the file is closed. + + Yields + ------ + contextmanager + The returned context manager handles opening and closing the file. + If a `File` object is passed a `nullcontext` is returned. + """.format( + doc, raw.File.default_filename + ) + + @contextmanager def from_file(cls, file, attr): if file is None or isinstance(file, str): @@ -12,6 +34,14 @@ def from_file(cls, file, attr): yield cls(getattr(file, attr)()) +def add_doc(doc): + def add_doc_to_func(func): + func.__doc__ = doc + return func + + return add_doc_to_func + + def decode_if_possible(string): try: return string.decode() diff --git a/src/py4vasp/data/band.py b/src/py4vasp/data/band.py index bce2c0b..016ac50 100644 --- a/src/py4vasp/data/band.py +++ b/src/py4vasp/data/band.py @@ -2,12 +2,61 @@ import itertools import numpy as np import plotly.graph_objects as go -from .projectors import _projectors_or_dummy +from .projectors import _projectors_or_dummy, _selection_doc from .kpoints import Kpoints from py4vasp.data import _util +_to_dict_doc = ( + """ Read the data into a dictionary. 
+ +Parameters +---------- +{} + +Returns +------- +dict + Contains the **k**-point path for plotting band structures with the + eigenvalues shifted to bring the Fermi energy to 0. If available + and a selection is passed, the projections of these bands on the + selected projectors are included. +""" +).format(_selection_doc) + +_to_plotly_doc = ( + """ Read the data and generate a plotly figure. + +Parameters +---------- +{} +width : float + Specifies the width of the flatbands if a selection of projections is specified. +Returns +------- +plotly.graph_objects.Figure + plotly figure containing the spin-up and spin-down bands. If a selection + is provided the width of the bands represents the projections of the + bands onto the specified projectors. +""" +).format(_selection_doc) + class Band: + """ The electronic band structure. + + The most common use case of this class is to produce the electronic band + structure along a path in the Brillouin zone used in a non self consistent + Vasp calculation. In some cases you may want to use the `to_dict` function + just to obtain the eigenvalue and projection data though in that case the + **k**-point distances that are calculated are meaningless. + + Parameters + ---------- + raw_band : raw.Band + Dataclass containing the raw data necessary to produce a band structure + (eigenvalues, kpoints, ...). + """ + def __init__(self, raw_band): self._raw = raw_band self._kpoints = Kpoints(raw_band.kpoints) @@ -15,15 +64,11 @@ def __init__(self, raw_band): self._projectors = _projectors_or_dummy(raw_band.projectors) @classmethod + @_util.add_doc(_util.from_file_doc("electronic band structure")) def from_file(cls, file=None): return _util.from_file(cls, file, "band") - def read(self, *args): - return self.to_dict(*args) - - def plot(self, *args): - return self.to_plotly(*args) - + @_util.add_doc(_to_dict_doc) def to_dict(self, selection=None): return { "kpoint_distances": self._kpoints.distances(), @@ -33,6 +78,11 @@ def to_dict(self, selection=None): "projections": self._projectors.read(selection, self._raw.projections), } + @functools.wraps(to_dict) + def read(self, *args): + return self.to_dict(*args) + + @_util.add_doc(_to_plotly_doc) def to_plotly(self, selection=None, width=0.5): ticks, labels = self._ticks_and_labels() data = self._band_structure(selection, width) @@ -42,6 +92,10 @@ def to_plotly(self, selection=None, width=0.5): } return go.Figure(data=data, layout=default) + @functools.wraps(to_plotly) + def plot(self, *args): + return self.to_plotly(*args) + def _shift_bands_by_fermi_energy(self): if self._spin_polarized: return { diff --git a/src/py4vasp/data/convergence.py b/src/py4vasp/data/convergence.py deleted file mode 100644 index e26f271..0000000 --- a/src/py4vasp/data/convergence.py +++ /dev/null @@ -1,35 +0,0 @@ -import plotly.graph_objects as go -from py4vasp.data import _util - - -class Convergence: - def __init__(self, raw_conv): - self._raw = raw_conv - - @classmethod - def from_file(cls, file=None): - return _util.from_file(cls, file, "convergence") - - def read(self, *args): - return self.to_dict(*args) - - def plot(self, *args): - return self.to_plotly(*args) - - def to_dict(self, selection=None): - if selection is None: - selection = "TOTEN" - for i, label in enumerate(self._raw.labels): - label = str(label, "utf-8").strip() - if selection in label: - return {label: self._raw.energies[:, i]} - - def to_plotly(self, selection=None): - label, data = self.read(selection).popitem() - label = "Temperature (K)" if "TEIN" in label else 
"Energy (eV)" - data = go.Scatter(y=data) - default = { - "xaxis": {"title": {"text": "Step"}}, - "yaxis": {"title": {"text": label}}, - } - return go.Figure(data=data, layout=default) diff --git a/src/py4vasp/data/dos.py b/src/py4vasp/data/dos.py index 049074c..8c92413 100644 --- a/src/py4vasp/data/dos.py +++ b/src/py4vasp/data/dos.py @@ -2,11 +2,76 @@ import itertools import numpy as np import pandas as pd -from .projectors import _projectors_or_dummy +from .projectors import _projectors_or_dummy, _selection_doc from py4vasp.data import _util +_to_dict_doc = ( + """ Read the data into a dictionary. + +Parameters +---------- +{} + +Returns +------- +dict + Contains the energies at which the DOS was evaluated aligned to the + Fermi energy and the total DOS or the spin-resolved DOS for + spin-polarized calculations. If available and a selection is passed, + the orbital resolved DOS for the selected orbitals is included. +""" +).format(_selection_doc) + +_to_plotly_doc = ( + """ Read the data and generate a plotly figure. + +Parameters +---------- +{} + +Returns +------- +plotly.graph_objects.Figure + plotly figure containing the total DOS. If the calculation was spin + polarized, the resulting DOS is spin resolved and the spin-down DOS + is plotted towards negative values. If a selection the orbital + resolved DOS is given for the specified projectors. +""" +).format(_selection_doc) + +_to_frame_doc = ( + """ Read the data into a pandas DataFrame. + +Parameters +---------- +{} + +Returns +------- +pd.DataFrame + Contains the energies at which the DOS was evaluated aligned to the + Fermi energy and the total DOS or the spin-resolved DOS for + spin-polarized calculations. If available and a selection is passed, + the orbital resolved DOS for the selected orbitals is included. +""" +).format(_selection_doc) + class Dos: + """ The electronic density of states (DOS). + + You can use this class to extract the DOS data of a Vasp calculation. + Typically you want to run a non self consistent calculation with a + denser mesh for a smoother DOS, but the class will work independent + of it. If you generated orbital decomposed DOS, you can use this + class to select which subset of these orbitals to read or plot. + + Parameters + ---------- + raw_dos : raw.Dos + Dataclass containing the raw data necessary to produce a DOS. 
+    """
+
     def __init__(self, raw_dos):
         self._raw = raw_dos
         self._fermi_energy = raw_dos.fermi_energy
@@ -18,12 +83,11 @@ def __init__(self, raw_dos):
         self._projections = raw_dos.projections
 
     @classmethod
+    @_util.add_doc(_util.from_file_doc("electronic DOS"))
     def from_file(cls, file=None):
         return _util.from_file(cls, file, "dos")
 
-    def plot(self, *args):
-        return self.to_plotly(*args)
-
+    @_util.add_doc(_to_plotly_doc)
     def to_plotly(self, selection=None):
         df = self.to_frame(selection)
         if self._spin_polarized:
@@ -37,12 +101,19 @@ def to_plotly(self, selection=None):
         }
         return df.iplot(**default)
 
-    def read(self, *args):
-        return self.to_dict(*args)
+    @functools.wraps(to_plotly)
+    def plot(self, *args):
+        return self.to_plotly(*args)
 
+    @_util.add_doc(_to_dict_doc)
     def to_dict(self, selection=None):
         return {**self._read_data(selection), "fermi_energy": self._fermi_energy}
 
+    @functools.wraps(to_dict)
+    def read(self, *args):
+        return self.to_dict(*args)
+
+    @_util.add_doc(_to_frame_doc)
     def to_frame(self, selection=None):
         df = pd.DataFrame(self._read_data(selection))
         df.fermi_energy = self._fermi_energy
diff --git a/src/py4vasp/data/energy.py b/src/py4vasp/data/energy.py
new file mode 100644
index 0000000..19b6b29
--- /dev/null
+++ b/src/py4vasp/data/energy.py
@@ -0,0 +1,80 @@
+import plotly.graph_objects as go
+import functools
+from py4vasp.data import _util
+
+
+class Energy:
+    """ The energy data for all steps of a relaxation or MD simulation.
+
+    You can use this class to inspect how the ionic relaxation converges or
+    whether the total energy is conserved during an MD simulation.
+
+    Parameters
+    ----------
+    raw_energy : raw.Energy
+        Dataclass containing the raw energy values for the ionic run and labels
+        specifying which energies are stored.
+    """
+
+    def __init__(self, raw_energy):
+        self._raw = raw_energy
+
+    @classmethod
+    @_util.add_doc(_util.from_file_doc("energies in a relaxation or MD simulation"))
+    def from_file(cls, file=None):
+        return _util.from_file(cls, file, "energy")
+
+    def to_dict(self, selection=None):
+        """ Read the energy data and store it in a dictionary.
+
+        Parameters
+        ----------
+        selection : str or None
+            String specifying the label of the energy to be read. A substring
+            of the label is sufficient. If no energy is selected this will default
+            to the total energy.
+
+        Returns
+        -------
+        dict
+            Contains the exact label corresponding to the selection and the
+            associated energy for every ionic step.
+        """
+        if selection is None:
+            selection = "TOTEN"
+        for i, label in enumerate(self._raw.labels):
+            label = str(label, "utf-8").strip()
+            if selection in label:
+                return {label: self._raw.values[:, i]}
+
+    @functools.wraps(to_dict)
+    def read(self, *args):
+        return self.to_dict(*args)
+
+    def to_plotly(self, selection=None):
+        """ Read the energy data and generate a plotly figure.
+
+        Parameters
+        ----------
+        selection : str or None
+            String specifying the label of the energy to be plotted. A substring
+            of the label is sufficient. If no energy is selected this will default
+            to the total energy.
+
+        Returns
+        -------
+        plotly.graph_objects.Figure
+            plotly figure containing the selected energy for every ionic step.
+ """ + label, data = self.read(selection).popitem() + label = "Temperature (K)" if "TEIN" in label else "Energy (eV)" + data = go.Scatter(y=data) + default = { + "xaxis": {"title": {"text": "Step"}}, + "yaxis": {"title": {"text": label}}, + } + return go.Figure(data=data, layout=default) + + @functools.wraps(to_plotly) + def plot(self, *args): + return self.to_plotly(*args) diff --git a/src/py4vasp/data/kpoints.py b/src/py4vasp/data/kpoints.py index 0a94f98..d2ed5d5 100644 --- a/src/py4vasp/data/kpoints.py +++ b/src/py4vasp/data/kpoints.py @@ -5,14 +5,35 @@ class Kpoints: + """ The **k** points used in the Vasp calculation. + + This class provides utility functionality to extract information about the + **k** points used by Vasp. As such it is mostly used as a helper class for + other postprocessing classes to extract the required information, e.g., to + generate a band structure. + + Parameters + ---------- + raw_kpoints : raw.Kpoints + Dataclass containing the raw **k**-points data used in the calculation. + """ + def __init__(self, raw_kpoints): self._raw = raw_kpoints self._distances = None - def read(self): - return self.to_dict() - def to_dict(self): + """ Read the **k** points data into a dictionary. + + Returns + ------- + dict + Contains the coordinates of the **k** points (in crystal units) as + well as their weights used for integrations. Moreover, some data + specified in the input file of Vasp are transferred such as the mode + used to generate the **k** points, the line length (if line mode was + used), and any labels set for specific points. + """ return { "mode": self.mode(), "line_length": self.line_length(), @@ -21,15 +42,34 @@ def to_dict(self): "labels": self.labels(), } + @functools.wraps(to_dict) + def read(self): + return self.to_dict() + def line_length(self): + "Get the number of points per line in the Brillouin zone." if self.mode() == "line": return self._raw.number return len(self._raw.coordinates) def number_lines(self): + "Get the number of lines in the Brillouin zone." return len(self._raw.coordinates) // self.line_length() def distances(self): + """ Convert the coordinates of the **k** points into a one dimensional array + + For every line in the Brillouin zone, the distance between each **k** point + and the start of the line is calculated. Then the distances of different + lines are concatenated into a single list. This routine is mostly useful + to plot data along high-symmetry lines like band structures. + + Returns + ------- + np.ndarray + A reduction of the **k** points onto a one-dimensional array based + on the distance between the points. + """ if self._distances is not None: return self._distances cell = self._raw.cell.lattice_vectors * self._raw.cell.scale @@ -43,6 +83,7 @@ def distances(self): return self._distances def mode(self): + "Get the **k**-point generation mode specified in the Vasp input file" mode = _util.decode_if_possible(self._raw.mode).strip() or "# empty string" first_char = mode[0].lower() if first_char == "a": @@ -62,6 +103,7 @@ def mode(self): ) def labels(self): + "Get any labels given in the input file for specific **k** points." 
if self._raw.labels is None or self._raw.label_indices is None: return None labels = [""] * len(self._raw.coordinates) diff --git a/src/py4vasp/data/projectors.py b/src/py4vasp/data/projectors.py index d65733f..503fd94 100644 --- a/src/py4vasp/data/projectors.py +++ b/src/py4vasp/data/projectors.py @@ -8,6 +8,24 @@ from py4vasp.data import _util from py4vasp.exceptions import UsageException +_selection_doc = """ + selection : str + A string specifying the projection of the orbitals. There are three distinct + possibilities: + + - To specify the **atom**, you can either use its element name (Si, Al, ...) + or its index as given in the input file (1, 2, ...). For the latter + option it is also possible to specify ranges (e.g. 1-4). + - To select a particular **orbital** you can give a string (s, px, dxz, ...) + or select multiple orbitals by their angular momentum (s, p, d, f). + - For the **spin**, you have the options up, down, or total. + + For all of these options a wildcard \* exists, which selects all elements. You + separate multiple selections by commas or whitespace and can nest them using + parenthesis, e.g. `Sr(s, p)` or `s(up), p(down)`. The order of the selections + does not matter, but is is case sensitive to distinguish p (angular momentum + l = 1) from P (phosphorus). + """.strip() _default = "*" _spin_not_set = "not set" @@ -75,15 +93,74 @@ def _is_state_complete(state, char): return state.level == 0 and char in _seperators and state.part != "" +_parse_selection_doc = ( + """ Generate all possible indices where the projected information is stored. + +Given a string specifying which atoms, orbitals, and spin should be selected +an iterable object is created that contains the indices compatible with the +selection. + +Parameters +---------- +{} + +Yields +------ +Iterable[Index] + Indices of the atom, the orbital and the spin compatible with a specific + selection. +""" +).format(_selection_doc) + +_to_dict_doc = ( + """ Read the selected data from an array and store it in a dictionary. + +Parameters +---------- +{} +projections : np.ndarray + Array containing projected data. + +Returns +------- +dict + Dictionary where the label of the selection is linked to a particular + column of the array. If a particular selection includes multiple indices + these elements are added. +""" +).format(_selection_doc) + + class Projectors: + """ The projectors used for atom and orbital resolved quantities. + + This is a common class used by all quantities that contains some projected + quantity, e.g., the electronic band structure and the DOS. It provides + utility functionality to access specific indices of the projected arrays + based on a simple mini language specifying the atom or orbital names. + + Parameters + ---------- + raw_proj : raw.Projectors + Dataclass containing data about the elements, the orbitals, and the spin + for which projectors are available. + """ + class Selection(NamedTuple): + "Helper class specifying which indices to extract their label." indices: Iterable[int] + "Indices from which the specified quantity is read." label: str = "" + "Label identifying the quantity." class Index(NamedTuple): + "Helper class specifying which atom, orbital, and spin are selected." atom: Union[str, Selection] + "Label of the atom or a Selection object to read the corresponding data." orbital: Union[str, Selection] + "Label of the orbital or a Selection object to read the corresponding data." 
spin: Union[str, Selection] + "Label of the spin component or a Selection object to read the corresponding data." def __init__(self, raw_proj): self._raw = raw_proj @@ -93,6 +170,7 @@ def __init__(self, raw_proj): self._spin_polarized = raw_proj.number_spins == 2 @classmethod + @_util.add_doc(_util.from_file_doc("atom and orbital projectors")) def from_file(cls, file=None): return _util.from_file(cls, file, "projectors") @@ -133,6 +211,29 @@ def _init_spin_dict(self, raw_proj): } def select(self, atom=_default, orbital=_default, spin=_default): + """ Map selection strings onto corresponding Selection objects. + + Parameters + ---------- + atom : str + Element name or index of the atom in the input file of Vasp. If a + range is specified (e.g. 1-3) a pointer to multiple indices will be + created. + orbital : str + Character identifying the angular momentum of the orbital. You may + select a specific one (e.g. px) or all of the same character (e.g. d). + spin : str + Select "up" or "down" for a specific spin component or "total" for + the sum of both. + For all parameters you can pass "*" to default to all (atoms, orbitals, + or spins). + + Returns + ------- + Index + Indices to access the selected projection from an array and an + associated label. + """ return self.Index( atom=self._select_atom(atom), orbital=self._orbital_dict[orbital], @@ -148,6 +249,7 @@ def _select_atom(self, atom): else: return self._atom_dict[atom] + @_util.add_doc(_parse_selection_doc) def parse_selection(self, selection): default_index = self.Index(atom=_default, orbital=_default, spin=_spin_not_set) yield from self._parse_recursive(selection, default_index) @@ -183,11 +285,16 @@ def _setup_spin_indices(self, index): for key in ("up", "down"): yield index._replace(spin=key) - def read(self, selection, projections): + @_util.add_doc(_to_dict_doc) + def to_dict(self, selection, projections): if selection is None: return {} return self._read_elements(selection, projections) + @functools.wraps(to_dict) + def read(self, *args): + return self.to_dict(*args) + def _read_elements(self, selection, projections): res = {} for select in self.parse_selection(selection): @@ -214,7 +321,7 @@ class _NoProjectorsAvailable: def read(self, selection, projections): if selection is not None: raise UsageException( - "Projectors are not available, rerun Vasp setting LORBIT = 10 or 11." + "Projectors are not available, rerun Vasp setting LORBIT >= 10." ) return {} diff --git a/src/py4vasp/exceptions/__init__.py b/src/py4vasp/exceptions/__init__.py index 281c8b3..cac607b 100644 --- a/src/py4vasp/exceptions/__init__.py +++ b/src/py4vasp/exceptions/__init__.py @@ -1 +1,14 @@ +""" Deals with the possible exceptions in py4vasp. + +The design goal is that all forseeable exceptions in py4vasp issue an exception +of the Py4VaspException class. Any other kind of exception would indicate a bug +in the code. If possible the part standard users interact with should not raise +any exception, but should give advice on how to overcome the issue. +""" + from .exceptions import * +import inspect +import sys + +_this_mod = sys.modules[__name__] +__all__ = [name for name, _ in inspect.getmembers(_this_mod, inspect.isclass)] diff --git a/src/py4vasp/raw/__init__.py b/src/py4vasp/raw/__init__.py index 0a4e1b3..a4a7b49 100644 --- a/src/py4vasp/raw/__init__.py +++ b/src/py4vasp/raw/__init__.py @@ -1,2 +1,22 @@ +""" Extract the raw data from the HDF5 file and transform it into dataclasses. + +In the HDF5 file, the raw data is stored with specific keys. 
In order to avoid +propagating the name of these keys to the higher tier modules, we transform +everything into dataclasses. This enables introducing new file formats by +replacing the `File` class. + +Notes +----- +The data from the HDF5 file is lazily loaded except for scalars. This avoids +memory issues when the HDF5 file contains a lot of data, because only what is +actually needed is read. However this has the consequence that you need to +enforce the read operation, before the file is closed. +""" + from .rawdata import * from .file import File +import inspect +import sys + +_this_mod = sys.modules[__name__] +__all__ = [name for name, _ in inspect.getmembers(_this_mod, inspect.isclass)] diff --git a/src/py4vasp/raw/file.py b/src/py4vasp/raw/file.py index 308f3f9..ca31247 100644 --- a/src/py4vasp/raw/file.py +++ b/src/py4vasp/raw/file.py @@ -4,7 +4,32 @@ class File(AbstractContextManager): + """ Extract raw data from the HDF5 file. + + This class opens a given HDF5 file and its functions then provide access to + the raw data via dataclasses. When you request the dataclass for a certain + quantity, this class will generate the necessary pointers to the relevant + HDF5 datasets, which can then be accessed like numpy arrays. + + This class also extends a context manager so it can be used to automatically + deal with closing the HDF5 file. You cannot access the data in the + dataclasses after you closed the HDF5 file. + + Parameters + ---------- + filename : str + Name of the file from which the data is read (defaults to default_filename). + + Notes + ----- + Except for scalars this class does not actually load the data from file. It + only creates a pointer to the correct position in the HDF5 file. So you need + to extract the data before closing the file. This lazy loading significantly + enhances the performance if you are only interested in a subset of the data. + """ + default_filename = "vaspout.h5" + "Name of the HDF5 file Vasp creates." def __init__(self, filename=None): filename = filename or File.default_filename @@ -12,6 +37,15 @@ def __init__(self, filename=None): self.closed = False def dos(self): + """ Read the electronic density of states (Dos). + + Returns + ------- + raw.Dos + A list of energies E and the associated raw electronic Dos D(E). The + energies need to be manually shifted to the Fermi energy. If + available, the projections on a set of projectors are included. + """ self._assert_not_closed() return raw.Dos( fermi_energy=self._h5f["results/electron_dos/efermi"][()], @@ -22,6 +56,15 @@ def dos(self): ) def band(self): + """ Read the band structure generated by Vasp. + + Returns + ------- + raw.Band + The raw electronic eigenvalues at the specific **k** points. These + values need to be manually aligned to the Fermi energy if desired. + If available the projections on a set of projectors are included. + """ self._assert_not_closed() return raw.Band( fermi_energy=self._h5f["results/electron_dos/efermi"][()], @@ -32,6 +75,14 @@ def band(self): ) def projectors(self): + """Read the projectors information if present. + + Returns + ------- + raw.Projectors or None + If Vasp was set to produce the orbital decomposition of the bands + the associated projector information is returned. + """ self._assert_not_closed() if "results/projectors" not in self._h5f: return None @@ -43,6 +94,16 @@ def projectors(self): ) def kpoints(self): + """ Read the **k** points at which Vasp evaluated the wave functions. 
+ + Returns + ------- + raw.Kpoints + In addition to the coordinates of the **k** points and the cell + information, we include some information given in the input file + about the generation and labels of the **k** points, which may be + useful for band structures. + """ self._assert_not_closed() return raw.Kpoints( mode=self._h5f["input/kpoints/mode"][()], @@ -55,20 +116,36 @@ def kpoints(self): ) def cell(self): + """ Read the unit cell information of the crystal. + + Returns + ------- + raw.Cell + The lattice vectors of the unit cell scaled by a constant factor. + """ self._assert_not_closed() return raw.Cell( scale=self._h5f["results/positions/scale"][()], lattice_vectors=self._h5f["results/positions/lattice_vectors"], ) - def convergence(self): + def energy(self): + """ Read the energies during the ionic convergence. + + Returns + ------- + raw.Energy + Information about different energies for every step in the relaxation + or MD simulation. + """ self._assert_not_closed() - return raw.Convergence( + return raw.Energy( labels=self._h5f["intermediate/history/energies_tags"], - energies=self._h5f["intermediate/history/energies"], + values=self._h5f["intermediate/history/energies"], ) def close(self): + "Close the associated HDF5 file (automatically if used as context manager)." self._h5f.close() self.closed = True diff --git a/src/py4vasp/raw/rawdata.py b/src/py4vasp/raw/rawdata.py index c1d2e1e..962718f 100644 --- a/src/py4vasp/raw/rawdata.py +++ b/src/py4vasp/raw/rawdata.py @@ -21,54 +21,85 @@ def _only_one_None(lhs, rhs): @dataclass class Projectors: + "Projectors used for orbital projections." number_ion_types: np.ndarray + "Amount of ions of a particular type." ion_types: np.ndarray + "Element of a particular type." orbital_types: np.ndarray + "Character indicating the orbital angular momentum." number_spins: int + "Indicates whether the calculation is spin polarized or not." __eq__ = _dataclass_equal @dataclass class Cell: + "Unit cell of the crystal or simulation cell for molecules." scale: float + "Global scaling factor applied to all lattice vectors." lattice_vectors: np.ndarray + "Lattice vectors defining the unit cell." __eq__ = _dataclass_equal @dataclass class Kpoints: + "**k** points at which wave functions are calculated." mode: str + "Mode used to generate the **k**-point list." number: int + "Number of **k** points specified in the generation." coordinates: np.ndarray + "Coordinates of the **k** points as fraction of the reciprocal lattice vectors." weights: np.ndarray + "Weight of the **k** points used for integration." cell: Cell + "Unit cell of the crystal." labels: np.ndarray = None + "High symmetry label for specific **k** points used in band structures." label_indices: np.ndarray = None + "Indices of the labeled **k** points in the generation list." __eq__ = _dataclass_equal @dataclass class Dos: + "Electronic density of states." fermi_energy: float + "Fermi energy obtained by Vasp." energies: np.ndarray + "Energy E at which the Dos is evaluated." dos: np.ndarray + "Dos at the energies D(E)." projections: np.ndarray = None + "If present, orbital projections of the Dos." projectors: Projectors = None + "If present, projector information (element, angular momentum, spin)." __eq__ = _dataclass_equal @dataclass class Band: + "Electronic band structure" fermi_energy: float + "Fermi energy obtained by Vasp." kpoints: Kpoints + "**k** points at which the bands are calculated." eigenvalues: np.ndarray + "Calculated eigenvalues at the **k** points." 
projections: np.ndarray = None + "If present, orbital projections of the bands." projectors: Projectors = None + "If present, projector information (element, angular momentum, spin)." __eq__ = _dataclass_equal @dataclass -class Convergence: +class Energy: + "Various energies during ionic relaxation or MD simulation." labels: np.ndarray - energies: np.ndarray + "Label identifying which energy is contained." + values: np.ndarray + "Energy specified by labels for all iteration steps." __eq__ = _dataclass_equal diff --git a/tests/data/test_convergence.py b/tests/data/test_convergence.py deleted file mode 100644 index 1552fd8..0000000 --- a/tests/data/test_convergence.py +++ /dev/null @@ -1,41 +0,0 @@ -from py4vasp.data import Convergence -import pytest -import numpy as np -import py4vasp.raw as raw - - -@pytest.fixture -def reference_convergence(): - labels = np.array(("ion-electron TOTEN ", "temperature TEIN"), dtype="S") - shape = (100, len(labels)) - return raw.Convergence( - labels=labels, energies=np.arange(np.prod(shape)).reshape(shape) - ) - - -def test_read_convergence(reference_convergence, Assert): - conv = Convergence(reference_convergence) - dict_ = conv.read() - assert len(dict_) == 1 - label, data = dict_.popitem() - assert label == reference_convergence.labels[0].decode().strip() - Assert.allclose(data, reference_convergence.energies[:, 0]) - label, data = conv.read("temperature").popitem() - assert label == reference_convergence.labels[1].decode().strip() - Assert.allclose(data, reference_convergence.energies[:, 1]) - - -def test_plot_convergence(reference_convergence, Assert): - conv = Convergence(reference_convergence) - fig = conv.plot() - assert fig.layout.xaxis.title.text == "Step" - assert fig.layout.yaxis.title.text == "Energy (eV)" - Assert.allclose(fig.data[0].y, reference_convergence.energies[:, 0]) - fig = conv.plot("temperature") - assert fig.layout.yaxis.title.text == "Temperature (K)" - Assert.allclose(fig.data[0].y, reference_convergence.energies[:, 1]) - - -def test_convergence_from_file(reference_convergence, mock_file, check_read): - with mock_file("convergence", reference_convergence) as mocks: - check_read(Convergence, mocks, reference_convergence) diff --git a/tests/data/test_energy.py b/tests/data/test_energy.py new file mode 100644 index 0000000..5c61c4c --- /dev/null +++ b/tests/data/test_energy.py @@ -0,0 +1,39 @@ +from py4vasp.data import Energy +import pytest +import numpy as np +import py4vasp.raw as raw + + +@pytest.fixture +def reference_energy(): + labels = np.array(("ion-electron TOTEN ", "temperature TEIN"), dtype="S") + shape = (100, len(labels)) + return raw.Energy(labels=labels, values=np.arange(np.prod(shape)).reshape(shape)) + + +def test_read_energy(reference_energy, Assert): + conv = Energy(reference_energy) + dict_ = conv.read() + assert len(dict_) == 1 + label, data = dict_.popitem() + assert label == reference_energy.labels[0].decode().strip() + Assert.allclose(data, reference_energy.values[:, 0]) + label, data = conv.read("temperature").popitem() + assert label == reference_energy.labels[1].decode().strip() + Assert.allclose(data, reference_energy.values[:, 1]) + + +def test_plot_energy(reference_energy, Assert): + conv = Energy(reference_energy) + fig = conv.plot() + assert fig.layout.xaxis.title.text == "Step" + assert fig.layout.yaxis.title.text == "Energy (eV)" + Assert.allclose(fig.data[0].y, reference_energy.values[:, 0]) + fig = conv.plot("temperature") + assert fig.layout.yaxis.title.text == "Temperature (K)" + 
Assert.allclose(fig.data[0].y, reference_energy.values[:, 1]) + + +def test_energy_from_file(reference_energy, mock_file, check_read): + with mock_file("energy", reference_energy) as mocks: + check_read(Energy, mocks, reference_energy) diff --git a/tests/raw/test_file.py b/tests/raw/test_file.py index 2b1f749..759ea0f 100644 --- a/tests/raw/test_file.py +++ b/tests/raw/test_file.py @@ -229,18 +229,16 @@ def test_energies(tmpdir): def reference_energies(): labels = np.array(["total", "kinetic", "temperature"], dtype="S") shape = (100, len(labels)) - return raw.Convergence( - labels=labels, energies=np.arange(np.prod(shape)).reshape(shape) - ) + return raw.Energy(labels=labels, values=np.arange(np.prod(shape)).reshape(shape)) -def write_energies(h5f, convergence): - h5f["intermediate/history/energies_tags"] = convergence.labels - h5f["intermediate/history/energies"] = convergence.energies +def write_energies(h5f, energy): + h5f["intermediate/history/energies_tags"] = energy.labels + h5f["intermediate/history/energies"] = energy.values def check_energies(file, reference): - assert file.convergence() == reference + assert file.energy() == reference def test_kpoints(tmpdir):
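A minimal usage sketch of the two levels of access documented in the docstrings above, assuming a Vasp run has produced the default vaspout.h5 in the working directory; the "Si(s, p)" selection and the label shown in the comment are illustrative assumptions, not taken from this change:

import py4vasp.raw as raw
from py4vasp.data import Band, Dos, Energy

# Quick look: each refinement class opens the default vaspout.h5 on demand.
with Energy.from_file() as energy:
    energy_dict = energy.read()      # e.g. {"ion-electron   TOTEN": array([...])}

with Dos.from_file() as dos:
    figure = dos.plot("Si(s, p)")    # orbital resolved DOS; requires projectors

# Expert access: keep the file open while the lazily loaded data is used.
with raw.File("vaspout.h5") as file:
    band = Band(file.band())
    band_dict = band.read()          # k-point distances, eigenvalues, labels, ...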