Skip to content

Commit

Permalink
[pytorch-fortran] Add spack recipe (#816)
Browse files Browse the repository at this point in the history
  • Loading branch information
jonasjucker authored Aug 31, 2023
1 parent 2b1ad6a commit abe6427
Show file tree
Hide file tree
Showing 5 changed files with 161 additions and 0 deletions.
49 changes: 49 additions & 0 deletions repos/c2sm/packages/libtorch/package.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *
import shutil


class Libtorch(Package):
    '''
    PyTorch is a Python package that provides two high-level features:
    - Tensor computation (like NumPy) with strong GPU acceleration
    - Deep neural networks built on a tape-based autograd system

    This recipe installs the pre-built libtorch binary distribution by
    copying the unpacked archive contents into the install prefix.
    '''

    homepage = "https://pytorch.org/"
    url = "https://download.pytorch.org/libtorch/cu117/libtorch-cxx11-abi-shared-with-deps-2.0.1%2Bcu117.zip"

    maintainers = ['juckerj']

    version('2.0.1',
            url=url,
            sha256=
            '262f723ee5a2caac977e089bc06e9d840ca33d70706fbd4a2fca04995bb94eb4')

    phases = ['install']

    def install(self, spec, prefix):
        """Copy the unpacked binary archive into the install prefix.

        Can't use Spack convenience-function 'install_tree' because it
        uses shutil.copy2 under the hood. For an unknown reason installing
        from the unzipped tarball only works using shutil.copy.
        """
        # The four top-level directories of the archive map 1:1 onto the
        # corresponding prefix subdirectories; one loop instead of four
        # identical copytree calls.
        for subdir in ('lib', 'include', 'share', 'bin'):
            shutil.copytree(subdir,
                            getattr(prefix, subdir),
                            symlinks=True,
                            copy_function=shutil.copy)
39 changes: 39 additions & 0 deletions repos/c2sm/packages/pytorch-fortran-proxy/package.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#
from spack import *


class PytorchFortranProxy(CMakePackage):
    '''
    Pytorch Fortran bindings - C++ Backend

    Provides Fortran HPC codes with a simple way to use the Pytorch deep
    learning framework, so Fortran developers can take advantage of the
    rich and optimized Torch ecosystem from within their existing codes.
    '''

    homepage = "https://github.com/alexeedm/pytorch-fortran"
    url = "https://github.com/alexeedm/pytorch-fortran.git"

    version('0.4', git=url, tag='v0.4')

    maintainers = ['juckerj']

    depends_on('cuda')
    depends_on('libtorch')
    depends_on('py-pybind11')

    # The C++ backend lives in a subdirectory of the repository.
    root_cmakelists_dir = 'src/proxy_lib'

    def cmake_args(self):
        """Enable OpenACC, point CMake at CUDA and pin the arch list."""
        return [
            self.define('OPENACC', 1),
            self.define('CUDA_TOOLKIT_ROOT_DIR', self.spec['cuda'].prefix),
            self.define('TORCH_CUDA_ARCH_LIST', "6.0"),
        ]
33 changes: 33 additions & 0 deletions repos/c2sm/packages/pytorch-fortran/package.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#
from spack import *


class PytorchFortran(CMakePackage):
    '''
    Pytorch Fortran bindings - Fortran Frontend

    Provides Fortran HPC codes with a simple way to use the Pytorch deep
    learning framework, so Fortran developers can take advantage of the
    rich and optimized Torch ecosystem from within their existing codes.
    '''

    homepage = "https://github.com/alexeedm/pytorch-fortran"
    url = "https://github.com/alexeedm/pytorch-fortran.git"

    version('0.4', git=url, tag='v0.4')

    maintainers = ['juckerj']

    depends_on('pytorch-fortran-proxy')

    # The Fortran bindings live in a subdirectory of the repository.
    root_cmakelists_dir = 'src/f90_bindings'

    def cmake_args(self):
        """Build with OpenACC support enabled."""
        return [self.define('OPENACC', 1)]
18 changes: 18 additions & 0 deletions test/integration_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,9 @@ def test_int2lm(self):
def test_libcdi_pio(self):
    # Smoke test: the libcdi-pio recipe is queryable (presumably runs
    # `spack info` via the spack_info helper defined in this file).
    spack_info('libcdi-pio')

def test_libtorch(self):
    # Smoke test: the libtorch recipe is queryable via the spack_info
    # helper defined in this file.
    spack_info('libtorch')

def test_libyaml(self):
    # Smoke test: the libyaml recipe is queryable via the spack_info
    # helper defined in this file.
    spack_info('libyaml')

Expand All @@ -129,6 +132,12 @@ def test_oasis(self):
def test_omni_xmod_pool(self):
    # Smoke test: the omni-xmod-pool recipe is queryable via the
    # spack_info helper defined in this file.
    spack_info('omni-xmod-pool')

def test_pytorch_fortran(self):
    # Smoke test: the pytorch-fortran recipe is queryable via the
    # spack_info helper defined in this file.
    spack_info('pytorch-fortran')

def test_pytorch_fortran_proxy(self):
    # Smoke test: the pytorch-fortran-proxy recipe is queryable via the
    # spack_info helper defined in this file.
    spack_info('pytorch-fortran-proxy')

def test_py_black(self):
    # Smoke test: the py-black recipe is queryable via the spack_info
    # helper defined in this file.
    spack_info('py-black')

Expand Down Expand Up @@ -288,6 +297,9 @@ def test_int2lm(self):
spack_spec('int2lm')
spack_spec('int2lm +parallel')

def test_libtorch(self):
    # Concretization check: libtorch resolves to a valid spec
    # (presumably runs `spack spec` via the spack_spec helper defined
    # in this file).
    spack_spec('libtorch')

def test_libcdi_pio(self):
    # Concretization check: libcdi-pio resolves to a valid spec via the
    # spack_spec helper defined in this file.
    spack_spec('libcdi-pio')

Expand Down Expand Up @@ -315,6 +327,12 @@ def test_oasis(self):
def test_omni_xmod_pool(self):
    # Concretization check: omni-xmod-pool resolves to a valid spec via
    # the spack_spec helper defined in this file.
    spack_spec('omni-xmod-pool')

def test_pytorch_fortran(self):
    # Concretization check: pytorch-fortran resolves to a valid spec via
    # the spack_spec helper defined in this file.
    spack_spec('pytorch-fortran')

def test_pytorch_fortran_proxy(self):
    # Concretization check: pytorch-fortran-proxy resolves to a valid
    # spec via the spack_spec helper defined in this file.
    spack_spec('pytorch-fortran-proxy')

def test_py_black(self):
    # Concretization check: py-black resolves to a valid spec via the
    # spack_spec helper defined in this file.
    spack_spec('py-black')

Expand Down
22 changes: 22 additions & 0 deletions test/system_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -489,6 +489,12 @@ def test_install_c2sm_master_nvhpc(self):
)


class LibTorchTest(unittest.TestCase):
    # End-to-end build test for the libtorch recipe.

    def test_install_default(self):
        # Install with the default (unconstrained) spec via the
        # spack_install helper defined in this file.
        spack_install('libtorch')


@pytest.mark.no_tsa # Test is too expensive. It takes over 5h.
class LibCdiPioTest(unittest.TestCase):

Expand Down Expand Up @@ -557,6 +563,22 @@ def test_install_version_0_1(self):
spack_install_and_test('omni-xmod-pool @0.1')


@pytest.mark.no_tsa
class PytorchFortranTest(unittest.TestCase):
    # End-to-end build test for the pytorch-fortran recipe
    # (skipped on the tsa machine per the marker).

    def test_install_version_0_4(self):
        # Pin the toolchain: nvhpc for the frontend, gcc-built proxy,
        # and CUDA 11.8 — presumably the combination known to build;
        # verify against the recipe's dependencies if it changes.
        spack_install(
            '[email protected]%nvhpc ^[email protected]%gcc ^[email protected]'
        )


@pytest.mark.no_tsa
class PytorchFortranProxyTest(unittest.TestCase):
    # End-to-end build test for the pytorch-fortran-proxy recipe
    # (skipped on the tsa machine per the marker).

    def test_install_version_0_4(self):
        # Build the C++ proxy with gcc against CUDA 11.8.
        spack_install('[email protected]%gcc ^[email protected]')


class PyBlackTest(unittest.TestCase):

def test_install_default(self):
Expand Down

0 comments on commit abe6427

Please sign in to comment.