Update LBANN generator and bug fix (#9906)

* Added a package for the MDAnalysis toolkit.

* Added Ninja as the generator for LBANN, Hydrogen, and Aluminum.  Also
fixed a bug in how the Conduit variant is included in LBANN.  Added
new versions for Hydrogen and LBANN.

* Updated the lbann package so that the CMake parameters are set
whether the variant is on or off.
This commit is contained in:
Brian Van Essen 2018-12-10 15:12:43 -08:00 committed by Greg Becker
parent ad046402b6
commit abd575f273
3 changed files with 28 additions and 13 deletions

View file

@ -34,6 +34,9 @@ class Aluminum(CMakePackage):
depends_on('nccl', when='+nccl')
depends_on('hwloc')
generator = 'Ninja'
depends_on('ninja', type='build')
def cmake_args(self):
spec = self.spec
args = [

View file

@ -13,10 +13,11 @@ class Hydrogen(CMakePackage):
and optimization library. Based on the Elemental library."""
homepage = "http://libelemental.org"
url = "https://github.com/LLNL/Elemental/archive/0.99.tar.gz"
url = "https://github.com/LLNL/Elemental/archive/v1.0.1.tar.gz"
git = "https://github.com/LLNL/Elemental.git"
version('develop', branch='hydrogen')
version('1.0.1', sha256='27cf76e1ef1d58bd8f9b1e34081a14a682b7ff082fb5d1da56713e5e0040e528')
version('1.0', sha256='d8a97de3133f2c6b6bb4b80d32b4a4cc25eb25e0df4f0cec0f8cb19bf34ece98')
version('0.99', 'b678433ab1d498da47acf3dc5e056c23')
@ -93,6 +94,9 @@ class Hydrogen(CMakePackage):
conflicts('@0:0.98', msg="Hydrogen did not exist before v0.99. " +
"Did you mean to use Elemental instead?")
generator = 'Ninja'
depends_on('ninja', type='build')
@property
def libs(self):
shared = True if '+shared' in self.spec else False

View file

@ -18,6 +18,7 @@ class Lbann(CMakePackage):
git = "https://github.com/LLNL/lbann.git"
version('develop', branch='develop')
version('0.96', sha256='97af78e9d3c405e963361d0db96ee5425ee0766fa52b43c75b8a5670d48e4b4a')
version('0.95', sha256='d310b986948b5ee2bedec36383a7fe79403721c8dc2663a280676b4e431f83c2')
version('0.94', sha256='567e99b488ebe6294933c98a212281bffd5220fc13a0a5cd8441f9a3761ceccf')
version('0.93', '1913a25a53d4025fa04c16f14afdaa55')
@ -34,8 +35,9 @@ class Lbann(CMakePackage):
variant('build_type', default='Release',
description='The build type to build',
values=('Debug', 'Release'))
variant('al', default=True, description='Builds with support for Aluminum Library')
variant('al', default=False, description='Builds with support for Aluminum Library')
variant('conduit', default=False, description='Builds with support for Conduit Library')
variant('vtune', default=False, description='Builds with support for Intel VTune')
# It seems that one statement per version bound is needed
depends_on('hydrogen +openmp_blas +shared +int64', when='@:0.90,0.95: ~al')
@ -82,12 +84,15 @@ class Lbann(CMakePackage):
'~pthreads_pf ~python ~qt ~stitching ~superres ~ts ~video'
'~videostab ~videoio ~vtk', when='+opencv')
depends_on('protobuf@3.0.2:')
depends_on('protobuf@3.0.2: build_type=Release')
depends_on('cnpy')
depends_on('nccl', when='+gpu +nccl')
depends_on('conduit@master +hdf5', when='+conduit')
generator = 'Ninja'
depends_on('ninja', type='build')
@property
def common_config_args(self):
spec = self.spec
@ -110,10 +115,16 @@ def cmake_args(self):
args = self.common_config_args
args.extend([
'-DLBANN_WITH_TOPO_AWARE:BOOL=%s' % ('+gpu +nccl' in spec),
'-DLBANN_WITH_ALUMINUM:BOOL=%s' % ('+al' in spec),
'-DLBANN_WITH_CONDUIT:BOOL=%s' % ('+conduit' in spec),
'-DLBANN_WITH_CUDA:BOOL=%s' % ('+gpu' in spec),
'-DLBANN_WITH_CUDNN:BOOL=%s' % ('+gpu' in spec),
'-DLBANN_WITH_NCCL:BOOL=%s' % ('+gpu +nccl' in spec),
'-DLBANN_WITH_SOFTMAX_CUDA:BOOL=%s' % ('+gpu' in spec),
'-DLBANN_SEQUENTIAL_INITIALIZATION:BOOL=%s' %
('+seq_init' in spec),
'-DLBANN_WITH_TBINF=OFF',
'-DLBANN_WITH_VTUNE=OFF',
'-DLBANN_WITH_VTUNE:BOOL=%s' % ('+vtune' in spec),
'-DLBANN_DATATYPE={0}'.format(spec.variants['dtype'].value),
'-DLBANN_VERBOSE=0'])
@ -126,14 +137,15 @@ def cmake_args(self):
'-DElemental_DIR={0}/CMake/elemental'.format(
spec['elemental'].prefix)])
if '+vtune' in spec:
args.extend(['-DVTUNE_DIR={0}'.format(spec['vtune'].prefix)])
if '+al' in spec:
args.extend(['-DLBANN_WITH_ALUMINUM:BOOL=%s' % ('+al' in spec),
'-DAluminum_DIR={0}'.format(spec['aluminum'].prefix)])
args.extend(['-DAluminum_DIR={0}'.format(spec['aluminum'].prefix)])
if '+conduit' in spec:
args.extend(['-DLBANN_CONDUIT_DIR:BOOL=%s' % ('+conduit' in spec),
'-DLBANN_CONDUIT_DIR={0}'.format(
spec['conduit'].prefix)])
args.extend(['-DLBANN_CONDUIT_DIR={0}'.format(
spec['conduit'].prefix)])
# Add support for OpenMP
if (self.spec.satisfies('%clang')):
@ -153,19 +165,15 @@ def cmake_args(self):
if '+gpu' in spec:
args.extend([
'-DLBANN_WITH_CUDA:BOOL=%s' % ('+gpu' in spec),
'-DLBANN_WITH_SOFTMAX_CUDA:BOOL=%s' % ('+gpu' in spec),
'-DCUDA_TOOLKIT_ROOT_DIR={0}'.format(
spec['cuda'].prefix)])
args.extend([
'-DLBANN_WITH_CUDNN:BOOL=%s' % ('+gpu' in spec),
'-DcuDNN_DIR={0}'.format(
spec['cudnn'].prefix)])
args.extend(['-DCUB_DIR={0}'.format(
spec['cub'].prefix)])
if '+nccl' in spec:
args.extend([
'-DLBANN_WITH_NCCL:BOOL=%s' % ('+gpu +nccl' in spec),
'-DNCCL_DIR={0}'.format(
spec['nccl'].prefix)])