hdf-eos2: support version @3.0, plus assorted fixes (#41782)

1) Support for version @3.0.
Unfortunately, the download now seems to require registration,
so the manual_download mechanism is used for @3
(see the sketch after this list).

2) Copy the hdf-eos5 patch from @vanderwb to enable
use of the Spack compiler wrappers instead of h4cc.

3) Patch an issue in the hdf-eos2 configure script.  The
script tests for the jpeg and libz libraries, succeeds, and
appends HAVE_LIBJPEG=1, etc. to confdefs.h, but then aborts
because HAVE_LIBJPEG is not set in the running environment.

4) Add some LDFLAGS to the build environment.  Otherwise
the build seems to fail on the test program due to the rpc
dependence in HDF4.
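
For (1), the practical effect is that the @3.0 tarball must be supplied
locally: package.py points its url at the current working directory (see the
package.py diff below).  A minimal sketch of the URL the fetcher looks for,
assuming spack is run from the directory holding the tarball:

import os

# Mirrors the url assignment in package.py below: with manual_download = True,
# spack expects hdf-eos2-3.0-src.tar.gz in the directory spack is invoked
# from (or in a configured mirror).
expected = "file://{0}/hdf-eos2-3.0-src.tar.gz".format(os.getcwd())
print(expected)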
Tom Payerle 2024-03-04 15:08:19 -05:00 committed by GitHub
parent 00baaf868e
commit 76ec19b26e
2 changed files with 79 additions and 14 deletions

var/spack/repos/builtin/packages/hdf-eos2/hdf-eos2.configure.patch (new file)

@@ -0,0 +1,25 @@
# Fix some issues with hdf-eos2 configure script
# configure (according to config.log) would find the jpeg
# and libz libs, but then abort complaining they were not
# found because HAVE_LIBJPEG, etc not set (the script just
# appended the lines to confdefs.h but not set in the environment
# of the running script and script tests the env variable)
diff -Naur spack-src/configure spack-src.patched/configure
--- spack-src/configure 2023-12-18 14:56:50.796875000 -0500
+++ spack-src.patched/configure 2023-12-18 15:01:50.780038643 -0500
@@ -12488,6 +12488,7 @@
_ACEOF
LIBS="-ljpeg $LIBS"
+ HAVE_LIBJPEG=1
else
@@ -12863,6 +12864,7 @@
_ACEOF
LIBS="-lz $LIBS"
+ HAVE_LIBZ=1
else

var/spack/repos/builtin/packages/hdf-eos2/package.py

@@ -3,8 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import sys
from os import chmod
from spack.package import *
@@ -16,10 +16,15 @@ class HdfEos2(AutotoolsPackage):
"""
homepage = "https://hdfeos.org"
# The download URLs are messy, and include sha256 checksum.
# Starting with @3, download requires authentication. So reverting
# to a manual download
url = "file://{0}/hdf-eos2-3.0-src.tar.gz".format(os.getcwd())
manual_download = True
# The download URLs for @2 versions are messy, and include sha256 checksum.
# Templates for url_for_version. 0 is sha256 checksum, 1 is filename
# This is just a template. See version_list and url_for_version below
# Template for url_for_version. 0 is sha256 checksum, 1 is filename
url = "https://git.earthdata.nasa.gov/rest/git-lfs/storage/DAS/hdfeos/{0}?response-content-disposition=attachment%3B%20filename%3D%22{1}%22%3B%20filename*%3Dutf-8%27%27{1}"
v2url = "https://git.earthdata.nasa.gov/rest/git-lfs/storage/DAS/hdfeos/{0}?response-content-disposition=attachment%3B%20filename%3D%22{1}%22%3B%20filename*%3Dutf-8%27%27{1}"
maintainers("climbfuji")
@@ -29,15 +34,23 @@ class HdfEos2(AutotoolsPackage):
# In basename expansions, 0 is raw version,
# 1 is for version with dots => underscores
version_list = [
{
"version": "3.0",
"basename": "hdf-eos2-{0}-src.tar.gz",
"sha256": "3a5564b4d69b541139ff7dfdad948696cf31d9d1a6ea8af290c91a4c0ee37188",
"can_auto_download": False,
},
{
"version": "2.20v1.00",
"sha256": "cb0f900d2732ab01e51284d6c9e90d0e852d61bba9bce3b43af0430ab5414903",
"basename": "HDF-EOS{0}.tar.Z",
"can_auto_download": True,
},
{
"version": "2.19b",
"sha256": "a69993508dbf5fa6120bac3c906ab26f1ad277348dfc2c891305023cfdf5dc9d",
"basename": "hdfeos{1}.zip",
"can_auto_download": True,
},
]
@@ -63,19 +76,27 @@ class HdfEos2(AutotoolsPackage):
depends_on("jpeg")
depends_on("szip", when="^hdf +szip")
# Fix some problematic logic in stock configure script
# test succeeds, but then script aborts due to env variable not being set
patch("hdf-eos2.configure.patch", when="@2:3.0")
# The standard Makefile.am, etc. add a --single_module flag to LDFLAGS
# to pass to the linker.
# That appears to be only recognized by the Darwin linker, remove it
# if we are not running on darwin.
if sys.platform != "darwin":
patch("hdf-eos2.nondarwin-no-single_module.patch")
patch("hdf-eos2.nondarwin-no-single_module.patch", when="@2")
def url_for_version(self, version):
vrec = [x for x in self.version_list if x["version"] == version.dotted.string]
if vrec:
fname = vrec[0]["basename"].format(version.dotted, version.underscored)
sha256 = vrec[0]["sha256"]
myurl = self.url.format(sha256, fname)
can_auto_download = vrec[0].get("can_auto_download", False)
if can_auto_download:
myurl = self.v2url.format(sha256, fname)
else:
myurl = self.url
return myurl
else:
sys.exit(
@@ -83,12 +104,20 @@ def url_for_version(self, version):
"version/checksum not found in version_list".format(version)
)
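
As a concrete illustration of the lookup in url_for_version above, the sketch
below expands the v2url template by hand for the 2.19b entry of version_list.
The sha256 and basename template are copied from the diff; the dotted and
underscored stand-ins and the resulting URL are shown for illustration only:

# Stand-ins for version.dotted / version.underscored of version 2.19b
dotted, underscored = "2.19b", "2_19b"
basename = "hdfeos{1}.zip".format(dotted, underscored)  # -> hdfeos2_19b.zip
sha256 = "a69993508dbf5fa6120bac3c906ab26f1ad277348dfc2c891305023cfdf5dc9d"
v2url = (
    "https://git.earthdata.nasa.gov/rest/git-lfs/storage/DAS/hdfeos/{0}"
    "?response-content-disposition=attachment%3B%20filename%3D%22{1}%22"
    "%3B%20filename*%3Dutf-8%27%27{1}"
)
print(v2url.format(sha256, basename))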
@run_before("configure")
def fix_configure(self):
# spack patches the configure file unless autoconf is run,
# and this fails because configure has the wrong permissions (644)
@run_before("configure")
def fix_permissions(self):
if not self.force_autoreconf:
chmod(join_path(self.stage.source_path, "configure"), 0o755)
os.chmod(join_path(self.stage.source_path, "configure"), 0o755)
# The configure script as written really wants you to use h4cc.
# This causes problems because h4cc differs when HDF is built with
# autotools vs cmake, and we lose all the nice flags from the
# Spack wrappers. These filter operations allow us to use the
# Spack wrappers again
filter_file("\\$CC -show &> /dev/null", "true", "configure")
filter_file("CC=./\\$SZIP_CC", "", "configure")
def flag_handler(self, name, flags):
if self.spec.compiler.name == "apple-clang":
@@ -97,12 +126,19 @@ def flag_handler(self, name, flags):
return flags, None, None
def setup_build_environment(self, env):
# Add flags to LDFLAGS for any dependencies that need it
extra_ldflags = []
# hdf might have link dependency on rpc, if so need to add flags
if "rpc" in self.spec:
tmp = self.spec["rpc"].libs.ld_flags
extra_ldflags.append(tmp)
# Set LDFLAGS
env.set("LDFLAGS", " ".join(extra_ldflags))
def configure_args(self):
extra_args = []
# Package really wants h4cc to be used
extra_args.append("CC={0}/bin/h4cc -Df2cFortran".format(self.spec["hdf"].prefix))
# We always build PIC code
extra_args.append("--with-pic")
extra_args.append("--enable-install_include")
@@ -114,7 +150,11 @@ def configure_args(self):
# Provide config args for dependencies
extra_args.append("--with-hdf4={0}".format(self.spec["hdf"].prefix))
if "jpeg" in self.spec:
extra_args.append("--with-jpeg={0}".format(self.spec["jpeg"].prefix))
# Allow handling whatever provider of jpeg we are using
tmp = self.spec["jpeg"].libs.directories
if tmp:
tmp = tmp[0]
extra_args.append("--with-jpeg={0}".format(tmp))
if "szip" in self.spec:
extra_args.append("--with-szlib={0}".format(self.spec["szip"].prefix))
if "zlib" in self.spec: