Skip to content

Commit ce11692

Browse files
committed
Use pyproject.toml for project metadata
1 parent 2c54949 commit ce11692

File tree

3 files changed

+78
-112
lines changed

3 files changed

+78
-112
lines changed

pyproject.toml

Lines changed: 37 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,40 @@
11
[build-system]
2-
# Minimum requirements for the build system to execute.
3-
requires = [ # PEP 508 specifications.
4-
"numpy",
2+
requires = [
53
"setuptools",
6-
"wheel"
4+
"numpy",
5+
]
6+
build-backend = "setuptools.build_meta"
7+
8+
[project]
9+
name = "pyhdf"
10+
version = "0.10.5"
11+
description = "Python interface to the NCSA HDF4 library"
12+
readme = "README.md"
13+
keywords = ['hdf4', 'netcdf', 'numpy', 'python', 'pyhdf']
14+
classifiers = [
15+
"Development Status :: 5 - Production/Stable",
16+
"Intended Audience :: Science/Research",
17+
"Intended Audience :: Developers",
18+
"License :: OSI Approved",
19+
"Programming Language :: C",
20+
"Programming Language :: Python",
21+
"Programming Language :: Python :: 3",
22+
"Topic :: Software Development",
23+
"Topic :: Scientific/Engineering",
24+
"Operating System :: Microsoft :: Windows",
25+
"Operating System :: POSIX",
26+
"Operating System :: Unix",
27+
"Operating System :: MacOS",
28+
]
29+
license = {text = "MIT License"}
30+
authors = [
31+
{name = "Andre Gosselin", email = "Andre.Gosselin@dfo-mpo.gc.ca"},
32+
{name = "Travis E. Oliphant", email = "teoliphant@gmail.com"},
33+
]
34+
maintainers = [
35+
{name = "Fazlul Shahriar", email = "fshahriar@gmail.com"},
736
]
37+
dependencies = []
38+
39+
[project.urls]
40+
Homepage = 'https://github.com/fhs/pyhdf'

setup.cfg

Lines changed: 0 additions & 12 deletions
This file was deleted.

setup.py

Lines changed: 41 additions & 96 deletions
Original file line numberDiff line numberDiff line change
@@ -1,61 +1,29 @@
1-
#!/usr/bin/env python
2-
"""pyhdf: Python interface to the NCSA HDF4 library.
3-
4-
The pyhdf package wraps the functionality of the NCSA HDF version
5-
4 library inside a Python OOP framework. The SD (scientific dataset),
6-
VS (Vdata) and V (Vgroup) APIs are currently implemented. SD datasets
7-
are read/written through numpy arrays. NetCDF files can also be read
8-
and modified with pyhdf.
9-
"""
10-
111
from __future__ import print_function
122

13-
DOCLINES = __doc__.split("\n")
14-
15-
# Allows bdist_egg to work if you have setuptools installed.
16-
# This import must be before the numpy.distutils import of setup.
17-
# Otherwise, no harm.
18-
try:
19-
import setuptools
20-
except:
21-
pass
22-
23-
from numpy.distutils.core import setup, Extension
24-
253
import sys
264
import os
275
import os.path as path
286
import shlex
297

30-
CLASSIFIERS = """\
31-
Development Status :: 5 - Production/Stable
32-
Intended Audience :: Science/Research
33-
Intended Audience :: Developers
34-
License :: OSI Approved
35-
Programming Language :: C
36-
Programming Language :: Python
37-
Programming Language :: Python :: 3
38-
Topic :: Software Development
39-
Topic :: Scientific/Engineering
40-
Operating System :: Microsoft :: Windows
41-
Operating System :: POSIX
42-
Operating System :: Unix
43-
Operating System :: MacOS
44-
"""
8+
from setuptools import Extension, setup
9+
import numpy as np
10+
4511

4612
def _find_args(pat, env):
47-
val = os.environ.get(env, [])
48-
if val:
49-
val = val.split(os.pathsep)
13+
try:
14+
val = os.environ[env].split(os.pathsep)
15+
except KeyError:
16+
val = []
5017
try:
5118
k = sys.argv.index(pat)
52-
val.extend(sys.argv[k+1].split(os.pathsep))
19+
val.extend(sys.argv[k + 1].split(os.pathsep))
5320
del sys.argv[k]
5421
del sys.argv[k]
5522
except ValueError:
5623
pass
5724
return val
5825

26+
5927
# A Debian based linux distribution might be using libhdf4 (contains netcdf
6028
# routines) or libhdf4-alt (does not contain netcdf routines). This function
6129
# tries to detect if the alt version should be used.
@@ -67,25 +35,29 @@ def _use_hdf4alt(libdirs):
6735
libdirs.append("/usr/local/lib")
6836
libdirs.append("/lib")
6937
for d in libdirs:
70-
if os.path.exists(os.path.join(d, "libdfalt.so")) and \
71-
os.path.exists(os.path.join(d, "libmfhdfalt.so")):
38+
if os.path.exists(os.path.join(d, "libdfalt.so")) and os.path.exists(
39+
os.path.join(d, "libmfhdfalt.so")
40+
):
7241
return True
7342
return False
7443

75-
include_dirs = _find_args('-i', 'INCLUDE_DIRS')
76-
library_dirs = _find_args('-l', 'LIBRARY_DIRS')
77-
szip_installed = 'SZIP' in os.environ
78-
compress = 'NO_COMPRESS' not in os.environ
44+
45+
include_dirs = _find_args("-i", "INCLUDE_DIRS")
46+
library_dirs = _find_args("-l", "LIBRARY_DIRS")
47+
szip_installed = "SZIP" in os.environ
48+
compress = "NO_COMPRESS" not in os.environ
7949
extra_link_args = None
8050
if "LINK_ARGS" in os.environ:
8151
extra_link_args = shlex.split(os.environ["LINK_ARGS"])
8252

8353

84-
msg = 'Cannot proceed without the HDF4 library. Please ' \
85-
'export INCLUDE_DIRS and LIBRARY_DIRS as explained' \
86-
'in the INSTALL file.'
54+
msg = (
55+
"Cannot proceed without the HDF4 library. Please "
56+
"export INCLUDE_DIRS and LIBRARY_DIRS as explained"
57+
"in the INSTALL file."
58+
)
8759

88-
if sys.platform.startswith('linux'):
60+
if sys.platform.startswith("linux"):
8961
# libhdf4 header files on most linux distributions
9062
# (e.g. Debian/Ubuntu, CentOS) are stored in /usr/include/hdf
9163
d = "/usr/include/hdf/"
@@ -97,67 +69,40 @@ def _use_hdf4alt(libdirs):
9769
print("\n******\n%s not found\n******\n\n" % p)
9870
raise RuntimeError(msg)
9971

100-
if sys.platform == 'win32':
101-
# Find DLL path
102-
dll_path = ''
103-
for p in library_dirs:
104-
if path.exists(p + os.path.sep + "mfhdf.dll"):
105-
dll_path = p + os.path.sep
106-
break
107-
if dll_path == '':
108-
print("library_dirs =", library_dirs)
109-
raise RuntimeError("Cannot find required HDF4 DLLs -- check LIBRARY_DIRS")
110-
111-
if sys.platform == 'win32':
112-
libraries = ["mfhdf", "hdf", "xdr" ]
72+
if sys.platform == "win32":
73+
libraries = ["mfhdf", "hdf", "xdr"]
11374
elif _use_hdf4alt(library_dirs):
11475
libraries = ["mfhdfalt", "dfalt"]
11576
else:
11677
libraries = ["mfhdf", "df"]
11778

11879
if szip_installed:
11980
extra_compile_args = []
120-
if sys.platform == 'win32':
81+
if sys.platform == "win32":
12182
libraries += ["szlib"]
12283
else:
12384
libraries += ["sz"]
12485
else:
12586
extra_compile_args = ["-DNOSZIP"]
126-
if sys.platform == 'win32':
87+
if sys.platform == "win32":
12788
libraries += ["libjpeg", "zlib", "ws2_32"]
12889
else:
12990
libraries += ["jpeg", "z"]
13091

13192
if not compress:
13293
extra_compile_args += ["-DNOCOMPRESS"]
13394

134-
_hdfext = Extension('pyhdf._hdfext',
135-
sources = ["pyhdf/hdfext_wrap.c"],
136-
include_dirs = include_dirs,
137-
extra_compile_args = extra_compile_args,
138-
library_dirs = library_dirs,
139-
extra_link_args=extra_link_args,
140-
libraries = libraries,
141-
)
142-
143-
if sys.platform == 'win32':
144-
data_files = [("pyhdf", [dll_path + x for x in ["mfhdf.dll", "hdf.dll"]])]
145-
else:
146-
data_files = []
147-
148-
setup(name = 'pyhdf',
149-
maintainer = 'pyhdf authors',
150-
author = 'Andre Gosselin et al.',
151-
description = DOCLINES[0],
152-
keywords = ['hdf4', 'netcdf', 'numpy', 'python', 'pyhdf'],
153-
license = 'MIT',
154-
long_description = "\n".join(DOCLINES[2:]),
155-
url = 'https://github.com/fhs/pyhdf',
156-
version = '0.10.5',
157-
packages = ['pyhdf'],
158-
ext_modules = [_hdfext],
159-
data_files = data_files,
160-
provides = ['pyhdf'],
161-
classifiers = [_f for _f in CLASSIFIERS.split('\n') if _f],
162-
platforms = ["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
163-
)
95+
96+
setup(
97+
ext_modules=[
98+
Extension(
99+
name="pyhdf._hdfext",
100+
sources=["pyhdf/hdfext_wrap.c"],
101+
include_dirs=[np.get_include()] + include_dirs,
102+
extra_compile_args=extra_compile_args,
103+
library_dirs=library_dirs,
104+
extra_link_args=extra_link_args,
105+
libraries=libraries,
106+
),
107+
],
108+
)

0 commit comments

Comments
 (0)