"""Package to build f2py modules 1) For a simple setup the typical use may be - all f90 code in library.f90 - f2py interface module interface.f90 the in your __init__.py of the module call from build import BaseBuild as Build Build(library=True).run() Interface routines can be very simple, just 'use' module, define variables, call function/subroutine. Easiest may be to have all be subroutines. Consult f2py doc for special f2py features such as dependent varibale definitions. f2py does not like non-standard data types; the build module defines standard ieee type names such as real64 and int32. Please use these as kinds. You may define your own and add to the .f2cmap file that is created by default by build. You can access allocated arrays from a module if the module is used by the interface. You cannot return arrays, however, that are locally allocated in a subroutine. Ways around are to acllocate data in a modue and then can be accessed from python if module is linked with interface, or to created an access routine that returns the data in a numpay array passed to the routine (since you can't locally allocate dynamically) or passing just array dimension to routine to have an automatic array. Just at the time of this wrting, allocatable arrays did nto work for that putpose. 2) for even simpler cases you may put all f90 code just into interface.f90, but there is a lot of Fortran features f2py may not like. 3) For more advanced uses create derived class Build(BaseBuild) Build().run() from _interface import * Once and use 'finegrained' selection of what parts of the inteface file(s) be use to create f2py interface: Usually, sources should be list of filenames or Path objects But it may also be a dictinary with - a key 'source' for source filename or path - a keys for 'only' and 'skip' with a list/tuple of subrotune names for each Example: kwargs.setdefault('sources', (dict(source='interface.f90', only=('add_stardata_',)),)) (but what may alos work is just kwargs.setdefault('sources', dict(source='interface.f90', only='add_stardata_')) ) This may allow to get around creating a library but have just one fortran file. Generally, though, for use of modules, better use the library interface To avoide re-compilation in case compiler check fails add to .bashrc export BUILD_CHECK_COMPILER=FALSE """ import os import subprocess import importlib import sys import shutil import platform import re import multiprocessing import contextlib from itertools import zip_longest, chain from shutil import rmtree from glob import iglob from pathlib import Path from importlib.machinery import EXTENSION_SUFFIXES import numpy as np # a sample file SAMPLE_MAKEFILE = r""" $(SOURCE) ?= . AR ?= ar .DEFAULT_GOAL := library.a LIBRARY_OBJECTS = library.o library.o: ${SOURCE}/library.f90 gfortran -c -Ofast -fPIC -o library.o ${SOURCE}/library.f90 library.a: $(LIBRARY_OBJECTS) rm -f library.a $(AR) cvr $@ $(LIBRARY_OBJECTS) .PHONY: clean clean: -rm -f *.o *.a *.mod *.smod *~ \#*\# .*~ .\#* """ F2CMAP=""" { 'real': {'real32': 'float', 'real64': 'double', 'real128': 'long_double'}, 'integer': {'int8': 'signed_char', 'int16': 'short', 'int32': 'int', 'int64': 'long', 'int128': 'long_long'}, 'complex': {'comp32': 'complex_float', 'comp64': 'complex_double', 'comp128': 'complex_long_double'}, 'character': {'char8' : 'char'}, } """ @contextlib.contextmanager def chdir(path = None): """ Context managet to work in provided directory. 
""" cwd = os.getcwd() try: if path is not None: os.chdir(path) yield finally: os.chdir(cwd) def get_makefile_deps(makefile, var = 'LIBRARY_OBJECTS'): lines = Path(makefile).expanduser().read_text() x = re.findall(fr'(?ms)^{var}\s*=((?:\s*(?:\\)?\s+\w+\.o)+)', lines)[0] x = re.findall(r'(\w+\.o)', x) return tuple(x) class BaseBuild(): """ Class to build module binaries. Having this in a class and not storing instance keeps namespace clean. """ f2py_options = ( f'f2py{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}', f'f2py{sys.version_info.major}.{sys.version_info.minor}', f'f2py{sys.version_info.major}', f'f2py' ) for f in f2py_options: if shutil.which(f): f2py_exec = f break else: raise Exception('f2py not found.') f2pycomp = ( f2py_exec, '--verbose', # '--debug-capi', ) f2pycomp1 = () f2pycomp2 = () link_libraries = () fcomp = 'gfortran -v' def _defaults(self, defaults): pyversion = ( sys.version_info.major * 10000 + sys.version_info.minor * 100 + sys.version_info.micro) # we may want to add some checks ... self.__dict__.update(defaults) # set defaults where not provided # doing some guessing ... file = sys.modules[self.__module__].__file__ parent = self.__module__.rsplit('.', 1)[0] if self.__class__.__name__ == 'BaseBuild': for i in range(10): f = sys._getframe(i).f_globals['__file__'] if f != file: file = f parent = sys._getframe(i).f_globals['__package__'] break if not hasattr(self, 'path'): self.path = Path(file).expanduser().resolve().parent if not hasattr(self, 'package'): self.package = 'interface' # info only if not hasattr(self, 'parent'): self.parent = parent # TODO - better config for sources if not hasattr(self, 'macros'): self.macros = dict() if not hasattr(self, 'sources'): self.sources = (f'{self.package}.f90',) if isinstance(self.sources, (str, dict)): self.sources = (self.sources,) if not hasattr(self, 'process'): self.process = () if not hasattr(self, 'intermediate_path'): self.intermediate_path = None if not hasattr(self, 'include_libraries'): self.include_libraries = () # e.g., 'uuid' if not hasattr(self, 'include_paths'): self.include_paths = () if not hasattr(self, 'module'): self.module = f'_{self.package}' if not hasattr(self, 'signature_file'): self.signature_file = f'{self.module}.pyf' if not hasattr(self, 'endian'): self.endian = 'big' if not hasattr(self, 'debug'): self.debug = False if not hasattr(self, 'debug_flags'): self.debug_flags = ( '-g', '-O0', '-finit-real=snan', '-finit-integer=2147483647', '-finit-derived', '-ffpe-trap=invalid,zero,overflow,underflow', '-fcheck=all', '-Wall', '-fbacktrace', ) if not hasattr(self, 'compile_flags'): self.compile_flags = ( '-fPIC', '-fno-second-underscore', ) if self.debug == False: self.compile_flags += ( '-O3', '-funroll-loops', ) else: if self.debug_flags is not None: self.compile_flags += self.debug_flags if self.endian == 'big': self.compile_flags += ( '-fconvert=big-endian', ) if not hasattr(self, 'f2cmap'): self.f2cmap = Path(self.path) / '.f2py_f2cmap' # todo - multi-libraries may need more features for native Kepler-like support if not hasattr(self, 'library'): self.library = False if not hasattr(self, 'libraries'): self.libraries = dict( lib = dict( update = True, build_dir = '_library', source_path = self.path, name = 'library.a', files = ('library.f90',), makefile_path = self.path, makefile = 'Makefile', makeflags = [], env = None, ), ) # f2py build path if not hasattr(self, 'build_path'): if pyversion >= 131200: self.build_path = Path(self.path) / 
class BaseBuild():
    """
    Class to build module binaries.

    Having this in a class and not storing an instance keeps the
    namespace clean.
    """

    f2py_options = (
        f'f2py{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}',
        f'f2py{sys.version_info.major}.{sys.version_info.minor}',
        f'f2py{sys.version_info.major}',
        'f2py',
        )
    for f in f2py_options:
        if shutil.which(f):
            f2py_exec = f
            break
    else:
        raise Exception('f2py not found.')

    f2pycomp = (
        f2py_exec,
        '--verbose',
        # '--debug-capi',
        )
    f2pycomp1 = ()
    f2pycomp2 = ()
    link_libraries = ()
    fcomp = 'gfortran -v'

    def _defaults(self, defaults):
        pyversion = (
            sys.version_info.major * 10000 +
            sys.version_info.minor * 100 +
            sys.version_info.micro)
        # we may want to add some checks ...
        self.__dict__.update(defaults)

        # set defaults where not provided
        # doing some guessing ...
        file = sys.modules[self.__module__].__file__
        parent = self.__module__.rsplit('.', 1)[0]
        if self.__class__.__name__ == 'BaseBuild':
            for i in range(10):
                f = sys._getframe(i).f_globals['__file__']
                if f != file:
                    file = f
                    parent = sys._getframe(i).f_globals['__package__']
                    break
        if not hasattr(self, 'path'):
            self.path = Path(file).expanduser().resolve().parent
        if not hasattr(self, 'package'):
            self.package = 'interface'
        # info only
        if not hasattr(self, 'parent'):
            self.parent = parent
        # TODO - better config for sources
        if not hasattr(self, 'macros'):
            self.macros = dict()
        if not hasattr(self, 'sources'):
            self.sources = (f'{self.package}.f90',)
        if isinstance(self.sources, (str, dict)):
            self.sources = (self.sources,)
        if not hasattr(self, 'process'):
            self.process = ()
        if not hasattr(self, 'intermediate_path'):
            self.intermediate_path = None
        if not hasattr(self, 'include_libraries'):
            self.include_libraries = ()  # e.g., 'uuid'
        if not hasattr(self, 'include_paths'):
            self.include_paths = ()
        if not hasattr(self, 'module'):
            self.module = f'_{self.package}'
        if not hasattr(self, 'signature_file'):
            self.signature_file = f'{self.module}.pyf'
        if not hasattr(self, 'endian'):
            self.endian = 'big'
        if not hasattr(self, 'debug'):
            self.debug = False
        if not hasattr(self, 'debug_flags'):
            self.debug_flags = (
                '-g',
                '-O0',
                '-finit-real=snan',
                '-finit-integer=2147483647',
                '-finit-derived',
                '-ffpe-trap=invalid,zero,overflow,underflow',
                '-fcheck=all',
                '-Wall',
                '-fbacktrace',
                )
        if not hasattr(self, 'compile_flags'):
            self.compile_flags = (
                '-fPIC',
                '-fno-second-underscore',
                )
        if not self.debug:
            self.compile_flags += (
                '-O3',
                '-funroll-loops',
                )
        else:
            if self.debug_flags is not None:
                self.compile_flags += self.debug_flags
        if self.endian == 'big':
            self.compile_flags += (
                '-fconvert=big-endian',
                )
        if not hasattr(self, 'f2cmap'):
            self.f2cmap = Path(self.path) / '.f2py_f2cmap'

        # todo - multi-libraries may need more features for native Kepler-like support
        if not hasattr(self, 'library'):
            self.library = False
        if not hasattr(self, 'libraries'):
            self.libraries = dict(
                lib = dict(
                    update = True,
                    build_dir = '_library',
                    source_path = self.path,
                    name = 'library.a',
                    files = ('library.f90',),
                    makefile_path = self.path,
                    makefile = 'Makefile',
                    makeflags = [],
                    env = None,
                    ),
                )

        # f2py build path
        if not hasattr(self, 'build_path'):
            # 31200 corresponds to Python 3.12.0, where f2py switched to
            # the meson backend and build-dir handling changed
            if pyversion >= 31200:
                self.build_path = Path(self.path) / self.libraries['lib']['build_dir']
            else:
                self.build_path = Path(self.path) / '_build'

        if not hasattr(self, 'clean_patterns'):
            self.clean_patterns = (
                '_library*',
                '*.pyf',
                '*.so',
                )  # e.g., ('_kepler*',)
        if not hasattr(self, 'clean_build_path'):
            self.clean_build_path = False
        if not hasattr(self, 'check_compiler'):
            c = os.getenv('BUILD_CHECK_COMPILER')
            if c is None or c.upper() in ('TRUE', '1', 'YES', 'Y',):
                self.check_compiler = True
            elif c.upper() in ('FALSE', '0', 'NO', 'N',):
                self.check_compiler = False
            else:
                raise AttributeError(
                    f'invalid environment variable BUILD_CHECK_COMPILER = {c}')
        if hasattr(self, 'openmp'):
            self.link_libraries += ('gomp',)
            self.compile_flags += ('-fopenmp',)

    # does not yet work
    # from numpy.f2py import f2py2e
    # def f2py(self, args):
    #     result = self.f2py2e.run_main(args)
    #     print(result)
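    # Example (a sketch of the derived-class usage from the module docstring;
    # the source file and subroutine names are illustrative):
    #
    #     class Build(BaseBuild):
    #         def __init__(self, **kwargs):
    #             kwargs.setdefault(
    #                 'sources',
    #                 (dict(source='interface.f90',
    #                       only=('add_stardata_',)),))
    #             super().__init__(**kwargs)
    #
    #     Build(library=True).run()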
    def __init__(self, verbose=True, ncpu=None, **defaults):
        """
        init routine, likely to be called before doing own initialisations
        """
        self.verbose = verbose
        self.ncpu = ncpu
        self._defaults(defaults)

    def f2py(self, args, verbose=None):
        assert np.all([isinstance(a, str) for a in args]), (
            f' [{self.__class__.__name__}] all arguments need to be of type str: {args}')
        assert np.all([len(a) > 0 for a in args]), (
            f' [{self.__class__.__name__}] all arguments need to be of length > 0: {args}')
        args = [*self.f2pycomp] + list(args)
        if verbose is None:
            verbose = self.verbose
        if verbose:
            print(' [DEBUG][f2py] Call: ' + args[0] + ' "' + '" "'.join(args[1:]) + '"')
        result = subprocess.run(
            args,
            check = True,
            shell = False)
        if verbose:
            print(f' [DEBUG][f2py] Result: {result}')

    def f2bin(self, args, verbose=None):
        args = [*(self.fcomp.split())] + list(args)
        if verbose is None:
            verbose = self.verbose
        if verbose:
            print(' [DEBUG][f2bin] ' + ' '.join(args))
        result = subprocess.run(
            args,
            check = True,
            shell = False)
        if verbose:
            print(f' [DEBUG][f2bin] Result: {result}')

    def run(self, verbose=None, ncpu=None, check_compiler=None):
        """
        execute tests and build
        """
        path = Path(self.path).expanduser()
        if verbose is None:
            verbose = self.verbose
        if ncpu is None:
            if self.debug:
                ncpu = 1
            else:
                ncpu = self.ncpu
        kw = dict(verbose=verbose, ncpu=ncpu)
        if (self.build_library_check(**kw) or
                self.build_check(check_compiler=check_compiler, **kw)):
            self.build_module(**kw)
        if self.clean_build_path:
            if self.build_path is not None:
                bp = Path(self.build_path)
                if not bp.is_absolute():
                    bp = path / bp
                rmtree(
                    bp,
                    onerror=lambda f, p, e: print(
                        f' [run] - could not delete {p}: {e!s}'))
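    # Example (illustrative): to skip the compiler-version check for a single
    # build one may call
    #
    #     BaseBuild(library=True).run(check_compiler=False)
    #
    # or disable it for all builds by adding to ~/.bashrc
    #
    #     export BUILD_CHECK_COMPILER=FALSE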
    def make_clean(self):
        """
        call clean from Makefile
        """
        with chdir(self.path):
            try:
                result = subprocess.run(['make', 'clean'], check = True)
            except subprocess.CalledProcessError as e:
                raise Exception(f"make terminated with error {e!s}")
            print(result)

    def clean_libraries(self, verbose=None):
        """
        call clean from Makefile for the libraries in their respective
        build directory
        """
        if self.library is None:
            return
        if verbose is None:
            verbose = self.verbose
        for name, l in self.libraries.items():
            path = Path(self.path).expanduser()
            p = l['build_dir']
            if p is not None:
                p = Path(p)
                if not p.is_absolute():
                    p = path / p
                if not p.exists():
                    if verbose:
                        print(f' [DEBUG] [{name}] directory {path} does not exist.')
                    continue
                if l['makefile_path'] is not None:
                    makefile = Path(l['makefile_path']) / l['makefile']
                else:
                    makefile = Path(self.path) / l['makefile']
                if not makefile.exists():
                    if verbose:
                        print(f' [DEBUG] [{name}] makefile {makefile} does not exist.')
                    continue
                with chdir(p):
                    cmd = ['make', 'clean']
                    cmd += ['-f', str(makefile)] + l.get('makeflags', [])
                    if l.get('source_path', None) is not None:
                        cmd += [f'SOURCE={l["source_path"]!s}']
                    if verbose:
                        print(f' [DEBUG] [{name}] call: {cmd}')
                    subprocess.run(cmd, shell=False, check=True)

    def clean(self, verbose=None):
        """
        clean files

        this should delete build and module paths
        """
        path = Path(self.path).expanduser()
        clean_patterns = self.clean_patterns
        if clean_patterns is None:
            clean_patterns = ()
        if isinstance(clean_patterns, (str, Path)):
            clean_patterns = (clean_patterns,)
        clean_patterns = (Path(p) for p in clean_patterns)
        clean_patterns = (p if p.is_absolute() else path / p for p in clean_patterns)
        for p in clean_patterns:
            for f in iglob(str(p)):
                f = Path(f)
                if f.is_file() or f.is_symlink():
                    f.unlink()
                else:
                    rmtree(
                        f,
                        onerror=lambda f, p, e: print(
                            f' [clean] [info] [don\'t worry, be happy] Could not delete {p}: {e!s}'))
        if self.build_path is not None:
            p = Path(self.build_path)
            if not p.is_absolute():
                p = path / p
            rmtree(
                p,
                onerror=lambda f, p, e: print(
                    f' [clean] [info] [don\'t worry, be happy] - Could not delete {p}: {e!s}'))
        if self.library:
            for name, l in self.libraries.items():
                p = l['build_dir']
                if p is not None:
                    p = Path(p)
                    if not p.is_absolute():
                        p = path / p
                    if p.exists():
                        rmtree(
                            p,
                            onerror=lambda f, p, e: print(
                                f' [clean] [info] [don\'t worry, be happy] - could not delete {p}: {e!s}'))
                else:
                    p = path
        # add specific files to delete ...
        for p in self.process:
            if isinstance(p, str):
                s = p
                i = f'_{p}'
            else:
                s, i = p
            if self.intermediate_path is not None:
                m = Path(self.intermediate_path)
                if not m.is_absolute():
                    m = Path(self.path).expanduser() / m
                i = m / i
            else:
                i = path / i
            try:
                i.unlink()
            except FileNotFoundError:
                pass
        try:
            rmtree(
                os.path.join(path, '__pycache__'),
                onerror=lambda f, p, e: print(
                    f' [clean] - could not delete {p}: {e!s}'))
            print(' [clean] Cleared __pycache__')
        except Exception:
            print(' [clean] FAILED cleaning __pycache__')

    def make(self, *args, **kwargs):
        self.run(*args, **kwargs)
        importlib.invalidate_caches()
        module = importlib.import_module('.' + self.module, self.parent)
        importlib.reload(module)

    def new(self, verbose=None):
        self.clean_libraries(verbose=verbose)
        self.clean(verbose=verbose)
        self.make(verbose=verbose)
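    # Example (illustrative): to force a complete rebuild from python one
    # may call
    #
    #     b = BaseBuild(library=True)
    #     b.new()    # clean libraries and build products, rebuild, re-import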
    def build_module(self, verbose=None, ncpu=None, check_compiler=None):
        """
        Build python module binary library.

        We also do a test of the executable version
        """
        if ncpu is None:
            ncpu = self.ncpu
        if verbose is None:
            verbose = self.verbose
        cwd = os.getcwd()
        path = Path(self.path).expanduser()
        os.chdir(path)
        extra_flags = tuple()
        if self.f2cmap is not None:
            if not Path(self.f2cmap).exists():
                Path(self.f2cmap).write_text(F2CMAP)
            extra_flags = ('--f2cmap', str(self.f2cmap),)
        sources = self.sources
        if isinstance(sources, (str, dict, Path)):
            sources = (sources,)
        assert isinstance(sources, (list, tuple))
        sources = list(sources)
        skips = list()
        onlys = list()
        for i, s in enumerate(sources):
            if isinstance(s, dict):
                sources[i] = s.get('source', None)
                skips.append(s.get('skip', None))
                onlys.append(s.get('only', None))
            else:
                skips.append(None)
                onlys.append(None)
        for p in self.process:
            if isinstance(p, str):
                s = p
                i = f'_{p}'
            else:
                s, i = p
            if self.intermediate_path is not None:
                m = Path(self.intermediate_path)
                if not m.is_absolute():
                    m = path / m
                i = m / i
            else:
                i = path / i
            self.process_macros(s, i, self.macros)
            self.process_includes(i, i)
            sources[sources.index(s)] = i
        sources = [str(s) for s in sources]
        source_strings = list()
        for s, k, o in zip(sources, skips, onlys):
            source_strings.append(s)
            if k is not None:
                if isinstance(k, str):
                    k = (k,)
                source_strings.append('skip:')
                source_strings.extend(k)
                source_strings.append(':')
            if o is not None:
                if isinstance(o, str):
                    o = (o,)
                source_strings.append('only:')
                source_strings.extend(o)
                source_strings.append(':')
        try:
            args = [
                '-m', str(self.module),
                ]
            if self.build_path is not None:
                bp = Path(self.build_path).expanduser()
                if not bp.is_absolute():
                    bp = path / bp
                if not bp.exists():
                    bp.mkdir(parents=True, exist_ok=True)
                    if verbose:
                        print(f' [DEBUG][build] creating directory {bp}')
            else:
                bp = path
            args += [
                '-h', str(bp / self.signature_file),
                ]
            if len(self.include_paths) > 0:
                args += [
                    '--include-paths',
                    ':'.join([str(p) for p in self.include_paths]),
                    ]
            args += [
                *extra_flags,
                *self.f2pycomp1,
                *source_strings,
                '--overwrite-signature',
                ]
            self.f2py(args)
        except subprocess.CalledProcessError:
            raise Exception("creating f2py signature failed")
        libraries = list()
        if self.library:
            for name, l in self.libraries.items():
                if l['build_dir'] is not None:
                    p = path / l['build_dir']
                else:
                    p = path
                libraries += [p / l['name']]
        try:
            args = []
            if self.build_path is not None:
                args = [
                    # '--debug',
                    '--build-dir', str(bp),
                    ]
            fflags = [
                *chain(
                    self.compile_flags,
                    tuple(f'-I{p}' for p in self.include_paths),
                    )
                ]
            # include the paths from related libraries
            if self.library:
                for name, l in self.libraries.items():
                    if l['build_dir'] is not None:
                        p = path / l['build_dir']
                    else:
                        p = path
                    fflags.append(f'-I{p!s}')
            fflags = ' '.join(fflags)
            if len(fflags) > 0:
                args += [
                    f'--f90flags={fflags}',
                    f'--f77flags={fflags}',
                    ]
            if len(self.include_paths) > 0:
                args += [
                    '--include-paths',
                    ':'.join(str(p) for p in self.include_paths),
                    ]
            args += [
                *(f'-l{l}' for l in self.include_libraries),
                *extra_flags,
                *self.f2pycomp2,
                '-c',
                '-m', str(self.module),
                # '-DF2PY_USE_PYTHON_TLS',
                ]
            if self.library:
                for name, l in self.libraries.items():
                    if l['build_dir'] is not None:
                        p = path / l['build_dir']
                    else:
                        p = path
                    args += [str(p / l['name'])]
            args += [
                *source_strings,
                ]
            args += [
                str(bp / self.signature_file),
                ]
            for l in self.link_libraries:
                args += [f"-l{l}"]
            self.f2py(args)
        except subprocess.CalledProcessError as e:
            raise Exception("creating module failed") from e
        os.chdir(cwd)
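    # Example (illustrative): to compile against an extra include directory
    # and link an additional system library (here libuuid, as suggested by
    # the include_libraries comment above), one may pass
    #
    #     BaseBuild(library=True,
    #               include_paths=('/opt/local/include',),
    #               include_libraries=('uuid',),
    #               ).run()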
    def build_library_check(self, verbose=None, ncpu=None):
        """
        CUSTOM check whether required libraries are up to date

        return value is whether library needs to be built
        """
        if verbose is None:
            verbose = self.verbose
        if not self.library:
            return False
        library_time = 0
        update = False
        if platform.system() == 'Darwin':
            if os.getenv('AR') is None:
                os.environ['AR'] = '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ar'
        for name, l in self.libraries.items():
            path = Path(self.path)
            if l['build_dir'] is not None:
                path = path / l['build_dir']
            try:
                filename = path / l['name']
                library_time = os.stat(filename).st_mtime
                if not l['update']:
                    continue
            except FileNotFoundError:
                library_time = 0
            if l['makefile_path'] is not None:
                makefile = Path(l['makefile_path']) / l['makefile']
            else:
                makefile = Path(self.path) / l['makefile']
            if not makefile.exists():
                makefile.write_text(SAMPLE_MAKEFILE)
            last_time = os.stat(makefile).st_mtime
            for f in l['files']:
                if l.get('source_path', None) is not None:
                    f = Path(l['source_path']) / f
                else:
                    f = Path(self.path) / f
                for fx in iglob(str(f)):
                    last_time = max(last_time, os.stat(fx).st_mtime)
            if last_time > library_time:
                cwd = os.getcwd()
                if verbose:
                    print(f' [DEBUG] [{name}] compiling in {path}')
                if not path.exists():
                    path.mkdir(parents=True)
                    if verbose:
                        print(f' [DEBUG] [{name}] creating directory {path}')
                with chdir(path):
                    if ncpu is None:
                        ncpu = multiprocessing.cpu_count()
                    if ncpu is Ellipsis:
                        ncpu = 2**10
                    cmd = ['make']
                    if ncpu > 1:
                        cmd += ['-j', f'{ncpu:d}']
                    cmd += ['-f', str(makefile)] + l.get('makeflags', [])
                    if l.get('source_path', None) is not None:
                        cmd += [f'SOURCE={l["source_path"]!s}']
                    if self.debug:
                        cmd += [f'F90={self.fcomp} -c -fPIC {" ".join(self.debug_flags)}']
                    if verbose:
                        print(f' [DEBUG] [{name}] call: {cmd}')
                    kw = dict()
                    env = l.get('env', None)
                    if env is not None:
                        kw['env'] = env
                    subprocess.run(cmd, shell=False, check=True, **kw)
                update = True
        return update

    def build_check(self, verbose=None, ncpu=None, check_compiler=None):
        """
        check whether build is OK
        """
        path = Path(self.path).expanduser()
        if verbose is None:
            verbose = self.verbose
        so_file_base = path / self.module
        for extension in EXTENSION_SUFFIXES:
            so_file = so_file_base.with_suffix(extension)
            if so_file.exists():
                break
        else:
            if verbose:
                print(' [DEBUG][build_check] so file does not exist.')
            return True
        source_files = list()
        sources = self.sources
        if isinstance(sources, (str, dict)):
            sources = (sources,)
        for s in sources:
            if isinstance(s, dict):
                s = s['source']
            s = Path(s)
            if not s.is_absolute():
                s = path / s
            source_files.append(s)
        so_file_date = os.stat(so_file).st_mtime
        for f in source_files:
            if so_file_date < os.stat(f).st_mtime:
                if verbose:
                    print(f' [DEBUG][build_check] {f} newer than {so_file}.')
                return True
        for p in self.process:
            if isinstance(p, str):
                s = p
                i = f'_{p}'
            else:
                s, i = p
            if self.intermediate_path is not None:
                m = Path(self.intermediate_path)
                if not m.is_absolute():
                    m = path / m
                i = m / i
            else:
                i = path / i
            if not i.exists():
                return True
        try:
            if self.parent in ('', None):
                module = importlib.import_module(self.module, self.parent)
            else:
                module = importlib.import_module('.' + self.module, self.parent)
        except ImportError as e:
            if verbose:
                print(f' [DEBUG][build_check] Import Error: {e}')
            # sys.exit()
            return True

        # check f2py numpy version (since 1.20)
        try:
            np_f2py_version = module.__f2py_numpy_version__
            print(f' [DEBUG] f2py version {np_f2py_version}')
            np_version = np.__version__
            if np_version != np_f2py_version:
                if verbose:
                    print(' [DEBUG][build_check] Library/numpy version mismatch:')
                    print(f' [DEBUG][build_check] Library Version {np_f2py_version}')
                    print(f' [DEBUG][build_check] Numpy Version {np_version}')
                return True
        except Exception as e:
            print(f' [DEBUG][build_check] module f2py used < 1.20? {e}')

        if check_compiler is None:
            check_compiler = self.check_compiler
        if not check_compiler:
            return False

        # check for changed compiler version
        # (works on Fedora 18+)
        # other updates are welcome!
        # seems to have changed with gcc 9.0
        # entirely different in MacOSX - here we require Homebrew
        try:
            if platform.system() == 'Darwin':
                result = subprocess.check_output(
                    "gfortran --version",
                    shell=True).decode('ASCII', errors='ignore')
                compiler_version = 'UNKNOWN'
                for line in result.splitlines():
                    x = re.findall(r'GCC (\d+\.\d+\.\d+(?:_\d+)?)\)', line)
                    if len(x) > 0:
                        compiler_version = x[0]
                        break
                result = subprocess.check_output(
                    f"strings - {so_file!s} | grep '/gcc/'",
                    shell = True).decode('ASCII', errors='ignore')
                library_version = []
                try:
                    for line in result.splitlines():
                        x = re.findall(r'/gcc/(\d+\.\d+\.\d+(?:_\d+)?)/', line) # Homebrew 11.2.0_2
                        if len(x) > 0:
                            library_version.append(x[0])
                except Exception:
                    pass
                try:
                    library_version.append(
                        result.splitlines()[0].split(' ', 2)[-1]) # Homebrew GCC 10.2.0_4
                except Exception:
                    pass
                if compiler_version not in library_version:
                    result = subprocess.check_output(
                        f"strings - {so_file!s} | grep 'COMPILER_VERSION'",
                        shell = True).decode('ASCII', errors='ignore')
                    try:
                        for line in result.splitlines():
                            x = re.findall(r'GCC version (\d+\.\d+\.\d+(?:_\d+)?) ', line) # Homebrew 11.2.0 - Intel
                            if len(x) > 0:
                                library_version.append(x[0])
                    except Exception:
                        pass
                    compiler_version = compiler_version.split('_')[0]
            elif platform.system() == 'Linux':
                result = subprocess.check_output(
                    "gcc --version",
                    shell=True).decode('ASCII', errors='ignore')
                compiler_version = (result.splitlines()[0]).split(' ', 2)[-1]
                result = subprocess.check_output(
                    f"strings - {so_file!s} | grep GCC:",
                    shell = True).decode('ASCII', errors='ignore')
                library_version = []
                library_version.append(
                    result.splitlines()[0].split(' ', 2)[2]) # pre 9.0
                library_version.append(
                    result.splitlines()[0].split('GCC:')[1].split(' ', 2)[2]) # 9.1
            if compiler_version not in library_version:
                if verbose:
                    print(' [DEBUG][build_check] Compiler/library version mismatch:')
                    print(f' [DEBUG][build_check] Compiler Version {compiler_version}')
                    print(f' [DEBUG][build_check] Library Version {library_version}')
                return True
            else:
                if verbose:
                    print(f' [DEBUG][build_check] {compiler_version}, {library_version}')
        except Exception:
            if verbose:
                print(f" [DEBUG][build_check] Compiler comparison failed for {so_file}.")
            return True
        return False

    def process_macros(self, infile, outfile, macros):
        """
        replace macros of the form {NAME} by their value from *macros*
        """
        source = Path(infile).expanduser().read_text()
        for k, v in macros.items():
            source = source.replace(f'{{{k}}}', str(v))
        Path(outfile).expanduser().write_text(source)
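    # Example (illustrative): with process=('interface.f90',) and
    # macros=dict(NBODY=3), every occurrence of {NBODY} in interface.f90 is
    # replaced by 3 and the result is written to _interface.f90, which is
    # then passed to f2py in place of the original file.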
""" path = Path(self.path).expanduser() source = Path(infile).expanduser().read_text() includes = re.findall(r'(?m)^( *\{insert\s[^\}]+\} *\n)', source) for i in includes: filesection = re.findall(r'{insert\s+(\S+)\}$', i)[0] x = filesection.split(':') if len(x) == 2: filename, section = x else: filename = x[0] section = None if filename.count('[SOURCE]') > 0: for inc in self.include_paths: fn = Path(filename.replace('[SOURCE]', str(inc))) if not fn.is_absolute(): fn = path / fn if fn.exists(): filename = fn.expanduser() break include = filename.read_text() if section is not None: include = re.findall(f'(?ms)^\\s*\\!\\$PY:BEGIN:{section} *\n(.*\n) *\\!\\$PY:END:{section}', include)[0] source = source.replace(i, include) Path(outfile).expanduser().write_text(source) def build(*args, **kwargs): """ shortcut method for simple builds """ BaseBuild(*args, **kwargs).run()