desc stringlengths 3 26.7k | decl stringlengths 11 7.89k | bodies stringlengths 8 553k |
|---|---|---|
def __init__(self):
    """Initialize the guesser with the fallback build system."""
    # 'generic' is used whenever no specific build system is detected.
    self.build_system = 'generic'
def __call__(self, stage, url):
    """Guess the build system used by a project.

    Inspects the download URL and the archive contents in ``stage`` and
    sets ``self.build_system`` accordingly; leaves the current value in
    place when nothing matches.

    Args:
        stage: stage object whose ``archive_file`` points at the archive.
        url: URL the archive was downloaded from.
    """
    # Octave packages are recognizable only by their download location.
    if 'downloads.sourceforge.net/octave/' in url:
        self.build_system = 'octave'
        return

    # Telltale files paired with the build system they indicate.
    # Order matters: the first pattern that matches wins.
    clues = [
        ('/CMakeLists\\.txt$', 'cmake'),
        ('/configure$', 'autotools'),
        ('/configure\\.(in|ac)$', 'autoreconf'),
        ('/Makefile\\.am$', 'autoreconf'),
        ('/SConstruct$', 'scons'),
        ('/waf$', 'waf'),
        ('/setup\\.py$', 'python'),
        ('/NAMESPACE$', 'r'),
        ('/WORKSPACE$', 'bazel'),
        ('/Build\\.PL$', 'perlbuild'),
        ('/Makefile\\.PL$', 'perlmake'),
        ('/.*\\.pro$', 'qmake'),
        ('/(GNU)?[Mm]akefile$', 'makefile'),
    ]

    # List the archive contents. This is best-effort: if the listing tool
    # is missing or fails, detection simply sees no output.
    # BUG FIX: the original used bare `except:`, which also swallows
    # SystemExit/KeyboardInterrupt; narrowed to Exception.
    if stage.archive_file.endswith('.zip'):
        try:
            unzip = which('unzip')
            output = unzip('-lq', stage.archive_file, output=str)
        except Exception:
            output = ''
    else:
        try:
            tar = which('tar')
            output = tar('--exclude=*/*/*', '-tf', stage.archive_file,
                         output=str)
        except Exception:
            output = ''

    lines = output.split('\n')
    for pattern, bs in clues:
        if any(re.search(pattern, line) for line in lines):
            self.build_system = bs
            break
def __init__(self, url_or_fetch_strategy, name=None, mirror_path=None, keep=False, path=None, lock=True, search_fn=None):
    """Create a stage object.

    Parameters:
        url_or_fetch_strategy: URL of the archive to be downloaded into
            this stage, OR a valid FetchStrategy.
        name: If a name is provided, then this stage is a named stage
            and will persist between runs (or if you construct another
            stage object later). If name is not provided, then this
            stage will be given a unique name automatically.
        mirror_path: If provided, Stage will search Spack's mirrors for
            this archive at the mirror_path, before using the default
            fetch strategy.
        keep: By default, when used as a context manager, the Stage is
            deleted on exit when no exceptions are raised. Pass True to
            keep the stage intact even if no exceptions are raised.
        path: If provided, use this exact directory instead of one under
            spack.stage_path.
        lock: Whether to take a per-stage-name advisory lock.
        search_fn: Optional callable returning extra fetch strategies,
            consulted by fetch() after the static ones.
    """
    # Strings are treated as URLs; anything else must already be a
    # FetchStrategy instance.
    if isinstance(url_or_fetch_strategy, string_types):
        self.fetcher = fs.from_url(url_or_fetch_strategy)
    elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
        self.fetcher = url_or_fetch_strategy
    else:
        raise ValueError("Can't construct Stage without url or fetch strategy")
    self.fetcher.set_stage(self)
    # Keep the original fetcher around; fetch() may temporarily swap in
    # mirror/cache fetchers and needs to restore this on failure.
    self.default_fetcher = self.fetcher
    self.search_fn = search_fn
    # Mirrored archives of VCS checkouts may have no known checksum;
    # fetch() recomputes this flag.
    self.skip_checksum_for_mirror = True
    self.name = name
    if (name is None):
        # Unnamed stages get a unique temp-style name so they don't collide.
        self.name = (_stage_prefix + next(tempfile._get_candidate_names()))
    self.mirror_path = mirror_path
    if (path is not None):
        self.path = path
    else:
        self.path = join_path(spack.stage_path, self.name)
    self.keep = keep
    # One lock per stage name, shared process-wide. The byte offset inside
    # the shared lock file is derived from the SHA-1 of the stage name.
    self._lock = None
    if lock:
        if (self.name not in Stage.stage_locks):
            sha1 = hashlib.sha1(self.name.encode('utf-8')).digest()
            lock_id = prefix_bits(sha1, bit_length(sys.maxsize))
            stage_lock_path = join_path(spack.stage_path, '.lock')
            Stage.stage_locks[self.name] = llnl.util.lock.Lock(stage_lock_path, lock_id, 1)
        self._lock = Stage.stage_locks[self.name]
def __enter__(self):
    """Acquire the stage lock (if any), create the stage directory, and
    return self for use in a with-statement."""
    lock = self._lock
    if lock is not None:
        lock.acquire_write(timeout=60)
    self.create()
    return self
def __exit__(self, exc_type, exc_val, exc_tb):
    """Tear down the stage on context exit.

    The directory is destroyed only when no exception occurred AND the
    stage was not constructed with keep=True. The lock, if held, is
    always released.
    """
    should_destroy = (exc_type is None) and (not self.keep)
    if should_destroy:
        self.destroy()
    lock = self._lock
    if lock is not None:
        lock.release_write()
def _need_to_create_path(self):
    """Sanity-check the stage path since we last looked.

    Returns True when the path must be (re)created, False when it
    already exists and is usable. Stale files and dangling or foreign
    symlinks are removed along the way.
    """
    if not os.path.exists(self.path):
        return True

    # A non-directory at the stage location is stale junk; clear it out.
    if not os.path.isdir(self.path):
        os.unlink(self.path)
        return True

    # A plain directory is fine as-is.
    if not os.path.islink(self.path):
        return False

    # Symlinked stage: only valid when it points inside the current temp
    # root and its target still exists.
    tmp_root = get_tmp_root()
    if tmp_root is not None:
        target = os.path.realpath(self.path)
        if target.startswith(os.path.realpath(tmp_root)) and os.path.exists(target):
            return False

    # Dangling link or link into a stale temp area: drop it.
    os.unlink(self.path)
    return True
@property
def expected_archive_files(self):
    """All paths where the source archive could legitimately appear."""
    candidates = []
    fetcher = self.default_fetcher
    if isinstance(fetcher, fs.URLFetchStrategy):
        candidates.append(os.path.join(self.path, os.path.basename(fetcher.url)))
    if self.mirror_path:
        candidates.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
    return candidates
@property
def archive_file(self):
    """The archive file actually present in this stage, or None."""
    return next(
        (candidate for candidate in self.expected_archive_files
         if os.path.exists(candidate)),
        None)
@property
def source_path(self):
    """Path to the expanded/checked-out source code.

    Returns the first subdirectory found in the stage (assuming nothing
    besides the archive lives there). For non-expanding URL fetchers the
    stage path itself is returned; otherwise None when nothing has been
    expanded yet.
    """
    if isinstance(self.fetcher, fs.URLFetchStrategy) and not self.fetcher.expand_archive:
        return self.path
    for entry in os.listdir(self.path):
        candidate = os.path.join(self.path, entry)
        if os.path.isdir(candidate):
            return candidate
    return None
def chdir(self):
    """Change into the stage directory, or die if it is not set up."""
    if not os.path.isdir(self.path):
        raise ChdirError('Setup failed: no such directory: ' + self.path)
    os.chdir(self.path)
def fetch(self, mirror_only=False):
    """Download an archive or check out code from a repository.

    Candidate fetchers are tried in priority order: the local fetch
    cache first, then configured mirrors, then the default fetcher
    (skipped when mirror_only is set), and finally any fetchers produced
    by ``self.search_fn``. Raises ``fs.FetchError`` when every candidate
    fails.
    """
    self.chdir()
    fetchers = []
    if (not mirror_only):
        fetchers.append(self.default_fetcher)
    # Assume no checksum is available until we learn otherwise below.
    self.skip_checksum_for_mirror = True
    if self.mirror_path:
        mirrors = spack.config.get_config('mirrors')
        # Ensure each root ends in '/' so urljoin appends rather than
        # replaces the last path component.
        mirror_roots = [(root if root.endswith('/') else (root + '/')) for root in mirrors.values()]
        urls = [urljoin(root, self.mirror_path) for root in mirror_roots]
        digest = None
        expand = True
        extension = None
        if isinstance(self.default_fetcher, fs.URLFetchStrategy):
            digest = self.default_fetcher.digest
            expand = self.default_fetcher.expand_archive
            extension = self.default_fetcher.extension
        # Only verify mirror downloads when the default fetcher knows a
        # digest (VCS checkouts archived on a mirror have none).
        self.skip_checksum_for_mirror = (not bool(digest))
        # insert(0, ...) gives mirrors priority over the default fetcher,
        # and the cache (inserted last) priority over mirrors.
        for url in urls:
            fetchers.insert(0, fs.URLFetchStrategy(url, digest, expand=expand, extension=extension))
        if self.default_fetcher.cachable:
            fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path, digest, expand=expand, extension=extension))
    def generate_fetchers():
        # Static fetchers first; dynamic search is deferred until the
        # static ones are exhausted since it can be expensive.
        for fetcher in fetchers:
            (yield fetcher)
        if (self.search_fn and (not mirror_only)):
            dynamic_fetchers = self.search_fn()
            for fetcher in dynamic_fetchers:
                (yield fetcher)
    for fetcher in generate_fetchers():
        try:
            fetcher.set_stage(self)
            self.fetcher = fetcher
            self.fetcher.fetch()
            break
        except spack.fetch_strategy.NoCacheError as e:
            # Cache miss is expected; quietly move on to the next fetcher.
            continue
        except spack.error.SpackError as e:
            tty.msg(('Fetching from %s failed.' % fetcher))
            tty.debug(e)
            continue
    else:
        # for/else: the loop ran out of fetchers without a single success.
        errMessage = ('All fetchers failed for %s' % self.name)
        self.fetcher = self.default_fetcher
        raise fs.FetchError(errMessage, None)
def check(self):
    """Verify the downloaded archive against its checksum digest.

    When the archive came from a mirror without a known digest (e.g. an
    archived VCS checkout), only a warning is emitted.
    """
    from_mirror = self.fetcher is not self.default_fetcher
    if from_mirror and self.skip_checksum_for_mirror:
        tty.warn('Fetching from mirror without a checksum!', 'This package is normally checked out from a version control system, but it has been archived on a spack mirror. This means we cannot know a checksum for the tarball in advance. Be sure that your connection to this mirror is secure!')
    else:
        self.fetcher.check()
def expand_archive(self):
    """Expand the downloaded archive into the stage, unless source is
    already present there."""
    if self.source_path:
        tty.msg(('Already staged %s in %s' % (self.name, self.path)))
    else:
        self.fetcher.expand()
        tty.msg(('Created stage in %s' % self.path))
def chdir_to_source(self):
    """Change into the expanded archive directory.

    Raises StageError when the archive has not been expanded or turned
    out to be empty.
    """
    src = self.source_path
    if not src:
        raise StageError('Attempt to chdir before expanding archive.')
    os.chdir(src)
    if not os.listdir(src):
        raise StageError(('Archive was empty for %s' % self.name))
def restage(self):
    """Remove any expanded archive path and re-expand from the archive
    by delegating to the fetcher's reset."""
    self.fetcher.reset()
def create(self):
    """Create the stage directory.

    When a temporary root is available, the real directory is made there
    and exposed through a symlink under spack.stage_path; otherwise the
    directory is created directly under spack.stage_path.
    """
    mkdirp(spack.stage_path)
    remove_if_dead_link(self.path)
    if self._need_to_create_path():
        tmp_root = get_tmp_root()
        if tmp_root is None:
            mkdirp(self.path)
        else:
            # Build in scratch space, then link it into the stage area.
            scratch = tempfile.mkdtemp('', _stage_prefix, tmp_root)
            tty.debug(('link %s -> %s' % (self.path, scratch)))
            os.symlink(scratch, self.path)
    ensure_access(self.path)
def destroy(self):
    """Remove this stage directory and keep the process cwd valid."""
    remove_linked_tree(self.path)
    # If the process was sitting inside the removed tree, its cwd is now
    # dangling; hop up to the stage's parent directory.
    try:
        os.getcwd()
    except OSError:
        os.chdir(os.path.dirname(self.path))
def __reduce__(self):
    """Pickle support: rebuild this ChildError through _make_child_error
    from its salient fields."""
    state = (self.message, self.traceback, self.build_log, self.package_context)
    return (_make_child_error, state)
def is_prefix(self, namespace):
    """True if the namespace has a value, or is the prefix of one that
    does."""
    head, _, tail = namespace.partition(self._sep)
    if not head:
        # Consumed the whole namespace: it is at worst a prefix.
        return True
    child = self._subspaces.get(head)
    return child is not None and child.is_prefix(tail)
def is_leaf(self, namespace):
    """True if this namespace has no children in the trie.

    NOTE(review): when the namespace is fully consumed this returns
    ``bool(self._subspaces)`` — i.e. True when children *do* exist —
    which appears to contradict the stated purpose. Confirm the intended
    semantics against callers before relying on this docstring.
    """
    (first, sep, rest) = namespace.partition(self._sep)
    if (not first):
        return bool(self._subspaces)
    elif (first not in self._subspaces):
        # Unknown component: not present at all, so not a leaf.
        return False
    else:
        # Recurse into the child trie with the remaining components.
        return self._subspaces[first].is_leaf(rest)
def has_value(self, namespace):
    """True if a value has been set exactly at *namespace*."""
    head, _, tail = namespace.partition(self._sep)
    if not head:
        # Reached the addressed node: check its stored value.
        return self._value is not None
    child = self._subspaces.get(head)
    return child is not None and child.has_value(tail)
def __contains__(self, namespace):
    """Membership test: a namespace is 'in' the trie when a value has
    been set for it."""
    return self.has_value(namespace)
def add_default_arg(self, arg):
    """Append *arg* to the arguments passed on every invocation."""
    self.exe.append(arg)
def add_default_env(self, key, value):
    """Record an environment variable to set whenever the command runs.

    Parameters:
        key: name of the environment variable.
        value: value it should take.
    """
    self.default_env[key] = value
@property
def command(self):
    """The full command line (executable plus default arguments) as a
    single space-joined string."""
    return ' '.join(self.exe)
@property
def name(self):
    """Basename of the executable (the command name without directories)."""
    return os.path.basename(self.path)
@property
def path(self):
    """Filesystem path of the executable — the first element of the
    stored command vector."""
    return self.exe[0]
def __call__(self, *args, **kwargs):
    """Run this executable in a subprocess.

    Parameters:
        *args (str): command-line arguments appended after the default
            arguments.

    Keyword Arguments:
        env (dict): environment for the subprocess. When absent, the
            parent environment plus default_env is used; when given, it
            is layered on top of default_env only.
        fail_on_error (bool): raise ProcessError on nonzero exit
            (default True). The exit code is stored in ``self.returncode``
            either way.
        ignore_errors (int or list): exit codes that never raise, even
            with fail_on_error.
        input / output / error: where to read stdin / send stdout and
            stderr. Each may be an open stream, a filename (opened for
            you), or the type ``str`` to capture the text and return it
            (not valid for input). If both output and error are ``str``,
            one concatenated string is returned.
    """
    # NOTE: 'env' is read with get(), not pop(); any leftover kwargs are
    # simply ignored rather than rejected.
    env_arg = kwargs.get('env', None)
    if (env_arg is None):
        # No explicit env: inherit the parent's and overlay defaults.
        env = os.environ.copy()
        env.update(self.default_env)
    else:
        # Explicit env: start from defaults only, then overlay the caller's.
        env = self.default_env.copy()
        env.update(env_arg)
    fail_on_error = kwargs.pop('fail_on_error', True)
    ignore_errors = kwargs.pop('ignore_errors', ())
    # Normalize a single code into a tuple for the 'in' test below.
    if isinstance(ignore_errors, int):
        ignore_errors = (ignore_errors,)
    input = kwargs.pop('input', None)
    output = kwargs.pop('output', None)
    error = kwargs.pop('error', None)
    if (input is str):
        raise ValueError('Cannot use `str` as input stream.')
    def streamify(arg, mode):
        # Map the user's spec to (stream, needs_close): filenames are
        # opened here (and must be closed), `str` means capture via PIPE.
        if isinstance(arg, string_types):
            return (open(arg, mode), True)
        elif (arg is str):
            return (subprocess.PIPE, False)
        else:
            return (arg, False)
    (ostream, close_ostream) = streamify(output, 'w')
    (estream, close_estream) = streamify(error, 'w')
    (istream, close_istream) = streamify(input, 'r')
    # Spack invokes commands directly (no shell), so quotes in arguments
    # are passed through literally — warn since that often surprises users.
    quoted_args = [arg for arg in args if re.search('^"|^\\\'|"$|\\\'$', arg)]
    if quoted_args:
        tty.warn('Quotes in command arguments can confuse scripts like configure.', 'The following arguments may cause problems when executed:', str('\n'.join([(' ' + arg) for arg in quoted_args])), "Quotes aren't needed because spack doesn't use a shell.", 'Consider removing them')
    cmd = (self.exe + list(args))
    # Shell-quoted rendering of the command, for logs and error messages.
    cmd_line = ("'%s'" % "' '".join(map((lambda arg: arg.replace("'", '\'"\'"\'')), cmd)))
    tty.debug(cmd_line)
    try:
        proc = subprocess.Popen(cmd, stdin=istream, stderr=estream, stdout=ostream, env=env)
        (out, err) = proc.communicate()
        rc = self.returncode = proc.returncode
        if (fail_on_error and (rc != 0) and (rc not in ignore_errors)):
            raise ProcessError(('Command exited with status %d:' % proc.returncode), cmd_line)
        if ((output is str) or (error is str)):
            # Captured text: concatenate whichever streams were requested.
            result = ''
            if (output is str):
                result += out.decode('utf-8')
            if (error is str):
                result += err.decode('utf-8')
            return result
    except OSError as e:
        raise ProcessError(('%s: %s' % (self.exe[0], e.strerror)), ('Command: ' + cmd_line))
    except subprocess.CalledProcessError as e:
        if fail_on_error:
            raise ProcessError(str(e), ('\nExit status %d when invoking command: %s' % (proc.returncode, cmd_line)))
    finally:
        # Only close streams this method opened itself.
        if close_ostream:
            ostream.close()
        if close_estream:
            estream.close()
        if close_istream:
            istream.close()
@property
def hash_name(self):
    """Name of the hash function this Checker uses (e.g. 'sha256')."""
    return self.hash_fun().name
def check(self, filename):
    """Checksum *filename* and compare against self.hexdigest.

    Returns True on a match; the actual digest is kept in self.sum.
    """
    self.sum = checksum(self.hash_fun, filename, block_size=self.block_size)
    return self.sum == self.hexdigest
def construct_mapping(self, node, deep=False):
    """Construct YAML mappings as ordered dicts (syaml_dict) instead of
    plain dicts, preserving the order keys appear in the file.

    Raises ConstructorError for non-mapping nodes, unhashable keys, and
    duplicate keys.
    """
    if (not isinstance(node, MappingNode)):
        raise ConstructorError(None, None, ('expected a mapping node, but found %s' % node.id), node.start_mark)
    mapping = syaml_dict()
    for (key_node, value_node) in node.value:
        key = self.construct_object(key_node, deep=deep)
        # Keys must be hashable to live in a dict; report unhashable ones
        # with their source position.
        try:
            hash(key)
        except TypeError as exc:
            raise ConstructorError('while constructing a mapping', node.start_mark, ('found unacceptable key (%s)' % exc), key_node.start_mark)
        value = self.construct_object(value_node, deep=deep)
        # Duplicate keys are a hard error rather than silent last-wins.
        if (key in mapping):
            raise ConstructorError('while constructing a mapping', node.start_mark, ('found already in-use key (%s)' % key), key_node.start_mark)
        mapping[key] = value
    # Attach source-position marks for later error reporting.
    mark(mapping, node)
    return mapping
def ignore_aliases(self, _data):
    """Dumper hook: always True, so YAML anchors/aliases are never
    emitted."""
    return True
@property
def hidden_file_paths(self):
    """Hidden files used by the layout, relative to an install root.

    Layouts that keep no hidden state should return an empty container.
    Concrete layouts must override this.
    """
    raise NotImplementedError()
def all_specs(self):
    """Traverse every spec that has a directory under the root.
    Concrete layouts must override this."""
    raise NotImplementedError()
def relative_path_for_spec(self, spec):
    """Map *spec* to its unique location relative to the install root.
    Concrete layouts must override this."""
    raise NotImplementedError()
def create_install_directory(self, spec):
    """Make the installation directory for *spec*.
    Concrete layouts must override this."""
    raise NotImplementedError()
def check_installed(self, spec):
    """Return the spec's prefix when installed, None otherwise; raise on
    an inconsistent or corrupt install. Concrete layouts override this."""
    raise NotImplementedError()
def extension_map(self, spec):
    """Return {name: extension_spec} for extensions installed into *spec*;
    the returned dict is independent of internal state. Concrete layouts
    must override this."""
    raise NotImplementedError()
def check_extension_conflict(self, spec, ext_spec):
    """Verify ext_spec can be activated in spec, raising
    ExtensionAlreadyInstalledError or ExtensionConflictError otherwise.
    Concrete layouts must override this."""
    raise NotImplementedError()
def check_activated(self, spec, ext_spec):
    """Verify ext_spec can be removed from spec, raising
    NoSuchExtensionError otherwise. Concrete layouts must override this."""
    raise NotImplementedError()
def add_extension(self, spec, ext_spec):
    """Record ext_spec as installed into spec.
    Concrete layouts must override this."""
    raise NotImplementedError()
def remove_extension(self, spec, ext_spec):
    """Remove ext_spec from spec's installed-extension records.
    Concrete layouts must override this."""
    raise NotImplementedError()
def path_for_spec(self, spec):
    """Absolute path from the install root to *spec*'s directory."""
    _check_concrete(spec)
    rel = self.relative_path_for_spec(spec)
    # The layout must hand back a relative path, never one under root.
    assert not rel.startswith(self.root)
    return os.path.join(self.root, rel)
def remove_install_directory(self, spec):
    """Remove a spec's prefix, then prune newly-empty parent directories
    up to (but not including) the install root.

    Raises RemoveFailedError if the prefix tree cannot be removed.
    """
    path = self.path_for_spec(spec)
    # Never operate outside the install root.
    assert path.startswith(self.root)
    if os.path.exists(path):
        try:
            shutil.rmtree(path)
        except OSError as e:
            raise RemoveFailedError(spec, path, e)
    # Walk upward toward the root, deleting ancestors that became empty.
    path = os.path.dirname(path)
    while (path != self.root):
        if os.path.isdir(path):
            # Stop at the first non-empty ancestor; everything above it
            # is necessarily non-empty too.
            if os.listdir(path):
                return
            os.rmdir(path)
        path = os.path.dirname(path)
def write_spec(self, spec, path):
    """Serialize a concrete spec to YAML at *path*."""
    _check_concrete(spec)
    with open(path, 'w') as stream:
        spec.to_yaml(stream)
def read_spec(self, path):
    """Parse the YAML file at *path* into a concrete spec.

    Read failures are wrapped in SpecReadError unless spack.debug is on,
    in which case the original exception propagates.
    """
    try:
        with open(path) as stream:
            spec = spack.spec.Spec.from_yaml(stream)
    except Exception as e:
        if spack.debug:
            raise
        raise SpecReadError(('Unable to read file: %s' % path), ('Cause: ' + str(e)))
    # Specs on disk are concrete by construction.
    spec._mark_concrete()
    return spec
def spec_file_path(self, spec):
    """Full path to the stored spec file for *spec*."""
    _check_concrete(spec)
    return join_path(self.metadata_path(spec), self.spec_file_name)
def extension_file_path(self, spec):
    """Full path to an installed package's extension record file."""
    _check_concrete(spec)
    return join_path(self.metadata_path(spec), self.extension_file_name)
def _extension_map(self, spec):
    """Build (and cache) a dict {name: extension_spec} of all extensions
    currently installed for this package.

    Raises InvalidExtensionSpecError when a recorded extension's hash is
    unknown or its recorded prefix disagrees with the installed spec.
    """
    _check_concrete(spec)
    # Lazily populate the per-spec cache on first access.
    if (spec not in self._extension_maps):
        path = self.extension_file_path(spec)
        if (not os.path.exists(path)):
            # No record file means no extensions are activated.
            self._extension_maps[spec] = {}
        else:
            by_hash = self.specs_by_hash()
            exts = {}
            with open(path) as ext_file:
                yaml_file = yaml.load(ext_file)
                for entry in yaml_file['extensions']:
                    # Each entry is a single-key mapping: {name: {hash, path}}.
                    name = next(iter(entry))
                    dag_hash = entry[name]['hash']
                    prefix = entry[name]['path']
                    # Validate the record against what's actually installed.
                    if (dag_hash not in by_hash):
                        raise InvalidExtensionSpecError(('Spec %s not found in %s' % (dag_hash, prefix)))
                    ext_spec = by_hash[dag_hash]
                    if (prefix != ext_spec.prefix):
                        raise InvalidExtensionSpecError(('Prefix %s does not match spec hash %s: %s' % (prefix, dag_hash, ext_spec)))
                    exts[ext_spec.name] = ext_spec
            self._extension_maps[spec] = exts
    return self._extension_maps[spec]
def extension_map(self, spec):
    """Public, defensively-copied view of _extension_map(): mutating the
    result does not affect layout internals."""
    _check_concrete(spec)
    mapping = self._extension_map(spec)
    return mapping.copy()
def find_compilers(self, *paths):
    """Search for compilers with any Cray programming-environment module
    unloaded, to avoid falsely detecting Cray compiler wrappers.

    The detected compilers matter only when a user works with the Cray
    frontend OS as if it were a regular Linux environment. The original
    environment is restored afterwards.
    """
    env_bu = None
    if ('PE_ENV' in os.environ):
        # Back up the environment so the PrgEnv module state can be restored.
        env_bu = os.environ.copy()
        prg_env = ('PrgEnv-' + os.environ['PE_ENV'].lower())
        modulecmd = get_module_cmd()
        # The module command emits Python code that mutates os.environ.
        # BUG FIX: the original used the Python 2 `exec` *statement*,
        # which is a SyntaxError under Python 3; use the exec() function.
        # (NOTE: exec of tool output is trusted here, as in the original.)
        exec(compile(modulecmd('unload', prg_env, output=str,
                               error=os.devnull), '<string>', 'exec'))
    try:
        clist = super(CrayFrontend, self).find_compilers(*paths)
    finally:
        # Restore the saved environment even if the search raises, so a
        # failed detection doesn't leave the PrgEnv module unloaded.
        if (env_bu is not None):
            os.environ.clear()
            os.environ.update(env_bu)
    return clist
def __init__(self):
    """Autodetect the macOS version, back to 10.6 (snowleopard).

    Older or unknown versions fall back to the generic name 'macos'.
    """
    releases = {'10.6': 'snowleopard', '10.7': 'lion', '10.8': 'mountainlion', '10.9': 'mavericks', '10.10': 'yosemite', '10.11': 'elcapitan', '10.12': 'sierra'}
    # Keep only the major.minor part of the reported version.
    mac_ver = '.'.join(py_platform.mac_ver()[0].split('.')[:2])
    release_name = releases.get(mac_ver, 'macos')
    super(MacOs, self).__init__(release_name, mac_ver)
def register(self, spec, method):
    """Register an implementation of this method for a particular spec."""
    self.method_list.append((spec, method))
    if hasattr(self, '__name__'):
        # Every registered version must share the same method name.
        assert (self.__name__ == method.__name__)
    else:
        # First registration: adopt the wrapped method's metadata.
        functools.update_wrapper(self, method)
def __get__(self, obj, objtype):
    """Descriptor protocol: bind *obj* into __call__ so this multimethod
    works as an instance method, carrying the first registered method's
    metadata."""
    first_impl = self.method_list[0][1]
    bound = functools.partial(self.__call__, obj)
    return functools.wraps(first_impl)(bound)
def __call__(self, package_self, *args, **kwargs):
    """Dispatch to the first registered method whose spec the package
    satisfies; fall back to the default, then to a superclass method;
    otherwise raise NoSuchMethodError."""
    for (spec, method) in self.method_list:
        if package_self.spec.satisfies(spec):
            return method(package_self, *args, **kwargs)
    if self.default:
        return self.default(package_self, *args, **kwargs)
    else:
        # No match and no default: look for an implementation on the
        # package's superclass.
        superclass = super(package_self.__class__, package_self)
        superclass_fn = getattr(superclass, self.__name__, None)
        if callable(superclass_fn):
            return superclass_fn(*args, **kwargs)
        else:
            # NOTE(review): `spec` is the last loop value here; if
            # method_list were empty this would raise NameError instead —
            # confirm method_list is always non-empty at this point.
            raise NoSuchMethodError(type(package_self), self.__name__, spec, [m[0] for m in self.method_list])
def __call__(self, spec):
    """Return a sort key for *spec* based on the configured preference
    order. Not cached: Python's sort calls the key function at most once
    per element anyway."""
    order = self._specs_for_pkg(self.pkgname, self.component, self.vpkg)
    # Index of the first preference entry the spec satisfies; specs that
    # satisfy nothing sort after all configured entries.
    idx = next((pos for pos, candidate in enumerate(order)
                if spec.satisfies(candidate)), len(order))
    # An exact match ranks just ahead of specs that merely satisfy it.
    if idx < len(order) and order[idx] == spec:
        return idx - 0.5
    return idx
@classmethod
def _order_for_package(cls, pkgname, component, vpkg=None, all=True):
    """Look up a preference list from the packages config.

    Checks the entry for *pkgname* first, then (when *all* is set) the
    'all' entry; *component* selects e.g. 'version' or 'compiler', and
    *vpkg* optionally narrows a providers mapping. Returns a list of
    stripped strings, or [] when nothing is configured.
    """
    candidates = [pkgname]
    if all:
        candidates.append('all')
    for candidate in candidates:
        entry = cls._packages_config.get(candidate)
        if not entry:
            continue
        order = entry.get(component)
        if not order:
            continue
        if vpkg is not None:
            order = order.get(vpkg)
        if order:
            return [str(item).strip() for item in order]
    return []
@classmethod
def _specs_for_pkg(cls, pkgname, component, vpkg=None):
    """Return the configured sort order for pkgname/component/vpkg as a
    list of CompilerSpecs, VersionLists, or Specs, memoized in
    cls._spec_cache."""
    cache_key = (pkgname, component, vpkg)
    cached = cls._spec_cache.get(cache_key)
    if cached is not None:
        return cached
    # Cache miss: parse the raw config strings into typed spec objects.
    ctor = _spec_type(component)
    parsed = [ctor(entry)
              for entry in cls._order_for_package(pkgname, component, vpkg)]
    cls._spec_cache[cache_key] = parsed
    return parsed
@classmethod
def has_preferred_providers(cls, pkgname, vpkg):
    """True when *pkgname* itself (not 'all') configures preferred
    providers for the virtual package *vpkg*."""
    order = cls._order_for_package(pkgname, 'providers', vpkg, False)
    return bool(order)
@classmethod
def preferred_variants(cls, pkg_name):
    """Return {name: variant} of preferred variant settings for a package,
    keeping only variants the package actually declares."""
    # Package-specific settings win over the 'all' defaults.
    for pkg in (pkg_name, 'all'):
        variants = cls._packages_config.get(pkg, {}).get('variants', '')
        if variants:
            break
    # Config may give a list of variant strings; flatten to one string.
    if not isinstance(variants, string_types):
        variants = ' '.join(variants)
    pkg = spack.repo.get(pkg_name)
    spec = spack.spec.Spec(('%s %s' % (pkg_name, variants)))
    return {name: variant
            for name, variant in spec.variants.items()
            if name in pkg.variants}
@classmethod
def default_version(cls, comp):
    """Detect an Intel compiler version via '--version'.

    Typical output::
        icpc (ICC) 12.1.5 20120612
        ifort (IFORT) 12.1.5 20120612
    """
    version_regex = '\\((?:IFORT|ICC)\\) ([^ ]+)'
    return get_compiler_version(comp, '--version', version_regex)
@classmethod
def default_version(cls, comp):
    """Detect a clang version via '--version', memoized per compiler path.

    Linux output starts with 'clang version X'; Apple builds start with
    'Apple LLVM version X' and get an '-apple' suffix.
    """
    if comp not in cpr._version_cache:
        compiler = Executable(comp)
        output = compiler('--version', output=str, error=str)
        version = 'unknown'
        apple_match = re.search('^Apple LLVM version ([^ )]+)', output)
        if apple_match:
            version = (apple_match.group(1) + '-apple')
        else:
            plain_match = re.search('clang version ([^ )]+)', output)
            if plain_match:
                version = plain_match.group(1)
        cpr._version_cache[comp] = version
    return cpr._version_cache[comp]
def setup_custom_environment(self, pkg, env):
    """Point the Xcode toolchain at a copy wired to spack's wrappers.

    Some macOS build systems query the Xcode toolchain (via xcrun) for
    compilers instead of honoring CC/CXX, bypassing spack's wrappers. To
    inject spack, copy (a subset of) Xcode.app into the stage area,
    replace its compiler binaries with symlinks to the spack wrapper,
    and set DEVELOPER_DIR so xcrun and friends use that copy.
    """
    super(Clang, self).setup_custom_environment(pkg, env)
    # Only applies to Apple clang and packages that opted into Xcode.
    if ((not self.is_apple) or (not pkg.use_xcode)):
        return
    xcode_select = Executable('xcode-select')
    # xcode-select prints .../Contents/Developer; strip two components to
    # get the Xcode.app root.
    real_root = xcode_select('--print-path', output=str).strip()
    real_root = os.path.dirname(os.path.dirname(real_root))
    developer_root = os.path.join(spack.stage_path, 'xcode-select', self.name, str(self.version))
    xcode_link = os.path.join(developer_root, 'Xcode.app')
    if (not os.path.exists(developer_root)):
        tty.warn(('Copying Xcode from %s to %s in order to add spack wrappers to it. Please do not interrupt.' % (real_root, developer_root)))
        # Skip the biggest payloads we don't need (device platforms, docs,
        # swift) to keep the copy smaller.
        copytree(real_root, developer_root, symlinks=True, ignore=ignore_patterns('AppleTV*.platform', 'Watch*.platform', 'iPhone*.platform', 'Documentation', 'swift*'))
        real_dirs = ['Toolchains/XcodeDefault.xctoolchain/usr/bin', 'usr/bin']
        # Compiler entry points to replace with the spack 'cc' wrapper.
        bins = ['c++', 'c89', 'c99', 'cc', 'clang', 'clang++', 'cpp']
        for real_dir in real_dirs:
            dev_dir = os.path.join(developer_root, 'Contents', 'Developer', real_dir)
            for fname in os.listdir(dev_dir):
                if (fname in bins):
                    os.unlink(os.path.join(dev_dir, fname))
                    os.symlink(os.path.join(spack.build_env_path, 'cc'), os.path.join(dev_dir, fname))
        os.symlink(developer_root, xcode_link)
    env.set('DEVELOPER_DIR', xcode_link)
@classmethod
def default_version(cls, comp):
    """Detect an XL compiler version via '-qversion' (the standard
    option for XL compilers).

    Output looks like::
        IBM XL C/C++ for Linux, V11.1 (5724-X14)
        Version: 11.01.0000.0000
    or the analogous Fortran / AIX / Blue Gene banners.

    CONSISTENCY FIX: the first parameter of this @classmethod was named
    ``self``; renamed to ``cls`` to match the sibling default_version
    implementations. (Positional semantics are unchanged.)
    """
    return get_compiler_version(comp, '-qversion', '([0-9]?[0-9]\\.[0-9])')
@classmethod
def fc_version(cls, fc):
    """Report the XL Fortran compiler's version as the matching C/C++
    version.

    XL Fortran releases are numbered two major units ahead of the C/C++
    release they ship with (xlf 13.1 pairs with xlc 11.1); keeping the
    raw numbers would leave the default xl compiler entry with Fortran
    only and no C/C++. Versions >= 10 use a '.1' minor while those below
    10 use '.0', hence the extra 0.1 adjustment.
    """
    fver = get_compiler_version(fc, '-qversion', '([0-9]?[0-9]\\.[0-9])')
    # e.g. '13.1' -> 11.1
    cver = (float(fver) - 2)
    if (cver < 10):
        # e.g. 11.1 - 2 = 9.1 -> 9.0 (sub-10 releases use a .0 minor).
        # NOTE(review): this relies on float rounding landing exactly on
        # x.0/x.1 — holds for the known versions, but is fragile.
        cver = (cver - 0.1)
    return str(cver)
@classmethod
def default_version(cls, comp):
    """Detect a NAG compiler version via '-V'.

    Output looks like::
        NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
        Product NPL6A60NA for x86-64 Linux

    CONSISTENCY FIX: the first parameter of this @classmethod was named
    ``self``; renamed to ``cls`` to match the other default_version
    implementations. (Positional semantics are unchanged.)
    """
    return get_compiler_version(comp, '-V', 'NAG Fortran Compiler Release ([0-9.]+)')
@classmethod
def default_version(cls, comp):
    """Detect a PGI compiler version via ``-V``.

    Output looks like::
        pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge
    on x86-64, or::
        pgcc 17.4-0 linuxpower target on Linuxpower
    on PowerPC.
    """
    version_regex = 'pg[^ ]* ([0-9.]+)-[0-9]+ [^ ]+ target on '
    return get_compiler_version(comp, '-V', version_regex)
@classmethod
def default_version(cls, comp):
    """Detect an XL compiler version via '-qversion' (the standard XL
    option).

    Output looks like::
        IBM XL C/C++ for Linux, V11.1 (5724-X14)
        Version: 11.01.0000.0000
    or the analogous Fortran / AIX / Blue Gene banners.
    """
    version_regex = '([0-9]?[0-9]\\.[0-9])'
    return get_compiler_version(comp, '-qversion', version_regex)
@classmethod
def fc_version(cls, fc):
    """Report the XL Fortran compiler's version as the matching C/C++
    version.

    XL Fortran releases are numbered two major units ahead of the C/C++
    release they ship with (xlf 13.1 pairs with xlc 11.1); keeping the
    raw numbers would leave the default xl compiler entry with Fortran
    only and no C/C++. Versions >= 10 use a '.1' minor while those below
    10 use '.0', hence the extra 0.1 adjustment.
    """
    fver = get_compiler_version(fc, '-qversion', '([0-9]?[0-9]\\.[0-9])')
    # e.g. '13.1' -> 11.1
    cver = (float(fver) - 2)
    if (cver < 10):
        # e.g. 11.1 - 2 = 9.1 -> 9.0 (sub-10 releases use a .0 minor).
        # NOTE(review): relies on float rounding landing exactly on
        # x.0/x.1 — holds for the known versions, but is fragile.
        cver = (cver - 0.1)
    return str(cver)
@property
def use_name(self):
    """Name the module command uses to refer to the package.

    Subclasses should implement this to return the name the module
    command uses to refer to the package.
    """
    # Render the spec through the configured naming scheme, then
    # normalize any '/' separators with join_path.
    formatted = self.spec.format(self.naming_scheme)
    formatted = join_path(*formatted.split('/'))
    # Append any configured suffixes, '-'-separated.
    return '-'.join([formatted] + self._get_suffixes())
|
def write(self, overwrite=False):
    """Writes out a module file for this object.

    This method employs a template pattern and expects derived classes to:

    - override the header property
    - provide formats for autoload, prerequisites and environment changes

    :param bool overwrite: if False and the module file already exists,
        warn and skip writing it
    """
    # Blacklisted specs never get a module file.
    if self.blacklisted:
        return
    tty.debug((' DCTB WRITE : %s [%s]' % (self.spec.cshort_spec, self.file_name)))
    # Make sure the directory that will hold the module file exists.
    module_dir = os.path.dirname(self.file_name)
    if (not os.path.exists(module_dir)):
        mkdirp(module_dir)
    # Start from the environment changes found by inspecting the prefix.
    env = inspect_path(self.spec.prefix)
    spack_env = EnvironmentModifications()
    # Let each dependency (and its parent classes) contribute package-
    # and environment-level modifications for this spec.
    for item in dependencies(self.spec, 'all'):
        package = self.spec[item.name].package
        modules = parent_class_modules(package.__class__)
        for mod in modules:
            set_module_variables_for_package(package, mod)
        set_module_variables_for_package(package, package.module)
        package.setup_dependent_package(self.pkg.module, self.spec)
        package.setup_dependent_environment(spack_env, env, self.spec)
    # Finally let the package being modulefile'd set up its own environment.
    set_module_variables_for_package(self.pkg, self.pkg.module)
    self.spec.package.setup_environment(spack_env, env)
    # Merge in configuration-driven environment changes and filters.
    (module_configuration, conf_env) = parse_config_options(self)
    env.extend(conf_env)
    filters = module_configuration.get('filter', {}).get('environment_blacklist', {})
    # Assemble the module file content: header, autoloads, prerequisites,
    # environment commands, then subclass-specific content.
    module_file_content = self.header
    for x in filter_blacklisted(module_configuration.pop('autoload', []), self.name):
        module_file_content += self.autoload(x)
    for x in module_configuration.pop('load', []):
        # 'load' entries reuse the autoload format.
        module_file_content += self.autoload(x)
    for x in filter_blacklisted(module_configuration.pop('prerequisites', []), self.name):
        module_file_content += self.prerequisite(x)
    for line in self.process_environment_command(filter_environment_blacklist(env, filters)):
        module_file_content += line
    for line in self.module_specific_content(module_configuration):
        module_file_content += line
    # Honor an existing file unless overwrite was requested.
    if ((not overwrite) and os.path.exists(self.file_name)):
        message = 'Module file already exists : skipping creation\n'
        message += 'file : {0.file_name}\n'
        message += 'spec : {0.spec}'
        tty.warn(message.format(self))
        return
    with open(self.file_name, 'w') as f:
        f.write(module_file_content)
|
@property
def file_name(self):
    """Subclasses should implement this to return the name of the file
    where this module lives.

    :raises NotImplementedError: always, in this base implementation
    """
    raise NotImplementedError()
|
def _hierarchy_token_combinations(self):
    """Yields all the relevant combinations that could appear in the
    hierarchy.

    Only combinations that include the 'compiler' token are yielded.
    """
    # Consider every subset size from the empty set up to all tokens.
    for size in range(len(self.hierarchy_tokens) + 1):
        for combination in itertools.combinations(self.hierarchy_tokens, size):
            if 'compiler' in combination:
                yield combination
|
def _hierarchy_to_be_provided(self):
    """Filters a list of hierarchy tokens and yields only the one that
    we need to provide.
    """
    for combination in self._hierarchy_token_combinations():
        # Keep a combination if at least one of its tokens is provided.
        if any(token in self.provides for token in combination):
            yield combination
|
@classmethod
def default_version(cls, cc):
    """Override just this to override all compiler version functions.

    :param cc: path to the compiler executable
    :return: version as reported by ``dumpversion``
    """
    return dumpversion(cc)
|
@classmethod
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Finds compilers in the paths supplied.

    Looks for all combinations of ``compiler_names`` with the
    ``prefixes`` and ``suffixes`` defined for this compiler
    class. If any compilers match the compiler_names,
    prefixes, or suffixes, uses ``detect_version`` to figure
    out what version the compiler is.

    This returns a dict with compilers grouped by (prefix,
    suffix, version) tuples. This can be further organized by
    find().
    """
    if (not path):
        path = get_path('PATH')
    # The empty string means "no prefix / no suffix", i.e. the bare name.
    prefixes = ([''] + cls.prefixes)
    suffixes = ([''] + cls.suffixes)
    checks = []
    for directory in path:
        # Skip PATH entries that are not readable, searchable directories.
        if (not (os.path.isdir(directory) and os.access(directory, (os.R_OK | os.X_OK)))):
            continue
        files = os.listdir(directory)
        for exe in files:
            full_path = join_path(directory, exe)
            # Try every (prefix, name, suffix) combination against this file.
            prod = itertools.product(prefixes, compiler_names, suffixes)
            for (pre, name, suf) in prod:
                regex = ('^(%s)%s(%s)$' % (pre, re.escape(name), suf))
                match = re.match(regex, exe)
                if match:
                    key = ((full_path,) + match.groups())
                    checks.append(key)

    def check(key):
        # Probe one candidate; return None on any failure so it can be
        # filtered out below instead of aborting the whole search.
        try:
            (full_path, prefix, suffix) = key
            version = detect_version(full_path)
            return (version, prefix, suffix, full_path)
        except ProcessError as e:
            tty.debug(("Couldn't get version for compiler %s" % full_path), e)
            return None
        except Exception as e:
            tty.debug(('Error while executing candidate compiler %s' % full_path), ('%s: %s' % (e.__class__.__name__, e)))
            return None

    # parmap probes candidates; drop those that failed (None).
    successful = [k for k in parmap(check, checks) if (k is not None)]
    # Reverse before building the dict: on (version, prefix, suffix) key
    # collisions dict() keeps the last write, so entries found earlier in
    # the search take precedence.
    successful.reverse()
    return dict((((v, p, s), path) for (v, p, s, path) in successful))
|
def _find_full_path(self, path):
    """Return the actual path for a tool.

    Some toolchains use forwarding executables (particularly Xcode-based
    toolchains) which can be manipulated by external environment variables.
    This method should be used to extract the actual path used for a tool
    by finding out the end executable the forwarding executables end up
    running.

    :param path: path of the tool as found on the search path
    :return: the same path; subclasses override to resolve forwarding
    """
    return path
|
def setup_custom_environment(self, pkg, env):
    """Set any environment variables necessary to use the compiler.

    No-op in this base implementation; subclasses override as needed.
    """
    pass
|
def __repr__(self):
    """Return a string representation of the compiler toolchain."""
    # Delegate to __str__ so both representations stay in sync.
    return str(self)
|
def __str__(self):
    """Return a string representation of the compiler toolchain."""
    # List every toolchain component, one per line, indented under the name.
    components = (self.cc, self.cxx, self.f77, self.fc,
                  self.modules, str(self.operating_system))
    listing = '\n '.join(str(component) for component in components)
    return ('%s(%s)' % (self.name, listing))
|
@property
def build_directory(self):
    """Returns the directory containing the main Makefile.

    :return: build directory
    """
    return self.stage.source_path
|
def edit(self, spec, prefix):
    """Edits the Makefile before calling make. This phase cannot
    be defaulted.

    This base implementation only logs a message; packages that need
    Makefile edits must override it.
    """
    tty.msg('Using default implementation: skipping edit phase.')
|
def build(self, spec, prefix):
    """Calls make, passing :py:attr:`~.MakefilePackage.build_targets`
    as targets.
    """
    with working_dir(self.build_directory):
        # 'make' is looked up on the package module set up for the build.
        inspect.getmodule(self).make(*self.build_targets)
|
def install(self, spec, prefix):
    """Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
    as targets.
    """
    with working_dir(self.build_directory):
        # 'make' is looked up on the package module set up for the build.
        inspect.getmodule(self).make(*self.install_targets)
|
def check(self):
    """Searches the Makefile for targets ``test`` and ``check``
    and runs them if found.
    """
    with working_dir(self.build_directory):
        # Each target is only executed if present in the Makefile.
        self._if_make_target_execute('test')
        self._if_make_target_execute('check')
|
def installcheck(self):
    """Searches the Makefile for an ``installcheck`` target
    and runs it if found.
    """
    with working_dir(self.build_directory):
        self._if_make_target_execute('installcheck')
|
@run_after('autoreconf')
def _do_patch_config_guess(self):
    """Replace a broken ``config.guess`` with a working one.

    Some packages ship with an older config.guess and need to have
    this updated when installed on a newer architecture. In particular,
    config.guess fails for PPC64LE for version prior to a 2013-06-10
    build date (automake 1.13.4).

    :raises RuntimeError: if no working replacement config.guess is found
    """
    # Only act when patching is enabled and the target is linux-rhel7-ppc64le.
    if ((not self.patch_config_guess) or (not self.spec.satisfies('arch=linux-rhel7-ppc64le'))):
        return
    my_config_guess = None
    config_guess = None
    # Locate the package's own config.guess: top level first, then one
    # directory down (relative to the current working directory).
    if os.path.exists('config.guess'):
        my_config_guess = 'config.guess'
    else:
        d = '.'
        dirs = [os.path.join(d, o) for o in os.listdir(d) if os.path.isdir(os.path.join(d, o))]
        for dirname in dirs:
            path = os.path.join(dirname, 'config.guess')
            if os.path.exists(path):
                my_config_guess = path
    if (my_config_guess is not None):
        try:
            # If the shipped config.guess runs successfully there is
            # nothing to patch.
            check_call([my_config_guess], stdout=PIPE, stderr=PIPE)
            return
        except Exception:
            # Broken config.guess — fall through and look for a replacement.
            pass
    else:
        # No config.guess in the source tree: nothing to do.
        return
    # Prefer the config.guess shipped with the automake dependency.
    if ('automake' in self.spec):
        automake_path = os.path.join(self.spec['automake'].prefix, 'share', ('automake-' + str(self.spec['automake'].version)))
        path = os.path.join(automake_path, 'config.guess')
        if os.path.exists(path):
            config_guess = path
    # Fall back to any system-installed automake under /usr/share.
    if ((config_guess is None) and os.path.exists('/usr/share')):
        automake_dir = [s for s in os.listdir('/usr/share') if ('automake' in s)]
        if automake_dir:
            automake_path = os.path.join('/usr/share', automake_dir[0])
            path = os.path.join(automake_path, 'config.guess')
            if os.path.exists(path):
                config_guess = path
    if (config_guess is not None):
        try:
            # Verify the candidate runs, then copy it over the broken one,
            # first making the destination writable by the owner.
            check_call([config_guess], stdout=PIPE, stderr=PIPE)
            mod = ((stat(my_config_guess).st_mode & 511) | S_IWUSR)  # 511 == 0o777
            os.chmod(my_config_guess, mod)
            shutil.copyfile(config_guess, my_config_guess)
            return
        except Exception:
            pass
    raise RuntimeError('Failed to find suitable config.guess')
|
@property
def configure_directory(self):
    """Returns the directory where 'configure' resides.

    :return: directory where to find configure
    """
    return self.stage.source_path
|
@property
def build_directory(self):
    """Override to provide another place to build the package.

    Defaults to building in the same directory as configure.
    """
    return self.configure_directory
|
def autoreconf(self, spec, prefix):
    """Regenerate the configure script if it is missing.

    Not needed usually, configure should be already there.

    :raises RuntimeError: if any of the required autotools is not in the spec
    """
    # Nothing to do if configure already exists.
    if os.path.exists(self.configure_abs_path):
        return
    # All of these tools must be available to regenerate configure.
    autotools = ['m4', 'autoconf', 'automake', 'libtool']
    missing = [x for x in autotools if (x not in spec)]
    if missing:
        msg = 'Cannot generate configure: missing dependencies {0}'
        raise RuntimeError(msg.format(missing))
    tty.msg('Configure script not found: trying to generate it')
    tty.warn('*********************************************************')
    tty.warn('* If the default procedure fails, consider implementing *')
    tty.warn('* a custom AUTORECONF phase in the package *')
    tty.warn('*********************************************************')
    with working_dir(self.configure_directory):
        m = inspect.getmodule(self)
        m.libtoolize()
        m.aclocal()
        autoreconf_args = ['-ivf']
        if ('pkg-config' in spec):
            # Make autoreconf aware of pkg-config's m4 macros.
            autoreconf_args += ['-I', join_path(spec['pkg-config'].prefix, 'share', 'aclocal')]
        autoreconf_args += self.autoreconf_extra_args
        m.autoreconf(*autoreconf_args)
|
@run_after('autoreconf')
def set_configure_or_die(self):
    """Checks the presence of a ``configure`` file after the
    autoreconf phase. If it is found sets a module attribute
    appropriately, otherwise raises an error.

    :raises RuntimeError: if a configure script is not found in
        :py:meth:`~AutotoolsPackage.configure_directory`
    """
    if (not os.path.exists(self.configure_abs_path)):
        msg = 'configure script not found in {0}'
        raise RuntimeError(msg.format(self.configure_directory))
    # Export a module-level 'configure' Executable for later phases.
    inspect.getmodule(self).configure = Executable(self.configure_abs_path)
|
def configure_args(self):
    """Produces a list containing all the arguments that must be passed to
    configure, except ``--prefix`` which will be pre-pended to the list.

    :return: list of arguments for configure
    """
    # No extra options by default; subclasses override this hook.
    args = []
    return args
|
def configure(self, spec, prefix):
    """Runs configure with the arguments specified in
    :py:meth:`~.AutotoolsPackage.configure_args`
    and an appropriately set prefix.
    """
    options = (['--prefix={0}'.format(prefix)] + self.configure_args())
    # build_directory may differ from the source dir; create it if needed.
    with working_dir(self.build_directory, create=True):
        inspect.getmodule(self).configure(*options)
|
def build(self, spec, prefix):
    """Makes the build targets specified by
    :py:attr:``~.AutotoolsPackage.build_targets``
    """
    with working_dir(self.build_directory):
        inspect.getmodule(self).make(*self.build_targets)
|
def install(self, spec, prefix):
    """Makes the install targets specified by
    :py:attr:``~.AutotoolsPackage.install_targets``
    """
    with working_dir(self.build_directory):
        inspect.getmodule(self).make(*self.install_targets)
|
def check(self):
    """Searches the Makefile for targets ``test`` and ``check``
    and runs them if found.
    """
    with working_dir(self.build_directory):
        # Each target is only executed if present in the Makefile.
        self._if_make_target_execute('test')
        self._if_make_target_execute('check')
|
def with_or_without(self, name, active_parameters=None):
    """Inspects the multi-valued variant 'name' and returns the configure
    arguments that activate / deactivate the selected feature.

    :param str name: name of a valid multi-valued variant
    :param callable active_parameters: if present accepts a single value
        and returns the parameter to be used leading to an entry of the
        type '--with-{name}={parameter}'
    """
    return self._activate_or_not('with', 'without', name, active_parameters)
|
def enable_or_disable(self, name, active_parameters=None):
    """Inspects the multi-valued variant 'name' and returns the configure
    arguments that activate / deactivate the selected feature.

    Same as :py:meth:`with_or_without` but using the
    '--enable-{name}' / '--disable-{name}' prefixes.
    """
    return self._activate_or_not('enable', 'disable', name, active_parameters)
|
def installcheck(self):
    """Searches the Makefile for an ``installcheck`` target
    and runs it if found.
    """
    with working_dir(self.build_directory):
        self._if_make_target_execute('installcheck')
|
@property
def build_directory(self):
    """The directory containing the ``waf`` file."""
    return self.stage.source_path
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.