| Instruction | input_code | output_code | diff_patch |
|---|---|---|---|
Don't bail when a dependency can't be found
When a dependency for a package can't be found, conda currently bails out completely. But this can happen for, e.g., just some old builds of a package, so instead we should simply exclude any such package from the solver.
| conda/resolve.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
    """Return a sortable verlib.NormalizedVersion for *version*.

    Release-candidate markers ('rc') are mapped onto a high '.dev' tag so
    they sort as pre-releases, and a bare trailing '.dev' gets a '0'
    appended.  Falls back to returning the massaged string itself when
    verlib cannot parse it.
    """
    candidate = version.replace('rc', '.dev99999')
    if candidate.endswith('.dev'):
        candidate = candidate + '0'
    try:
        return verlib.NormalizedVersion(candidate)
    except verlib.IrrationalVersionError:
        return candidate
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')


def ver_eval(version, constraint):
    """
    return the Boolean result of a comparison between two versions, where the
    second argument includes the comparison operator. For example,
    ver_eval('1.2', '>=1.1') will return True.
    """
    m = const_pat.match(constraint)
    if m is None:
        raise RuntimeError("Did not recognize version specification: %r" %
                           constraint)
    op, b = m.groups()
    a = version
    # Normalize both sides; comparison falls back to the raw strings when
    # the normalized forms are not mutually orderable.
    na = normalized_version(a)
    nb = normalized_version(b)
    comparisons = {
        '==': lambda x, y: x == y,
        '>=': lambda x, y: x >= y,
        '<=': lambda x, y: x <= y,
        '>': lambda x, y: x > y,
        '<': lambda x, y: x < y,
        '!=': lambda x, y: x != y,
    }
    compare = comparisons.get(op)
    if compare is None:
        raise RuntimeError("Did not recognize version comparison operator: %r" %
                           constraint)
    try:
        return compare(na, nb)
    except TypeError:
        return compare(a, b)
class VersionSpec(object):
    """One alternative of a version specification.

    Either a comma-separated list of relational constraints (e.g.
    '>=1.1,<2'), or a glob-style pattern (e.g. '1.7*') that is compiled
    to an anchored regular expression.
    """

    def __init__(self, spec):
        assert '|' not in spec
        self.regex = not spec.startswith(('=', '<', '>', '!'))
        if self.regex:
            # Escape dots, turn '*' into '.*', and anchor at the end.
            escaped = spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(r'(%s)$' % escaped)
        else:
            self.constraints = spec.split(',')

    def match(self, version):
        """Return True when *version* satisfies this spec."""
        if not self.regex:
            return all(ver_eval(version, c) for c in self.constraints)
        return bool(self.pat.match(version))
class MatchSpec(object):
    """A package requirement string.

    Forms (strictness 1..3): 'name', 'name version-spec', or
    'name version build'.  A strictness-2 version spec may contain
    '|'-separated alternatives, each parsed as a VersionSpec.
    """

    def __init__(self, spec):
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3
        self.name = parts[0]
        if self.strictness == 2:
            self.vspecs = [VersionSpec(alt) for alt in parts[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])

    def match(self, fn):
        """Return True when package filename *fn* satisfies this spec."""
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 1:
            return True
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        return bool((version, build) == self.ver_build)

    def to_filename(self):
        # Only a fully explicit (strictness-3) spec pins a single file.
        if self.strictness != 3:
            return None
        return self.name + '-%s-%s.tar.bz2' % self.ver_build

    def __eq__(self, other):
        return other.spec == self.spec

    def __hash__(self):
        return hash(self.spec)

    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)

    def __str__(self):
        return self.spec
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """
    def __init__(self, fn, info):
        # `fn` is the package filename; `info` is its record from the index.
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        # Cached parse of `version`; may remain a plain string when the
        # version is unparsable (see the TypeError fallbacks below).
        self.norm_version = normalized_version(self.version)

    # A __cmp__-based implementation was dropped when porting to Python 3:
    # http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp

    def __lt__(self, other):
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        try:
            # NOTE(review): the build strings are crossed here (other.build
            # in the left tuple, self.build in the right), which reverses
            # the build-string tie-break relative to the other fields —
            # confirm this is intentional.
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            # norm_version may be a raw string; fall back to comparing the
            # unparsed version strings.
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))

    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))

    def __gt__(self, other):
        # Derived from __lt__/__eq__; name mismatch raises via __lt__.
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self < other or self == other

    def __ge__(self, other):
        return self > other or self == other

    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
    """Dependency resolver over a repodata *index* (filename -> info dict).

    Builds CNF clauses over candidate package files and uses the
    pseudo-boolean machinery in conda.logic to choose a consistent,
    version-maximal set of packages satisfying the requested specs.
    """

    def __init__(self, index):
        self.index = index
        self.groups = defaultdict(list)  # map name to list of filenames
        for fn, info in iteritems(index):
            self.groups[info['name']].append(fn)
        # fn -> [MatchSpec, ...]; a plain dict (not @memoize) so tests can
        # reset it with `self.msd_cache = {}`.
        self.msd_cache = {}

    def find_matches(self, ms):
        # Yield (sorted) filenames of index packages matching MatchSpec ms.
        for fn in sorted(self.groups[ms.name]):
            if ms.match(fn):
                yield fn

    def ms_depends(self, fn):
        # Return the MatchSpecs for the dependencies of package file fn.
        # the reason we don't use @memoize here is to allow resetting the
        # cache using self.msd_cache = {}, which is used during testing
        try:
            res = self.msd_cache[fn]
        except KeyError:
            depends = self.index[fn]['depends']
            res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
        return res

    @memoize
    def features(self, fn):
        # Feature names the package file fn was built with.
        return set(self.index[fn].get('features', '').split())

    @memoize
    def track_features(self, fn):
        # Feature names the package file fn enables ("tracks").
        return set(self.index[fn].get('track_features', '').split())

    @memoize
    def get_pkgs(self, ms, max_only=False):
        """Return Package objects matching ms; with max_only, only those
        tied for the highest (version, build_number).

        Raises RuntimeError when nothing in the index matches.
        """
        pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
        if not pkgs:
            raise RuntimeError("No packages found matching: %s" % ms)
        if max_only:
            maxpkg = max(pkgs)
            ret = []
            # Explicit loop so unorderable (TypeError) comparisons are
            # simply treated as "not equal".
            for pkg in pkgs:
                try:
                    if (pkg.name, pkg.norm_version, pkg.build_number) ==\
                       (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                        ret.append(pkg)
                except TypeError:
                    # They are not equal
                    pass
            return ret
        return pkgs

    def get_max_dists(self, ms):
        # Yield filenames of the version-maximal packages matching ms.
        pkgs = self.get_pkgs(ms, max_only=True)
        if not pkgs:
            raise RuntimeError("No packages found matching: %s" % ms)
        for pkg in pkgs:
            yield pkg.fn

    def all_deps(self, root_fn, max_only=False):
        """Return {filename: Package} for the transitive dependencies of
        root_fn (root_fn itself is not included)."""
        res = {}

        def add_dependents(fn1, max_only=False):
            for ms in self.ms_depends(fn1):
                for pkg2 in self.get_pkgs(ms, max_only=max_only):
                    if pkg2.fn in res:
                        continue
                    res[pkg2.fn] = pkg2
                    # A fully explicit (strictness-3) spec pins one file;
                    # no need to recurse into its alternatives.
                    if ms.strictness < 3:
                        add_dependents(pkg2.fn, max_only=max_only)

        add_dependents(root_fn, max_only=max_only)
        return res

    def gen_clauses(self, v, dists, specs, features):
        """Yield CNF clauses (lists of signed variable numbers taken from
        the fn->var map v) encoding the problem restricted to dists."""
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)

        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield [-v1, -v2]

        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield clause

                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield clause

        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [v[fn] for fn in self.find_matches(ms)
                          if fn in dists and feat in self.features(fn)]
                if len(clause) > 0:
                    yield clause

            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield [-v[fn]]

            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms)
                      if fn in dists]
            assert len(clause) >= 1, ms
            yield clause

    def generate_version_eq(self, v, dists, include0=False):
        """Build the linear objective [(coeff, var), ...] penalizing older
        versions; coefficient i is the distance from a name's newest
        (version, build_number).  Returns (eq, max_rhs)."""
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)
        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # Newest first, so i counts how far behind the newest each is.
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                        dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # Unorderable versions are treated as distinct steps.
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i
        return eq, max_rhs

    def get_dists(self, specs, max_only=False):
        # Collect {filename: Package} for every candidate of every spec
        # plus all transitive dependencies.
        dists = {}
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                if pkg.fn in dists:
                    continue
                dists.update(self.all_deps(pkg.fn, max_only=max_only))
                dists[pkg.fn] = pkg
        return dists

    def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
        """Find a list of package filenames satisfying specs/features.

        Tries a fast pass over version-maximal candidates first; otherwise
        builds the full SAT problem, bisects the version objective, and
        returns a minimal solution (all of them when returnall is true).
        With guess, exits with a human-readable hint on conflict.
        """
        log.debug("Solving for %s" % str(specs))
        # First try doing it the "old way", i.e., just look at the most recent
        # version of each package from the specs. This doesn't handle the more
        # complicated cases that the pseudo-boolean solver does, but it's also
        # much faster when it does work.
        dists = self.get_dists(specs, max_only=True)

        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1

        dotlog.debug("Solving using max dists only")
        clauses = self.gen_clauses(v, dists, specs, features)
        solutions = min_sat(clauses)

        if len(solutions) == 1:
            ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
            if returnall:
                return [ret]
            return ret

        # Fast path was ambiguous or failed: rebuild with every candidate.
        dists = self.get_dists(specs)

        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1

        clauses = list(self.gen_clauses(v, dists, specs, features))
        if not clauses:
            if returnall:
                return [[]]
            return []
        eq, max_rhs = self.generate_version_eq(v, dists)

        # Check the common case first
        dotlog.debug("Building the constraint with rhs: [0, 0]")
        constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))

        # Only relevant for build_BDD
        if constraints and constraints[0] == [false]:
            # XXX: This should *never* happen. build_BDD only returns false
            # when the linear constraint is unsatisfiable, but any linear
            # constraint can equal 0, by setting all the variables to 0.
            solution = []
        else:
            if constraints and constraints[0] == [true]:
                constraints = []

            dotlog.debug("Checking for solutions with rhs: [0, 0]")
            solution = sat(clauses + constraints)

        if not solution:
            # Second common case, check if it's unsatisfiable
            dotlog.debug("Checking for unsatisfiability")
            solution = sat(clauses)

            if not solution:
                if guess:
                    stderrlog.info('\nError: Unsatisfiable package '
                                   'specifications.\nGenerating hint: ')
                    sys.exit(self.guess_bad_solve(specs, features))
                raise RuntimeError("Unsatisfiable package specifications")

            def version_constraints(lo, hi):
                return list(generate_constraints(eq, m, [lo, hi], alg=alg))

            log.debug("Bisecting the version constraint")
            constraints = bisect_constraints(0, max_rhs, clauses, version_constraints)

        dotlog.debug("Finding the minimal solution")
        solutions = min_sat(clauses + constraints, N=m+1)
        assert solutions, (specs, features)

        if len(solutions) > 1:
            print('Warning:', len(solutions), "possible package resolutions:")
            for sol in solutions:
                print('\t', [w[lit] for lit in sol if 0 < lit <= m])

        if returnall:
            return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
        return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]

    def guess_bad_solve(self, specs, features):
        """Return a hint naming the spec combination(s) whose removal
        would make the remaining specs satisfiable ('' when no subset
        works)."""
        # TODO: Check features as well
        hint = []
        # Try to find the largest satisfiable subset
        found = False
        for i in range(len(specs), 0, -1):
            if found:
                break
            for comb in combinations(specs, i):
                try:
                    self.solve2(comb, features, guess=False)
                except RuntimeError:
                    pass
                else:
                    rem = set(specs) - set(comb)
                    rem.discard('conda')
                    if len(rem) == 1:
                        hint.append("%s" % rem.pop())
                    else:
                        hint.append("%s" % ' and '.join(rem))
                    found = True
        if not hint:
            return ''
        if len(hint) == 1:
            return ("\nHint: %s has a conflict with the remaining packages" %
                    hint[0])
        return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
- %s""" % '\n - '.join(hint))

    def explicit(self, specs):
        """
        Given the specifications, return:
          A. if one explicit specification (strictness=3) is given, and
             all dependencies of this package are explicit as well ->
             return the filenames of those dependencies (as well as the
             explicit specification)
          B. if all (more than one) specifications are explicit ->
             return the filenames of those (not their dependencies)
          C. None in all other cases
        """
        if len(specs) == 1:
            ms = MatchSpec(specs[0])
            fn = ms.to_filename()
            if fn is None:
                return None
            res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
            res.append(fn)
        else:
            res = [MatchSpec(spec).to_filename() for spec in specs
                   if spec != 'conda']

        if None in res:
            return None
        res.sort()
        log.debug('explicit(%r) finished' % specs)
        return res

    @memoize
    def sum_matches(self, fn1, fn2):
        # Number of fn1's dependency specs that fn2 satisfies.
        return sum(ms.match(fn2) for ms in self.ms_depends(fn1))

    def find_substitute(self, installed, features, fn, max_only=False):
        """
        Find a substitute package for `fn` (given `installed` packages)
        which does *NOT* have `features`. If found, the substitute will
        have the same package name and version and its dependencies will
        match the installed packages as closely as possible.
        If no substitute is found, None is returned.
        """
        name, version, unused_build = fn.rsplit('-', 2)
        candidates = {}
        for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
            fn1 = pkg.fn
            if self.features(fn1).intersection(features):
                continue
            # Score: how many installed packages' dependency specs fn1
            # satisfies; the best-scoring candidate wins.
            key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
            candidates[key] = fn1

        if candidates:
            maxkey = max(candidates)
            return candidates[maxkey]
        else:
            return None

    def installed_features(self, installed):
        """
        Return the set of all features of all `installed` packages.
        """
        res = set()
        for fn in installed:
            try:
                res.update(self.features(fn))
            except KeyError:
                # fn is not in the index (e.g. a locally built package);
                # it contributes no features.
                pass
        return res

    def update_with_features(self, fn, features):
        # Merge fn's 'with_features_depends' entry for the largest subset
        # of the active features into its cached dependency list.
        with_features = self.index[fn].get('with_features_depends')
        if with_features is None:
            return
        key = ''
        for fstr in with_features:
            fs = set(fstr.split())
            # Choose the entry covered by the active feature set that
            # mentions the most features.
            if fs <= features and len(fs) > len(set(key.split())):
                key = fstr
        if not key:
            return
        d = {ms.name: ms for ms in self.ms_depends(fn)}
        for spec in with_features[key]:
            ms = MatchSpec(spec)
            d[ms.name] = ms
        self.msd_cache[fn] = d.values()

    def solve(self, specs, installed=None, features=None, max_only=False):
        """Top-level entry point: resolve specs against the index.

        installed seeds the active feature set (when features is None);
        tracked features of candidate packages are added as well.  Tries
        the trivial explicit() path before the SAT-based solve2().
        """
        if installed is None:
            installed = []
        if features is None:
            features = self.installed_features(installed)
        for spec in specs:
            ms = MatchSpec(spec)
            for pkg in self.get_pkgs(ms, max_only=max_only):
                fn = pkg.fn
                features.update(self.track_features(fn))
        log.debug('specs=%r features=%r' % (specs, features))
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                fn = pkg.fn
                self.update_with_features(fn, features)

        stdoutlog.info("Solving package specifications: ")
        try:
            return self.explicit(specs) or self.solve2(specs, features)
        except RuntimeError:
            stdoutlog.info('\n')
            raise
# Ad-hoc command-line driver for exercising the resolver against the test
# index; not part of the library API.
if __name__ == '__main__':
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec

    with open('../tests/index.json') as fi:
        r = Resolve(json.load(fi))

    p = OptionParser(usage="usage: %prog [options] SPEC(s)")
    p.add_option("--mkl", action="store_true")
    opts, args = p.parse_args()

    features = set(['mkl']) if opts.mkl else set()
    specs = [arg2spec(arg) for arg in args]
    pprint(r.solve(specs, [], features))
<|code_end|>
| conda/resolve.py
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
    """Return a sortable verlib.NormalizedVersion for *version*.

    Release-candidate markers ('rc') are mapped onto a high '.dev' tag so
    they sort as pre-releases, and a bare trailing '.dev' gets a '0'
    appended.  Falls back to returning the massaged string itself when
    verlib cannot parse it.
    """
    candidate = version.replace('rc', '.dev99999')
    if candidate.endswith('.dev'):
        candidate = candidate + '0'
    try:
        return verlib.NormalizedVersion(candidate)
    except verlib.IrrationalVersionError:
        return candidate
class NoPackagesFound(RuntimeError):
    """Raised when no package in the index satisfies a spec.

    `pkg` records the offending spec string so callers can skip (or
    report) the package whose dependency could not be resolved.
    """

    def __init__(self, msg, pkg):
        RuntimeError.__init__(self, msg)
        self.pkg = pkg
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')


def ver_eval(version, constraint):
    """
    return the Boolean result of a comparison between two versions, where the
    second argument includes the comparison operator. For example,
    ver_eval('1.2', '>=1.1') will return True.
    """
    m = const_pat.match(constraint)
    if m is None:
        raise RuntimeError("Did not recognize version specification: %r" %
                           constraint)
    op, b = m.groups()
    a = version
    # Normalize both sides; comparison falls back to the raw strings when
    # the normalized forms are not mutually orderable.
    na = normalized_version(a)
    nb = normalized_version(b)
    comparisons = {
        '==': lambda x, y: x == y,
        '>=': lambda x, y: x >= y,
        '<=': lambda x, y: x <= y,
        '>': lambda x, y: x > y,
        '<': lambda x, y: x < y,
        '!=': lambda x, y: x != y,
    }
    compare = comparisons.get(op)
    if compare is None:
        raise RuntimeError("Did not recognize version comparison operator: %r" %
                           constraint)
    try:
        return compare(na, nb)
    except TypeError:
        return compare(a, b)
class VersionSpec(object):
    """One alternative of a version specification.

    Either a comma-separated list of relational constraints (e.g.
    '>=1.1,<2'), or a glob-style pattern (e.g. '1.7*') that is compiled
    to an anchored regular expression.
    """

    def __init__(self, spec):
        assert '|' not in spec
        self.regex = not spec.startswith(('=', '<', '>', '!'))
        if self.regex:
            # Escape dots, turn '*' into '.*', and anchor at the end.
            escaped = spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(r'(%s)$' % escaped)
        else:
            self.constraints = spec.split(',')

    def match(self, version):
        """Return True when *version* satisfies this spec."""
        if not self.regex:
            return all(ver_eval(version, c) for c in self.constraints)
        return bool(self.pat.match(version))
class MatchSpec(object):
    """A package requirement string.

    Forms (strictness 1..3): 'name', 'name version-spec', or
    'name version build'.  A strictness-2 version spec may contain
    '|'-separated alternatives, each parsed as a VersionSpec.
    """

    def __init__(self, spec):
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3
        self.name = parts[0]
        if self.strictness == 2:
            self.vspecs = [VersionSpec(alt) for alt in parts[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])

    def match(self, fn):
        """Return True when package filename *fn* satisfies this spec."""
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 1:
            return True
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        return bool((version, build) == self.ver_build)

    def to_filename(self):
        # Only a fully explicit (strictness-3) spec pins a single file.
        if self.strictness != 3:
            return None
        return self.name + '-%s-%s.tar.bz2' % self.ver_build

    def __eq__(self, other):
        return other.spec == self.spec

    def __hash__(self):
        return hash(self.spec)

    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)

    def __str__(self):
        return self.spec
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """
    def __init__(self, fn, info):
        # `fn` is the package filename; `info` is its record from the index.
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        # Cached parse of `version`; may remain a plain string when the
        # version is unparsable (see the TypeError fallbacks below).
        self.norm_version = normalized_version(self.version)

    # A __cmp__-based implementation was dropped when porting to Python 3:
    # http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp

    def __lt__(self, other):
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        try:
            # NOTE(review): the build strings are crossed here (other.build
            # in the left tuple, self.build in the right), which reverses
            # the build-string tie-break relative to the other fields —
            # confirm this is intentional.
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            # norm_version may be a raw string; fall back to comparing the
            # unparsed version strings.
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))

    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))

    def __gt__(self, other):
        # Derived from __lt__/__eq__; name mismatch raises via __lt__.
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self < other or self == other

    def __ge__(self, other):
        return self > other or self == other

    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
def __init__(self, index):
self.index = index
self.groups = defaultdict(list) # map name to list of filenames
for fn, info in iteritems(index):
self.groups[info['name']].append(fn)
self.msd_cache = {}
def find_matches(self, ms):
for fn in sorted(self.groups[ms.name]):
if ms.match(fn):
yield fn
def ms_depends(self, fn):
# the reason we don't use @memoize here is to allow resetting the
# cache using self.msd_cache = {}, which is used during testing
try:
res = self.msd_cache[fn]
except KeyError:
depends = self.index[fn]['depends']
res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
return res
@memoize
def features(self, fn):
return set(self.index[fn].get('features', '').split())
@memoize
def track_features(self, fn):
return set(self.index[fn].get('track_features', '').split())
@memoize
def get_pkgs(self, ms, max_only=False):
pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
if not pkgs:
raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
if max_only:
maxpkg = max(pkgs)
ret = []
for pkg in pkgs:
try:
if (pkg.name, pkg.norm_version, pkg.build_number) ==\
(maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
ret.append(pkg)
except TypeError:
# They are not equal
pass
return ret
return pkgs
def get_max_dists(self, ms):
pkgs = self.get_pkgs(ms, max_only=True)
if not pkgs:
raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
for pkg in pkgs:
yield pkg.fn
def all_deps(self, root_fn, max_only=False):
res = {}
def add_dependents(fn1, max_only=False):
for ms in self.ms_depends(fn1):
for pkg2 in self.get_pkgs(ms, max_only=max_only):
if pkg2.fn in res:
continue
res[pkg2.fn] = pkg2
if ms.strictness < 3:
add_dependents(pkg2.fn, max_only=max_only)
add_dependents(root_fn, max_only=max_only)
return res
def gen_clauses(self, v, dists, specs, features):
groups = defaultdict(list) # map name to list of filenames
for fn in dists:
groups[self.index[fn]['name']].append(fn)
for filenames in itervalues(groups):
# ensure packages with the same name conflict
for fn1 in filenames:
v1 = v[fn1]
for fn2 in filenames:
v2 = v[fn2]
if v1 < v2:
# NOT (fn1 AND fn2)
# e.g. NOT (numpy-1.6 AND numpy-1.7)
yield [-v1, -v2]
for fn1 in dists:
for ms in self.ms_depends(fn1):
# ensure dependencies are installed
# e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
clause = [-v[fn1]]
for fn2 in self.find_matches(ms):
if fn2 in dists:
clause.append(v[fn2])
assert len(clause) > 1, '%s %r' % (fn1, ms)
yield clause
for feat in features:
# ensure that a package (with required name) which has
# the feature is installed
# e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
clause = [-v[fn1]]
for fn2 in groups[ms.name]:
if feat in self.features(fn2):
clause.append(v[fn2])
if len(clause) > 1:
yield clause
for spec in specs:
ms = MatchSpec(spec)
# ensure that a matching package with the feature is installed
for feat in features:
# numpy-1.7[mkl] OR numpy-1.8[mkl]
clause = [v[fn] for fn in self.find_matches(ms)
if fn in dists and feat in self.features(fn)]
if len(clause) > 0:
yield clause
# Don't instlal any package that has a feature that wasn't requested.
for fn in self.find_matches(ms):
if fn in dists and self.features(fn) - features:
yield [-v[fn]]
# finally, ensure a matching package itself is installed
# numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
# numpy-1.7-py27[mkl] OR ...
clause = [v[fn] for fn in self.find_matches(ms)
if fn in dists]
assert len(clause) >= 1, ms
yield clause
def generate_version_eq(self, v, dists, include0=False):
groups = defaultdict(list) # map name to list of filenames
for fn in sorted(dists):
groups[self.index[fn]['name']].append(fn)
eq = []
max_rhs = 0
for filenames in sorted(itervalues(groups)):
pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
i = 0
prev = pkgs[0]
for pkg in pkgs:
try:
if (dists[pkg].name, dists[pkg].norm_version,
dists[pkg].build_number) != (dists[prev].name,
dists[prev].norm_version, dists[prev].build_number):
i += 1
except TypeError:
i += 1
if i or include0:
eq += [(i, v[pkg])]
prev = pkg
max_rhs += i
return eq, max_rhs
def get_dists(self, specs, max_only=False):
dists = {}
for spec in specs:
found = False
notfound = []
for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
if pkg.fn in dists:
found = True
continue
try:
dists.update(self.all_deps(pkg.fn, max_only=max_only))
except NoPackagesFound as e:
# Ignore any package that has nonexisting dependencies.
notfound.append(e.pkg)
else:
dists[pkg.fn] = pkg
found = True
if not found:
raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
return dists
def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
log.debug("Solving for %s" % str(specs))
# First try doing it the "old way", i.e., just look at the most recent
# version of each package from the specs. This doesn't handle the more
# complicated cases that the pseudo-boolean solver does, but it's also
# much faster when it does work.
try:
dists = self.get_dists(specs, max_only=True)
except NoPackagesFound:
# Handle packages that are not included because some dependencies
# couldn't be found.
pass
else:
v = {} # map fn to variable number
w = {} # map variable number to fn
i = -1 # in case the loop doesn't run
for i, fn in enumerate(sorted(dists)):
v[fn] = i + 1
w[i + 1] = fn
m = i + 1
dotlog.debug("Solving using max dists only")
clauses = self.gen_clauses(v, dists, specs, features)
solutions = min_sat(clauses)
if len(solutions) == 1:
ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
if returnall:
return [ret]
return ret
dists = self.get_dists(specs)
v = {} # map fn to variable number
w = {} # map variable number to fn
i = -1 # in case the loop doesn't run
for i, fn in enumerate(sorted(dists)):
v[fn] = i + 1
w[i + 1] = fn
m = i + 1
clauses = list(self.gen_clauses(v, dists, specs, features))
if not clauses:
if returnall:
return [[]]
return []
eq, max_rhs = self.generate_version_eq(v, dists)
# Check the common case first
dotlog.debug("Building the constraint with rhs: [0, 0]")
constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))
# Only relevant for build_BDD
if constraints and constraints[0] == [false]:
# XXX: This should *never* happen. build_BDD only returns false
# when the linear constraint is unsatisfiable, but any linear
# constraint can equal 0, by setting all the variables to 0.
solution = []
else:
if constraints and constraints[0] == [true]:
constraints = []
dotlog.debug("Checking for solutions with rhs: [0, 0]")
solution = sat(clauses + constraints)
if not solution:
# Second common case, check if it's unsatisfiable
dotlog.debug("Checking for unsatisfiability")
solution = sat(clauses)
if not solution:
if guess:
stderrlog.info('\nError: Unsatisfiable package '
'specifications.\nGenerating hint: ')
sys.exit(self.guess_bad_solve(specs, features))
raise RuntimeError("Unsatisfiable package specifications")
def version_constraints(lo, hi):
return list(generate_constraints(eq, m, [lo, hi], alg=alg))
log.debug("Bisecting the version constraint")
constraints = bisect_constraints(0, max_rhs, clauses, version_constraints)
dotlog.debug("Finding the minimal solution")
solutions = min_sat(clauses + constraints, N=m+1)
assert solutions, (specs, features)
if len(solutions) > 1:
print('Warning:', len(solutions), "possible package resolutions:")
for sol in solutions:
print('\t', [w[lit] for lit in sol if 0 < lit <= m])
if returnall:
return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
def guess_bad_solve(self, specs, features):
# TODO: Check features as well
hint = []
# Try to find the largest satisfiable subset
found = False
for i in range(len(specs), 0, -1):
if found:
break
for comb in combinations(specs, i):
try:
self.solve2(comb, features, guess=False)
except RuntimeError:
pass
else:
rem = set(specs) - set(comb)
rem.discard('conda')
if len(rem) == 1:
hint.append("%s" % rem.pop())
else:
hint.append("%s" % ' and '.join(rem))
found = True
if not hint:
return ''
if len(hint) == 1:
return ("\nHint: %s has a conflict with the remaining packages" %
hint[0])
return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
- %s""" % '\n - '.join(hint))
def explicit(self, specs):
"""
Given the specifications, return:
A. if one explicit specification (strictness=3) is given, and
all dependencies of this package are explicit as well ->
return the filenames of those dependencies (as well as the
explicit specification)
B. if not one explicit specifications are given ->
return the filenames of those (not thier dependencies)
C. None in all other cases
"""
if len(specs) == 1:
ms = MatchSpec(specs[0])
fn = ms.to_filename()
if fn is None:
return None
res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
res.append(fn)
else:
res = [MatchSpec(spec).to_filename() for spec in specs
if spec != 'conda']
if None in res:
return None
res.sort()
log.debug('explicit(%r) finished' % specs)
return res
@memoize
def sum_matches(self, fn1, fn2):
return sum(ms.match(fn2) for ms in self.ms_depends(fn1))
def find_substitute(self, installed, features, fn, max_only=False):
"""
Find a substitute package for `fn` (given `installed` packages)
which does *NOT* have `features`. If found, the substitute will
have the same package name and version and its dependencies will
match the installed packages as closely as possible.
If no substitute is found, None is returned.
"""
name, version, unused_build = fn.rsplit('-', 2)
candidates = {}
for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
fn1 = pkg.fn
if self.features(fn1).intersection(features):
continue
key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
candidates[key] = fn1
if candidates:
maxkey = max(candidates)
return candidates[maxkey]
else:
return None
def installed_features(self, installed):
"""
Return the set of all features of all `installed` packages,
"""
res = set()
for fn in installed:
try:
res.update(self.features(fn))
except KeyError:
pass
return res
def update_with_features(self, fn, features):
    # Merge the 'with_features_depends' entry of `fn` (if any) into its
    # cached dependency specs.  The entry maps a space-separated feature
    # string to an alternative dependency list.
    with_features = self.index[fn].get('with_features_depends')
    if with_features is None:
        return
    # Pick the largest feature combination fully contained in `features`.
    key = ''
    for fstr in with_features:
        fs = set(fstr.split())
        if fs <= features and len(fs) > len(set(key.split())):
            key = fstr
    if not key:
        return
    # Override the plain dependency specs with the feature-specific ones,
    # keyed by package name so overrides replace rather than accumulate.
    d = {ms.name: ms for ms in self.ms_depends(fn)}
    for spec in with_features[key]:
        ms = MatchSpec(spec)
        d[ms.name] = ms
    # NOTE(review): stores a dict view; valid because `d` is not mutated
    # afterwards, but list(d.values()) would be clearer.
    self.msd_cache[fn] = d.values()
def solve(self, specs, installed=None, features=None, max_only=False):
    # Top-level entry point: resolve `specs` to a list of package
    # filenames.  Tries the fast explicit path first, then the full
    # solver (solve2).
    if installed is None:
        installed = []
    if features is None:
        features = self.installed_features(installed)

    # Activate any features tracked by candidate packages so that
    # feature-dependent dependencies are considered.
    for spec in specs:
        ms = MatchSpec(spec)
        for pkg in self.get_pkgs(ms, max_only=max_only):
            fn = pkg.fn
            features.update(self.track_features(fn))
    log.debug('specs=%r features=%r' % (specs, features))

    # Swap in feature-specific dependency lists where applicable.
    for spec in specs:
        for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
            fn = pkg.fn
            self.update_with_features(fn, features)

    stdoutlog.info("Solving package specifications: ")
    try:
        return self.explicit(specs) or self.solve2(specs, features)
    except RuntimeError:
        # Finish the progress line cleanly before propagating.
        stdoutlog.info('\n')
        raise
if __name__ == '__main__':
    # Ad-hoc manual test driver: resolve command-line specs against the
    # checked-in test index and pretty-print the solution.
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec

    with open('../tests/index.json') as fi:
        r = Resolve(json.load(fi))

    p = OptionParser(usage="usage: %prog [options] SPEC(s)")
    p.add_option("--mkl", action="store_true")
    opts, args = p.parse_args()

    features = set(['mkl']) if opts.mkl else set()
    specs = [arg2spec(arg) for arg in args]
    pprint(r.solve(specs, [], features))
| conda/resolve.py
--- a/conda/resolve.py
+++ b/conda/resolve.py
@@ -30,6 +30,11 @@ def normalized_version(version):
return version
+class NoPackagesFound(RuntimeError):
+ def __init__(self, msg, pkg):
+ super(NoPackagesFound, self).__init__(msg)
+ self.pkg = pkg
+
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')
def ver_eval(version, constraint):
"""
@@ -243,7 +248,7 @@ def track_features(self, fn):
def get_pkgs(self, ms, max_only=False):
pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
if not pkgs:
- raise RuntimeError("No packages found matching: %s" % ms)
+ raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
if max_only:
maxpkg = max(pkgs)
ret = []
@@ -262,7 +267,7 @@ def get_pkgs(self, ms, max_only=False):
def get_max_dists(self, ms):
pkgs = self.get_pkgs(ms, max_only=True)
if not pkgs:
- raise RuntimeError("No packages found matching: %s" % ms)
+ raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
for pkg in pkgs:
yield pkg.fn
@@ -371,11 +376,22 @@ def generate_version_eq(self, v, dists, include0=False):
def get_dists(self, specs, max_only=False):
dists = {}
for spec in specs:
+ found = False
+ notfound = []
for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
if pkg.fn in dists:
+ found = True
continue
- dists.update(self.all_deps(pkg.fn, max_only=max_only))
- dists[pkg.fn] = pkg
+ try:
+ dists.update(self.all_deps(pkg.fn, max_only=max_only))
+ except NoPackagesFound as e:
+ # Ignore any package that has nonexisting dependencies.
+ notfound.append(e.pkg)
+ else:
+ dists[pkg.fn] = pkg
+ found = True
+ if not found:
+ raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
return dists
@@ -387,25 +403,31 @@ def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
# complicated cases that the pseudo-boolean solver does, but it's also
# much faster when it does work.
- dists = self.get_dists(specs, max_only=True)
-
- v = {} # map fn to variable number
- w = {} # map variable number to fn
- i = -1 # in case the loop doesn't run
- for i, fn in enumerate(sorted(dists)):
- v[fn] = i + 1
- w[i + 1] = fn
- m = i + 1
-
- dotlog.debug("Solving using max dists only")
- clauses = self.gen_clauses(v, dists, specs, features)
- solutions = min_sat(clauses)
-
- if len(solutions) == 1:
- ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
- if returnall:
- return [ret]
- return ret
+ try:
+ dists = self.get_dists(specs, max_only=True)
+ except NoPackagesFound:
+ # Handle packages that are not included because some dependencies
+ # couldn't be found.
+ pass
+ else:
+ v = {} # map fn to variable number
+ w = {} # map variable number to fn
+ i = -1 # in case the loop doesn't run
+ for i, fn in enumerate(sorted(dists)):
+ v[fn] = i + 1
+ w[i + 1] = fn
+ m = i + 1
+
+ dotlog.debug("Solving using max dists only")
+ clauses = self.gen_clauses(v, dists, specs, features)
+ solutions = min_sat(clauses)
+
+
+ if len(solutions) == 1:
+ ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
+ if returnall:
+ return [ret]
+ return ret
dists = self.get_dists(specs)
|
Make the conda install table easier to read
The table of what packages will be installed and removed is hard to read. For one thing, it's hard to tell at a glance which packages are merely upgraded or downgraded rather than removed. Also, the "link" terminology is confusing.
A suggestion by @jklowden:
```
$ conda update conda
Updating Anaconda environment at /usr/local/anaconda
The following packages will be downloaded:
conda-2.2.3-py27_0.tar.bz2
[http://repo.continuum.io/pkgs/free/osx-64/]
The following packages will be upgraded:
Old version Replace with
------------------------- -------------------------
conda-1.4.4 conda-2.2.3
```
> or, if you really want the build (I don't, it's not meaningful to the user)
```
package Old version New version
------------ ------------------ ------------------
conda 1.4.4, py27_0 2.2.3, py27_0
```
I think the build is meaningful as it tells you what Python version is being used. It also tells you if you are using mkl. And also some people might use the build string to put other information which may be useful to users.
<!---
@huboard:{"order":3.3142282405143226e-49,"custom_state":""}
-->
| conda/cli/main_remove.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from argparse import RawDescriptionHelpFormatter
from conda.cli import common
help = "Remove a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution.
"""
example = """
examples:
conda remove -n myenv scipy
"""
def configure_parser(sub_parsers):
    # Register the 'remove' sub-command and its options on the given
    # argparse sub-parsers object.
    p = sub_parsers.add_parser(
        'remove',
        formatter_class = RawDescriptionHelpFormatter,
        description = descr,
        help = help,
        epilog = example,
    )
    common.add_parser_yes(p)
    p.add_argument(
        "--all",
        action = "store_true",
        help = "remove all packages, i.e. the entire environment",
    )
    p.add_argument(
        "--features",
        action = "store_true",
        help = "remove features (instead of packages)",
    )
    common.add_parser_no_pin(p)
    common.add_parser_channels(p)
    common.add_parser_prefix(p)
    common.add_parser_quiet(p)
    p.add_argument(
        'package_names',
        metavar = 'package_name',
        action = "store",
        nargs = '*',
        help = "package names to remove from environment",
    )
    # Dispatch to execute() when this sub-command is chosen.
    p.set_defaults(func=execute)
def execute(args, parser):
    # Entry point for 'conda remove': build removal actions for features,
    # the whole environment, or individual packages, then execute them.
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 ' try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    index = None
    if args.features:
        # Feature removal needs the channel index to find substitutes.
        common.ensure_override_channels_requires_channel(args)
        channel_urls = args.channel or ()
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     ' add -n NAME or -p PREFIX option')
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        # Protect packages conda itself depends on in the root env.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing linked, but --all still means: delete the prefix.
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    if args.all:
        rm_rf(prefix)
<|code_end|>
conda/plan.py
<|code_start|>
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import re
import sys
from logging import getLogger
from collections import defaultdict
from os.path import abspath, isfile, join, exists
from conda import config
from conda import install
from conda.fetch import fetch_pkg
from conda.history import History
from conda.resolve import MatchSpec, Resolve
from conda.utils import md5_file, human_bytes
log = getLogger(__name__)
# op codes
FETCH = 'FETCH'
EXTRACT = 'EXTRACT'
UNLINK = 'UNLINK'
LINK = 'LINK'
RM_EXTRACTED = 'RM_EXTRACTED'
RM_FETCHED = 'RM_FETCHED'
PREFIX = 'PREFIX'
PRINT = 'PRINT'
PROGRESS = 'PROGRESS'
SYMLINK_CONDA = 'SYMLINK_CONDA'
progress_cmds = set([EXTRACT, RM_EXTRACTED, LINK, UNLINK])
def print_dists(dists_extras):
    """Print a two-column (package | build) table of dists, appending an
    optional extra string to each row."""
    row = " %-27s|%17s"
    print(row % ('package', 'build'))
    print(row % ('-' * 27, '-' * 17))
    for dist, extra in dists_extras:
        name_ver, build = dist.rsplit('-', 1)
        text = row % (name_ver, build)
        if extra:
            text += extra
        print(text)
def split_linkarg(arg):
    "Return tuple(dist, pkgs_dir, linktype)"
    # Either a bare dist name, or "dist pkgs_dir linktype".
    match = re.match(r'\s*(\S+)(?:\s+(.+?)\s+(\d+))?\s*$', arg)
    dist, pkgs_dir, linktype = match.groups()
    if pkgs_dir is None:
        pkgs_dir = config.pkgs_dirs[0]
    if linktype is None:
        linktype = install.LINK_HARD
    return dist, pkgs_dir, int(linktype)
def display_actions(actions, index=None):
    # Print a human-readable summary of the pending actions: downloads
    # (with sizes and total), then packages to un-link and link.
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += ' %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        # Only show a total when there is more than one download.
        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))

    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([
            (dist, None)
            for dist in actions[UNLINK]])

    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        lst = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            extra = ' %s' % install.link_name_map.get(lt)
            lst.append((dist, extra))
        print_dists(lst)
    print()
# the order matters here, don't change it
# (downloads and extraction first, link after unlink, cleanup ops last)
action_codes = FETCH, EXTRACT, UNLINK, LINK, SYMLINK_CONDA, RM_EXTRACTED, RM_FETCHED
def nothing_to_do(actions):
    """Return True when `actions` contains no non-empty operation list."""
    return not any(actions.get(op) for op in action_codes)
def plan_from_actions(actions):
    # Flatten an actions dict into a textual plan (one "OP arg" line per
    # step), honoring the per-actions 'op_order' override when present.
    if 'op_order' in actions and actions['op_order']:
        op_order = actions['op_order']
    else:
        op_order = action_codes

    assert PREFIX in actions and actions[PREFIX]
    res = ['# plan',
           'PREFIX %s' % actions[PREFIX]]
    for op in op_order:
        if op not in actions:
            continue
        if not actions[op]:
            continue
        if '_' not in op:
            # User-facing ops (FETCH, LINK, ...) get a banner line.
            res.append('PRINT %sing packages ...' % op.capitalize())
        if op in progress_cmds:
            res.append('PROGRESS %d' % len(actions[op]))
        for arg in actions[op]:
            res.append('%s %s' % (op, arg))
    return res
def extracted_where(dist):
    """Return the first package cache dir where `dist` is already
    extracted, or None when it is not extracted anywhere."""
    return next((cache_dir for cache_dir in config.pkgs_dirs
                 if install.is_extracted(cache_dir, dist)), None)
def ensure_linked_actions(dists, prefix):
    # Build the minimal FETCH/EXTRACT/LINK actions required so that every
    # dist in `dists` ends up linked into `prefix`.
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in dists:
        if install.is_linked(prefix, dist):
            continue
        extracted_in = extracted_where(dist)
        if extracted_in:
            # Already extracted somewhere: only a link step is needed.
            if install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                # Hard link impossible (e.g. cross-device): fall back to a
                # soft link where allowed, otherwise a full copy.
                lt = (install.LINK_SOFT if (config.allow_softlinks and
                                            sys.platform != 'win32') else
                      install.LINK_COPY)
            actions[LINK].append('%s %s %d' % (dist, extracted_in, lt))
            continue
        # Not extracted anywhere: extract (and possibly fetch) first.
        actions[LINK].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_fetched(config.pkgs_dirs[0], dist):
            continue
        actions[FETCH].append(dist)
    return actions
def force_linked_actions(dists, index, prefix):
    """Return actions that unconditionally re-extract and re-link every
    dist in `dists` into `prefix`.

    Cached tarballs are kept only when their MD5 matches the index
    record; otherwise they are removed and fetched again.
    """
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]['md5']:
                    # Corrupt or outdated cache entry: drop and re-fetch.
                    actions[RM_FETCHED].append(dist)
                    actions[FETCH].append(dist)
            except KeyError:
                # Fix: terminate the warning with a newline so it does
                # not run into subsequent console output.
                sys.stderr.write('Warning: cannot lookup MD5 of: %s\n' % fn)
        else:
            actions[FETCH].append(dist)
        # Always re-extract and re-link, unlinking any existing install.
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
    # True when `prefix` is conda's root environment directory.
    return abspath(prefix) == abspath(config.root_dir)
def dist2spec3v(dist):
    """Turn a dist name like 'numpy-1.7.1-py27_0' into a spec pinning
    the major.minor version: 'numpy 1.7*'.

    Uses the first two version components rather than the first three
    characters, so multi-digit components (e.g. '1.10.2' -> '1.10*')
    are pinned correctly.
    """
    name, version, unused_build = dist.rsplit('-', 2)
    return '%s %s*' % (name, '.'.join(version.split('.')[:2]))
def add_defaults_to_specs(r, linked, specs):
    # Append a default Python spec to `specs` (in place) when needed so
    # the solver chooses a sensible interpreter version.
    # TODO: This should use the pinning mechanism. But don't change the API:
    # cas uses it.
    if r.explicit(specs):
        return
    log.debug('H0 specs=%r' % specs)
    names_linked = {install.name_dist(dist): dist for dist in linked}
    names_ms = {MatchSpec(s).name: MatchSpec(s) for s in specs}

    for name, def_ver in [('python', config.default_python),]:
        #('numpy', config.default_numpy)]:
        ms = names_ms.get(name)
        if ms and ms.strictness > 1:
            # if any of the specifications mention the Python/Numpy version,
            # we don't need to add the default spec
            log.debug('H1 %s' % name)
            continue

        any_depends_on = any(ms2.name == name
                             for spec in specs
                             for fn in r.get_max_dists(MatchSpec(spec))
                             for ms2 in r.ms_depends(fn))
        log.debug('H2 %s %s' % (name, any_depends_on))

        if not any_depends_on and name not in names_ms:
            # if nothing depends on Python/Numpy AND the Python/Numpy is not
            # specified, we don't need to add the default spec
            log.debug('H2A %s' % name)
            continue

        if (any_depends_on and len(specs) >= 1 and
                MatchSpec(specs[0]).strictness == 3):
            # if something depends on Python/Numpy, but the spec is very
            # explicit, we also don't need to add the default spec
            log.debug('H2B %s' % name)
            continue

        if name in names_linked:
            # if Python/Numpy is already linked, we add that instead of the
            # default
            log.debug('H3 %s' % name)
            specs.append(dist2spec3v(names_linked[name]))
            continue

        if (name, def_ver) in [('python', '3.3'), ('python', '3.4')]:
            # Don't include Python 3 in the specs if this is the Python 3
            # version of conda.
            continue

        specs.append('%s %s*' % (name, def_ver))
    log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
    """Return the non-empty lines of `prefix`/conda-meta/pinned, or []
    when that file does not exist."""
    pinfile = join(prefix, 'conda-meta', 'pinned')
    if not exists(pinfile):
        return []
    with open(pinfile) as fo:
        lines = fo.read().strip().split('\n')
    return [line for line in lines if line]
def install_actions(prefix, index, specs, force=False, only_names=None, pinned=True, minimal_hint=False):
    # Resolve `specs` against `index` and produce the actions required to
    # install the solution into `prefix`.
    r = Resolve(index)
    linked = install.linked(prefix)

    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')
    add_defaults_to_specs(r, linked, specs)
    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        specs += pinned_specs
        # TODO: Improve error messages here

    # Map package name -> chosen dist, honoring the only_names filter.
    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features, minimal_hint=minimal_hint):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        if install.on_win:
            # Packages in use by the running conda cannot be replaced on
            # Windows.
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    else:
        # discard conda from other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into "
                     "root environment")

    smh = sorted(must_have.values())
    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    if actions[LINK] and sys.platform != 'win32':
        actions[SYMLINK_CONDA] = [config.root_dir]

    # Unlink any currently linked dist that is being replaced.
    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            actions[UNLINK].append(dist)
    return actions
def remove_actions(prefix, specs, pinned=True):
    """Build UNLINK actions for every linked dist matching `specs`.

    Raises RuntimeError when a matching dist is pinned, unless
    pinned=False.
    """
    linked = install.linked(prefix)
    mss = [MatchSpec(spec) for spec in specs]
    pinned_specs = get_pinned_specs(prefix)

    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in sorted(linked):
        fn = '%s.tar.bz2' % dist
        if not any(ms.match(fn) for ms in mss):
            continue
        if pinned and any(MatchSpec(spec).match(fn)
                          for spec in pinned_specs):
            raise RuntimeError("Cannot remove %s because it is pinned. Use --no-pin to override." % dist)
        actions[UNLINK].append(dist)
    return actions
def remove_features_actions(prefix, index, features):
    # Unlink every linked package that tracks or carries any of
    # `features`, linking feature-free substitutes where available.
    linked = install.linked(prefix)
    r = Resolve(index)

    actions = defaultdict(list)
    actions[PREFIX] = prefix
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            continue
        if r.track_features(fn).intersection(features):
            actions[UNLINK].append(dist)
        if r.features(fn).intersection(features):
            actions[UNLINK].append(dist)
            # Try to replace with an equivalent build lacking the feature.
            subst = r.find_substitute(_linked, features, fn)
            if subst:
                to_link.append(subst[:-8])

    if to_link:
        actions.update(ensure_linked_actions(to_link, prefix))
    return actions
def revert_actions(prefix, revision=-1):
    # Compute the actions needed to return `prefix` to a previous state
    # recorded in its history; {} when already in that state.
    h = History(prefix)
    h.update()
    try:
        state = h.get_state(revision)
    except IndexError:
        sys.exit("Error: no such revision: %d" % revision)

    curr = h.get_state()
    if state == curr:
        return {}

    actions = ensure_linked_actions(state, prefix)
    for dist in curr - state:
        actions[UNLINK].append(dist)
    return actions
# ---------------------------- EXECUTION --------------------------
def fetch(index, dist):
    # Download the tarball for `dist` using its index record.
    assert index is not None
    fn = dist + '.tar.bz2'
    fetch_pkg(index[fn])
def link(prefix, arg, index=None):
    # Link a dist (given as a plan argument string) into `prefix`.
    dist, pkgs_dir, lt = split_linkarg(arg)
    install.link(pkgs_dir, prefix, dist, lt, index=index)
def cmds_from_plan(plan):
    """Split plan lines into [op, arg] pairs, skipping blank lines and
    comments."""
    commands = []
    for raw in plan:
        log.debug(' %s' % raw)
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            commands.append(stripped.split(None, 1))
    return commands
def execute_plan(plan, index=None, verbose=False):
    # Interpret a textual plan line by line, dispatching each op code and
    # driving the progress-bar loggers.
    if verbose:
        from conda.console import setup_verbose_handlers
        setup_verbose_handlers()

    # set default prefix
    prefix = config.root_dir
    i = None  # progress counter; None while no progress bar is active
    cmds = cmds_from_plan(plan)

    for cmd, arg in cmds:
        if i is not None and cmd in progress_cmds:
            i += 1
            getLogger('progress.update').info((install.name_dist(arg), i))

        if cmd == PREFIX:
            prefix = arg
        elif cmd == PRINT:
            getLogger('print').info(arg)
        elif cmd == FETCH:
            fetch(index, arg)
        elif cmd == PROGRESS:
            # Start a progress section covering the next `maxval` ops.
            i = 0
            maxval = int(arg)
            getLogger('progress.start').info(maxval)
        elif cmd == EXTRACT:
            install.extract(config.pkgs_dirs[0], arg)
        elif cmd == RM_EXTRACTED:
            install.rm_extracted(config.pkgs_dirs[0], arg)
        elif cmd == RM_FETCHED:
            install.rm_fetched(config.pkgs_dirs[0], arg)
        elif cmd == LINK:
            link(prefix, arg, index=index)
        elif cmd == UNLINK:
            install.unlink(prefix, arg)
        elif cmd == SYMLINK_CONDA:
            install.symlink_conda(prefix, arg)
        else:
            raise Exception("Did not expect command: %r" % cmd)

        if i is not None and cmd in progress_cmds and maxval == i:
            # Last item of the current progress section: close the bar.
            i = None
            getLogger('progress.stop').info(None)

    install.messages(prefix)
def execute_actions(actions, index=None, verbose=False):
    # Convert the actions dict to a plan and run it, recording history
    # for the target prefix.
    plan = plan_from_actions(actions)
    with History(actions[PREFIX]):
        execute_plan(plan, index, verbose)
if __name__ == '__main__':
    # for testing new revert_actions() only
    from pprint import pprint
    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
<|code_end|>
| conda/cli/main_remove.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from argparse import RawDescriptionHelpFormatter
from conda.cli import common
help = "Remove a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution.
"""
example = """
examples:
conda remove -n myenv scipy
"""
def configure_parser(sub_parsers):
    # Register the 'remove' sub-command and its options on the given
    # argparse sub-parsers object.
    p = sub_parsers.add_parser(
        'remove',
        formatter_class = RawDescriptionHelpFormatter,
        description = descr,
        help = help,
        epilog = example,
    )
    common.add_parser_yes(p)
    p.add_argument(
        "--all",
        action = "store_true",
        help = "remove all packages, i.e. the entire environment",
    )
    p.add_argument(
        "--features",
        action = "store_true",
        help = "remove features (instead of packages)",
    )
    common.add_parser_no_pin(p)
    common.add_parser_channels(p)
    common.add_parser_prefix(p)
    common.add_parser_quiet(p)
    p.add_argument(
        'package_names',
        metavar = 'package_name',
        action = "store",
        nargs = '*',
        help = "package names to remove from environment",
    )
    # Dispatch to execute() when this sub-command is chosen.
    p.set_defaults(func=execute)
def execute(args, parser):
    # Entry point for 'conda remove'.  The channel index is always
    # fetched up front here (unlike older versions) so display_actions
    # can show channel and feature information.
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    # NOTE(review): `config` appears unused in this function — confirm
    # before removing.
    from conda import config

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 ' try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     ' add -n NAME or -p PREFIX option')
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        # Protect packages conda itself depends on in the root env.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing linked, but --all still means: delete the prefix.
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    if args.all:
        rm_rf(prefix)
conda/plan.py
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import re
import sys
from logging import getLogger
from collections import defaultdict
from os.path import abspath, isfile, join, exists
from conda import config
from conda import install
from conda.fetch import fetch_pkg
from conda.history import History
from conda.resolve import MatchSpec, Resolve, Package
from conda.utils import md5_file, human_bytes
log = getLogger(__name__)
# op codes
FETCH = 'FETCH'
EXTRACT = 'EXTRACT'
UNLINK = 'UNLINK'
LINK = 'LINK'
RM_EXTRACTED = 'RM_EXTRACTED'
RM_FETCHED = 'RM_FETCHED'
PREFIX = 'PREFIX'
PRINT = 'PRINT'
PROGRESS = 'PROGRESS'
SYMLINK_CONDA = 'SYMLINK_CONDA'
progress_cmds = set([EXTRACT, RM_EXTRACTED, LINK, UNLINK])
def print_dists(dists_extras):
    """Print a two-column (package | build) table of dists, appending an
    optional extra string to each row."""
    row = " %-27s|%17s"
    print(row % ('package', 'build'))
    print(row % ('-' * 27, '-' * 17))
    for dist, extra in dists_extras:
        name_ver, build = dist.rsplit('-', 1)
        text = row % (name_ver, build)
        if extra:
            text += extra
        print(text)
def split_linkarg(arg):
    "Return tuple(dist, pkgs_dir, linktype)"
    # Either a bare dist name, or "dist pkgs_dir linktype".
    match = re.match(r'\s*(\S+)(?:\s+(.+?)\s+(\d+))?\s*$', arg)
    dist, pkgs_dir, linktype = match.groups()
    if pkgs_dir is None:
        pkgs_dir = config.pkgs_dirs[0]
    if linktype is None:
        linktype = install.LINK_HARD
    return dist, pkgs_dir, int(linktype)
def display_actions(actions, index):
    # Print a human-readable summary of the plan: downloads with sizes,
    # then per-package tables of NEW / REMOVED / UPDATED / DOWNGRADED
    # entries with old --> new version columns.
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += ' %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))

    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(LINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][1] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        linktypes[pkg] = lt
        features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
    for arg in actions.get(UNLINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][0] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        features[pkg][0] = index[dist + '.tar.bz2'].get('features', '')

    # Column widths for aligned output; the `or [...]` fallbacks guard
    # against empty dicts.  +1 leaves room for the ':' after the name.
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(max(packages.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewver = len(max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(max(features.values() or [['']], key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(max(features.values() or [['', '']], key=lambda i: len(i[1]))[1])
    maxoldchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
        packages[pkg][0]].channel) for pkg in packages if packages[pkg][0]] or
        [''], key=len))
    maxnewchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
        packages[pkg][1]].channel) for pkg in packages if packages[pkg][1]] or
        [''], key=len))
    new = {pkg for pkg in packages if not packages[pkg][0]}
    removed = {pkg for pkg in packages if not packages[pkg][1]}
    updated = set()
    downgraded = set()

    # Per-package format strings for the old and new columns.
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if config.show_channel_urls:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if config.show_channel_urls:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]

        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures

        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = (P0.name, P0.norm_version, P0.build_number) <= (P1.name, P1.norm_version, P1.build_number)
        except TypeError:
            # Versions that cannot be normalized: compare raw strings.
            newer = (P0.name, P0.version, P0.build_number) <= (P1.name, P1.version, P1.build_number)
        if newer:
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' '*4

    # NOTE(review): shadows the builtin format(); rename would be nicer.
    def format(s, pkg):
        # Render one table row for `pkg` using format string `s`.
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = config.canonical_channel_name(Packages[pkg + '-' + packages[pkg][i]].channel)
        return lead + s.format(pkg=pkg+':', vers=packages[pkg],
            channel=channel, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            print(format(newfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
# the order matters here, don't change it
# (downloads and extraction first, link after unlink, cleanup ops last)
action_codes = FETCH, EXTRACT, UNLINK, LINK, SYMLINK_CONDA, RM_EXTRACTED, RM_FETCHED
def nothing_to_do(actions):
    """Return True when `actions` contains no non-empty operation list."""
    return not any(actions.get(op) for op in action_codes)
def plan_from_actions(actions):
    # Flatten an actions dict into a textual plan (one "OP arg" line per
    # step), honoring the per-actions 'op_order' override when present.
    if 'op_order' in actions and actions['op_order']:
        op_order = actions['op_order']
    else:
        op_order = action_codes

    assert PREFIX in actions and actions[PREFIX]
    res = ['# plan',
           'PREFIX %s' % actions[PREFIX]]
    for op in op_order:
        if op not in actions:
            continue
        if not actions[op]:
            continue
        if '_' not in op:
            # User-facing ops (FETCH, LINK, ...) get a banner line.
            res.append('PRINT %sing packages ...' % op.capitalize())
        if op in progress_cmds:
            res.append('PROGRESS %d' % len(actions[op]))
        for arg in actions[op]:
            res.append('%s %s' % (op, arg))
    return res
def extracted_where(dist):
    """Return the first package cache dir where `dist` is already
    extracted, or None when it is not extracted anywhere."""
    return next((cache_dir for cache_dir in config.pkgs_dirs
                 if install.is_extracted(cache_dir, dist)), None)
def ensure_linked_actions(dists, prefix):
    # Build the minimal FETCH/EXTRACT/LINK actions required so that every
    # dist in `dists` ends up linked into `prefix`.
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in dists:
        if install.is_linked(prefix, dist):
            continue
        extracted_in = extracted_where(dist)
        if extracted_in:
            # Already extracted somewhere: only a link step is needed.
            if install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                # Hard link impossible (e.g. cross-device): fall back to a
                # soft link where allowed, otherwise a full copy.
                lt = (install.LINK_SOFT if (config.allow_softlinks and
                                            sys.platform != 'win32') else
                      install.LINK_COPY)
            actions[LINK].append('%s %s %d' % (dist, extracted_in, lt))
            continue
        # Not extracted anywhere: extract (and possibly fetch) first.
        actions[LINK].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_fetched(config.pkgs_dirs[0], dist):
            continue
        actions[FETCH].append(dist)
    return actions
def force_linked_actions(dists, index, prefix):
    """Return actions that unconditionally re-extract and re-link every
    dist in `dists` into `prefix`.

    Cached tarballs are kept only when their MD5 matches the index
    record; otherwise they are removed and fetched again.
    """
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                if md5_file(pkg_path) != index[fn]['md5']:
                    # Corrupt or outdated cache entry: drop and re-fetch.
                    actions[RM_FETCHED].append(dist)
                    actions[FETCH].append(dist)
            except KeyError:
                # Fix: terminate the warning with a newline so it does
                # not run into subsequent console output.
                sys.stderr.write('Warning: cannot lookup MD5 of: %s\n' % fn)
        else:
            actions[FETCH].append(dist)
        # Always re-extract and re-link, unlinking any existing install.
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
    # True when `prefix` is conda's root environment directory.
    return abspath(prefix) == abspath(config.root_dir)
def dist2spec3v(dist):
    """Turn a dist name like 'numpy-1.7.1-py27_0' into a spec pinning
    the major.minor version: 'numpy 1.7*'.

    Uses the first two version components rather than the first three
    characters, so multi-digit components (e.g. '1.10.2' -> '1.10*')
    are pinned correctly.
    """
    name, version, unused_build = dist.rsplit('-', 2)
    return '%s %s*' % (name, '.'.join(version.split('.')[:2]))
def add_defaults_to_specs(r, linked, specs):
    """Append a default Python (and potentially NumPy) spec to `specs`
    in place, when appropriate.

    `r` is a Resolve instance, `linked` the dists already linked into the
    target prefix.  A default spec is only added when something requested
    depends on the package or the package was named without a version; an
    already-linked version takes precedence over the configured default.
    """
    # TODO: This should use the pinning mechanism. But don't change the API:
    # cas uses it.
    if r.explicit(specs):
        return
    log.debug('H0 specs=%r' % specs)
    names_linked = {install.name_dist(dist): dist for dist in linked}
    names_ms = {MatchSpec(s).name: MatchSpec(s) for s in specs}
    for name, def_ver in [('python', config.default_python),]:
                          #('numpy', config.default_numpy)]:
        ms = names_ms.get(name)
        if ms and ms.strictness > 1:
            # if any of the specifications mention the Python/Numpy version,
            # we don't need to add the default spec
            log.debug('H1 %s' % name)
            continue
        # Does anything requested (at its maximal versions) depend on `name`?
        any_depends_on = any(ms2.name == name
                             for spec in specs
                             for fn in r.get_max_dists(MatchSpec(spec))
                             for ms2 in r.ms_depends(fn))
        log.debug('H2 %s %s' % (name, any_depends_on))
        if not any_depends_on and name not in names_ms:
            # if nothing depends on Python/Numpy AND the Python/Numpy is not
            # specified, we don't need to add the default spec
            log.debug('H2A %s' % name)
            continue
        if (any_depends_on and len(specs) >= 1 and
                MatchSpec(specs[0]).strictness == 3):
            # if something depends on Python/Numpy, but the spec is very
            # explicit, we also don't need to add the default spec
            log.debug('H2B %s' % name)
            continue
        if name in names_linked:
            # if Python/Numpy is already linked, we add that instead of the
            # default
            log.debug('H3 %s' % name)
            specs.append(dist2spec3v(names_linked[name]))
            continue
        if (name, def_ver) in [('python', '3.3'), ('python', '3.4')]:
            # Don't include Python 3 in the specs if this is the Python 3
            # version of conda.
            continue
        specs.append('%s %s*' % (name, def_ver))
    log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
    """Return the non-empty spec lines from <prefix>/conda-meta/pinned,
    or an empty list when that file does not exist."""
    path = join(prefix, 'conda-meta', 'pinned')
    if not exists(path):
        return []
    with open(path) as fh:
        return [line for line in fh.read().strip().split('\n') if line]
def install_actions(prefix, index, specs, force=False, only_names=None, pinned=True, minimal_hint=False):
    """Compute the fetch/extract/link/unlink plan that satisfies `specs`
    in `prefix`, resolving against the package `index`.

    force        -- re-fetch/re-extract/re-link even when cached
    only_names   -- restrict the plan to these package names
    pinned       -- honour the prefix's conda-meta/pinned file
    minimal_hint -- forwarded to the resolver's hint generation
    """
    r = Resolve(index)
    linked = install.linked(prefix)
    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')
    add_defaults_to_specs(r, linked, specs)
    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        specs += pinned_specs
    # TODO: Improve error messages here
    must_have = {}  # package name -> resolved dist
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features, minimal_hint=minimal_hint):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist
    if is_root_prefix(prefix):
        if install.on_win:
            # These packages cannot be replaced while conda runs on Windows.
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    else:
        # discard conda from other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into "
                     "root environment")
    smh = sorted(must_have.values())
    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)
    if actions[LINK] and sys.platform != 'win32':
        actions[SYMLINK_CONDA] = [config.root_dir]
    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            # A different version/build of this package is wanted:
            # unlink the currently linked one.
            actions[UNLINK].append(dist)
    return actions
def remove_actions(prefix, specs, pinned=True):
    """Build the UNLINK actions for every linked dist matching `specs`.

    Raises RuntimeError when a matching dist is pinned and `pinned` is True.
    """
    match_specs = [MatchSpec(s) for s in specs]
    pin_specs = get_pinned_specs(prefix)
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in sorted(install.linked(prefix)):
        fn = '%s.tar.bz2' % dist
        if not any(ms.match(fn) for ms in match_specs):
            continue
        if pinned and any(MatchSpec(s).match(fn) for s in pin_specs):
            raise RuntimeError("Cannot remove %s because it is pinned. Use --no-pin to override." % dist)
        actions[UNLINK].append(dist)
    return actions
def remove_features_actions(prefix, index, features):
    """Build actions that remove `features` from `prefix`: unlink every
    linked package that tracks or carries one of the features, and link a
    feature-free substitute where one exists."""
    linked = install.linked(prefix)
    r = Resolve(index)
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            continue  # unknown to the index; leave it alone
        if r.track_features(fn).intersection(features):
            actions[UNLINK].append(dist)
        if r.features(fn).intersection(features):
            actions[UNLINK].append(dist)
            # Try to replace it with an equivalent package lacking the
            # unwanted features.
            subst = r.find_substitute(_linked, features, fn)
            if subst:
                to_link.append(subst[:-8])
    if to_link:
        actions.update(ensure_linked_actions(to_link, prefix))
    return actions
def revert_actions(prefix, revision=-1):
    """Compute the link/unlink actions that restore `prefix` to the state
    recorded at history `revision` (default: the latest)."""
    hist = History(prefix)
    hist.update()
    try:
        target = hist.get_state(revision)
    except IndexError:
        sys.exit("Error: no such revision: %d" % revision)
    current = hist.get_state()
    if target == current:
        return {}
    actions = ensure_linked_actions(target, prefix)
    for dist in current - target:
        actions[UNLINK].append(dist)
    return actions
# ---------------------------- EXECUTION --------------------------
def fetch(index, dist):
    """Download the tarball for `dist` using its entry in `index`."""
    assert index is not None
    fetch_pkg(index['%s.tar.bz2' % dist])
def link(prefix, arg, index=None):
    """Link one dist (encoded in `arg` as 'dist [pkgs_dir [linktype]]')
    into `prefix`."""
    dist, pkgs_dir, linktype = split_linkarg(arg)
    install.link(pkgs_dir, prefix, dist, linktype, index=index)
def cmds_from_plan(plan):
    """Split a textual plan into [command, argument] pairs, skipping blank
    lines and '#' comment lines."""
    commands = []
    for raw in plan:
        log.debug(' %s' % raw)
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            commands.append(stripped.split(None, 1))
    return commands
def execute_plan(plan, index=None, verbose=False):
    """Execute a textual plan, dispatching each command line to the matching
    fetch/extract/link/unlink operation and reporting progress.

    The PREFIX command switches the target prefix (default: root dir);
    PROGRESS starts a progress bar spanning the next `arg` commands.
    Raises Exception on an unrecognized command.
    """
    if verbose:
        from conda.console import setup_verbose_handlers
        setup_verbose_handlers()
    # set default prefix
    prefix = config.root_dir
    i = None  # progress counter; None while no progress bar is active
    cmds = cmds_from_plan(plan)
    for cmd, arg in cmds:
        if i is not None and cmd in progress_cmds:
            i += 1
            getLogger('progress.update').info((install.name_dist(arg), i))
        if cmd == PREFIX:
            prefix = arg
        elif cmd == PRINT:
            getLogger('print').info(arg)
        elif cmd == FETCH:
            fetch(index, arg)
        elif cmd == PROGRESS:
            i = 0
            maxval = int(arg)
            getLogger('progress.start').info(maxval)
        elif cmd == EXTRACT:
            install.extract(config.pkgs_dirs[0], arg)
        elif cmd == RM_EXTRACTED:
            install.rm_extracted(config.pkgs_dirs[0], arg)
        elif cmd == RM_FETCHED:
            install.rm_fetched(config.pkgs_dirs[0], arg)
        elif cmd == LINK:
            link(prefix, arg, index=index)
        elif cmd == UNLINK:
            install.unlink(prefix, arg)
        elif cmd == SYMLINK_CONDA:
            install.symlink_conda(prefix, arg)
        else:
            raise Exception("Did not expect command: %r" % cmd)
        if i is not None and cmd in progress_cmds and maxval == i:
            # Progress bar complete: stop it and reset the counter.
            i = None
            getLogger('progress.stop').info(None)
    install.messages(prefix)
def execute_actions(actions, index=None, verbose=False):
    """Render `actions` into a textual plan and run it, recording the change
    in the target prefix's history."""
    rendered = plan_from_actions(actions)
    with History(actions[PREFIX]):
        execute_plan(rendered, index, verbose)
if __name__ == '__main__':
    # for testing new revert_actions() only
    # Manual smoke test: print the revert plan for the revision number
    # given as the first command-line argument, against sys.prefix.
    from pprint import pprint
    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
| conda/cli/main_remove.py
--- a/conda/cli/main_remove.py
+++ b/conda/cli/main_remove.py
@@ -63,6 +63,7 @@ def execute(args, parser):
from conda.api import get_index
from conda.cli import pscheck
from conda.install import rm_rf, linked
+ from conda import config
if not (args.all or args.package_names):
sys.exit('Error: no package names supplied,\n'
@@ -71,12 +72,11 @@ def execute(args, parser):
prefix = common.get_prefix(args)
common.check_write('remove', prefix)
- index = None
+ common.ensure_override_channels_requires_channel(args)
+ channel_urls = args.channel or ()
+ index = get_index(channel_urls=channel_urls,
+ prepend=not args.override_channels)
if args.features:
- common.ensure_override_channels_requires_channel(args)
- channel_urls = args.channel or ()
- index = get_index(channel_urls=channel_urls,
- prepend=not args.override_channels)
features = set(args.package_names)
actions = plan.remove_features_actions(prefix, index, features)
conda/plan.py
--- a/conda/plan.py
+++ b/conda/plan.py
@@ -20,7 +20,7 @@
from conda import install
from conda.fetch import fetch_pkg
from conda.history import History
-from conda.resolve import MatchSpec, Resolve
+from conda.resolve import MatchSpec, Resolve, Package
from conda.utils import md5_file, human_bytes
log = getLogger(__name__)
@@ -60,7 +60,7 @@ def split_linkarg(arg):
linktype = install.LINK_HARD
return dist, pkgs_dir, int(linktype)
-def display_actions(actions, index=None):
+def display_actions(actions, index):
if actions.get(FETCH):
print("\nThe following packages will be downloaded:\n")
@@ -79,19 +79,113 @@ def display_actions(actions, index=None):
print(" " * 43 + "Total: %14s" %
human_bytes(sum(index[dist + '.tar.bz2']['size']
for dist in actions[FETCH])))
- if actions.get(UNLINK):
- print("\nThe following packages will be UN-linked:\n")
- print_dists([
- (dist, None)
- for dist in actions[UNLINK]])
- if actions.get(LINK):
- print("\nThe following packages will be linked:\n")
- lst = []
- for arg in actions[LINK]:
- dist, pkgs_dir, lt = split_linkarg(arg)
- extra = ' %s' % install.link_name_map.get(lt)
- lst.append((dist, extra))
- print_dists(lst)
+
+ # package -> [oldver-oldbuild, newver-newbuild]
+ packages = defaultdict(lambda: list(('', '')))
+ features = defaultdict(lambda: list(('', '')))
+
+ # This assumes each package will appear in LINK no more than once.
+ Packages = {}
+ linktypes = {}
+ for arg in actions.get(LINK, []):
+ dist, pkgs_dir, lt = split_linkarg(arg)
+ pkg, ver, build = dist.rsplit('-', 2)
+ packages[pkg][1] = ver + '-' + build
+ Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
+ linktypes[pkg] = lt
+ features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
+ for arg in actions.get(UNLINK, []):
+ dist, pkgs_dir, lt = split_linkarg(arg)
+ pkg, ver, build = dist.rsplit('-', 2)
+ packages[pkg][0] = ver + '-' + build
+ Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
+ features[pkg][0] = index[dist + '.tar.bz2'].get('features', '')
+
+ # Put a minimum length here---. .--For the :
+ # v v
+ maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
+ maxoldver = len(max(packages.values() or [['']], key=lambda i: len(i[0]))[0])
+ maxnewver = len(max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1])
+ maxoldfeatures = len(max(features.values() or [['']], key=lambda i: len(i[0]))[0])
+ maxnewfeatures = len(max(features.values() or [['', '']], key=lambda i: len(i[1]))[1])
+ maxoldchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
+ packages[pkg][0]].channel) for pkg in packages if packages[pkg][0]] or
+ [''], key=len))
+ maxnewchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
+ packages[pkg][1]].channel) for pkg in packages if packages[pkg][1]] or
+ [''], key=len))
+ new = {pkg for pkg in packages if not packages[pkg][0]}
+ removed = {pkg for pkg in packages if not packages[pkg][1]}
+ updated = set()
+ downgraded = set()
+ oldfmt = {}
+ newfmt = {}
+ for pkg in packages:
+ # That's right. I'm using old-style string formatting to generate a
+ # string with new-style string formatting.
+ oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
+ if config.show_channel_urls:
+ oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
+ if packages[pkg][0]:
+ newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
+ else:
+ newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
+ if config.show_channel_urls:
+ newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
+ # TODO: Should we also care about the old package's link type?
+ if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
+ newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]
+
+ if features[pkg][0]:
+ oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
+ if features[pkg][1]:
+ newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
+
+ if pkg in new or pkg in removed:
+ continue
+ P0 = Packages[pkg + '-' + packages[pkg][0]]
+ P1 = Packages[pkg + '-' + packages[pkg][1]]
+ try:
+ # <= here means that unchanged packages will be put in updated
+ newer = (P0.name, P0.norm_version, P0.build_number) <= (P1.name, P1.norm_version, P1.build_number)
+ except TypeError:
+ newer = (P0.name, P0.version, P0.build_number) <= (P1.name, P1.version, P1.build_number)
+ if newer:
+ updated.add(pkg)
+ else:
+ downgraded.add(pkg)
+
+ arrow = ' --> '
+ lead = ' '*4
+
+ def format(s, pkg):
+ channel = ['', '']
+ for i in range(2):
+ if packages[pkg][i]:
+ channel[i] = config.canonical_channel_name(Packages[pkg + '-' + packages[pkg][i]].channel)
+ return lead + s.format(pkg=pkg+':', vers=packages[pkg],
+ channel=channel, features=features[pkg])
+
+ if new:
+ print("\nThe following NEW packages will be INSTALLED:\n")
+ for pkg in sorted(new):
+ print(format(newfmt[pkg], pkg))
+
+ if removed:
+ print("\nThe following packages will be REMOVED:\n")
+ for pkg in sorted(removed):
+ print(format(oldfmt[pkg], pkg))
+
+ if updated:
+ print("\nThe following packages will be UPDATED:\n")
+ for pkg in sorted(updated):
+ print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
+
+ if downgraded:
+ print("\nThe following packages will be DOWNGRADED:\n")
+ for pkg in sorted(downgraded):
+ print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
+
print()
# the order matters here, don't change it |
NoPackagesFound does not work correctly for missing recursive dependencies
| conda/resolve.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
version = version.replace('rc', '.dev99999')
if version.endswith('.dev'):
version += '0'
try:
return verlib.NormalizedVersion(version)
except verlib.IrrationalVersionError:
return version
class NoPackagesFound(RuntimeError):
def __init__(self, msg, pkg):
super(NoPackagesFound, self).__init__(msg)
self.pkg = pkg
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')
def ver_eval(version, constraint):
"""
return the Boolean result of a comparison between two versions, where the
second argument includes the comparison operator. For example,
ver_eval('1.2', '>=1.1') will return True.
"""
a = version
m = const_pat.match(constraint)
if m is None:
raise RuntimeError("Did not recognize version specification: %r" %
constraint)
op, b = m.groups()
na = normalized_version(a)
nb = normalized_version(b)
if op == '==':
try:
return na == nb
except TypeError:
return a == b
elif op == '>=':
try:
return na >= nb
except TypeError:
return a >= b
elif op == '<=':
try:
return na <= nb
except TypeError:
return a <= b
elif op == '>':
try:
return na > nb
except TypeError:
return a > b
elif op == '<':
try:
return na < nb
except TypeError:
return a < b
elif op == '!=':
try:
return na != nb
except TypeError:
return a != b
else:
raise RuntimeError("Did not recognize version comparison operator: %r" %
constraint)
class VersionSpec(object):
def __init__(self, spec):
assert '|' not in spec
if spec.startswith(('=', '<', '>', '!')):
self.regex = False
self.constraints = spec.split(',')
else:
self.regex = True
rx = spec.replace('.', r'\.')
rx = rx.replace('*', r'.*')
rx = r'(%s)$' % rx
self.pat = re.compile(rx)
def match(self, version):
if self.regex:
return bool(self.pat.match(version))
else:
return all(ver_eval(version, c) for c in self.constraints)
class MatchSpec(object):
def __init__(self, spec):
self.spec = spec
parts = spec.split()
self.strictness = len(parts)
assert 1 <= self.strictness <= 3
self.name = parts[0]
if self.strictness == 2:
self.vspecs = [VersionSpec(s) for s in parts[1].split('|')]
elif self.strictness == 3:
self.ver_build = tuple(parts[1:3])
def match(self, fn):
assert fn.endswith('.tar.bz2')
name, version, build = fn[:-8].rsplit('-', 2)
if name != self.name:
return False
if self.strictness == 1:
return True
elif self.strictness == 2:
return any(vs.match(version) for vs in self.vspecs)
elif self.strictness == 3:
return bool((version, build) == self.ver_build)
def to_filename(self):
if self.strictness == 3:
return self.name + '-%s-%s.tar.bz2' % self.ver_build
else:
return None
def __eq__(self, other):
return self.spec == other.spec
def __hash__(self):
return hash(self.spec)
def __repr__(self):
return 'MatchSpec(%r)' % (self.spec)
def __str__(self):
return self.spec
class Package(object):
"""
The only purpose of this class is to provide package objects which
are sortable.
"""
def __init__(self, fn, info):
self.fn = fn
self.name = info['name']
self.version = info['version']
self.build_number = info['build_number']
self.build = info['build']
self.channel = info.get('channel')
self.norm_version = normalized_version(self.version)
# http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp
# def __cmp__(self, other):
# if self.name != other.name:
# raise ValueError('cannot compare packages with different '
# 'names: %r %r' % (self.fn, other.fn))
# try:
# return cmp((self.norm_version, self.build_number),
# (other.norm_version, other.build_number))
# except TypeError:
# return cmp((self.version, self.build_number),
# (other.version, other.build_number))
def __lt__(self, other):
if self.name != other.name:
raise TypeError('cannot compare packages with different '
'names: %r %r' % (self.fn, other.fn))
try:
return ((self.norm_version, self.build_number, other.build) <
(other.norm_version, other.build_number, self.build))
except TypeError:
return ((self.version, self.build_number) <
(other.version, other.build_number))
def __eq__(self, other):
if not isinstance(other, Package):
return False
if self.name != other.name:
return False
try:
return ((self.norm_version, self.build_number, self.build) ==
(other.norm_version, other.build_number, other.build))
except TypeError:
return ((self.version, self.build_number, self.build) ==
(other.version, other.build_number, other.build))
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self < other or self == other
def __ge__(self, other):
return self > other or self == other
def __repr__(self):
return '<Package %s>' % self.fn
class Resolve(object):
def __init__(self, index):
self.index = index
self.groups = defaultdict(list) # map name to list of filenames
for fn, info in iteritems(index):
self.groups[info['name']].append(fn)
self.msd_cache = {}
def find_matches(self, ms):
for fn in sorted(self.groups[ms.name]):
if ms.match(fn):
yield fn
def ms_depends(self, fn):
# the reason we don't use @memoize here is to allow resetting the
# cache using self.msd_cache = {}, which is used during testing
try:
res = self.msd_cache[fn]
except KeyError:
if not 'depends' in self.index[fn]:
raise NoPackagesFound('Bad metadata for %s' % fn, fn)
depends = self.index[fn]['depends']
res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
return res
@memoize
def features(self, fn):
return set(self.index[fn].get('features', '').split())
@memoize
def track_features(self, fn):
return set(self.index[fn].get('track_features', '').split())
@memoize
def get_pkgs(self, ms, max_only=False):
pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
if not pkgs:
raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
if max_only:
maxpkg = max(pkgs)
ret = []
for pkg in pkgs:
try:
if (pkg.name, pkg.norm_version, pkg.build_number) ==\
(maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
ret.append(pkg)
except TypeError:
# They are not equal
pass
return ret
return pkgs
def get_max_dists(self, ms):
pkgs = self.get_pkgs(ms, max_only=True)
if not pkgs:
raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
for pkg in pkgs:
yield pkg.fn
def all_deps(self, root_fn, max_only=False):
res = {}
def add_dependents(fn1, max_only=False):
for ms in self.ms_depends(fn1):
for pkg2 in self.get_pkgs(ms, max_only=max_only):
if pkg2.fn in res:
continue
res[pkg2.fn] = pkg2
if ms.strictness < 3:
add_dependents(pkg2.fn, max_only=max_only)
add_dependents(root_fn, max_only=max_only)
return res
def gen_clauses(self, v, dists, specs, features):
groups = defaultdict(list) # map name to list of filenames
for fn in dists:
groups[self.index[fn]['name']].append(fn)
for filenames in itervalues(groups):
# ensure packages with the same name conflict
for fn1 in filenames:
v1 = v[fn1]
for fn2 in filenames:
v2 = v[fn2]
if v1 < v2:
# NOT (fn1 AND fn2)
# e.g. NOT (numpy-1.6 AND numpy-1.7)
yield [-v1, -v2]
for fn1 in dists:
for ms in self.ms_depends(fn1):
# ensure dependencies are installed
# e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
clause = [-v[fn1]]
for fn2 in self.find_matches(ms):
if fn2 in dists:
clause.append(v[fn2])
assert len(clause) > 1, '%s %r' % (fn1, ms)
yield clause
for feat in features:
# ensure that a package (with required name) which has
# the feature is installed
# e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
clause = [-v[fn1]]
for fn2 in groups[ms.name]:
if feat in self.features(fn2):
clause.append(v[fn2])
if len(clause) > 1:
yield clause
for spec in specs:
ms = MatchSpec(spec)
# ensure that a matching package with the feature is installed
for feat in features:
# numpy-1.7[mkl] OR numpy-1.8[mkl]
clause = [v[fn] for fn in self.find_matches(ms)
if fn in dists and feat in self.features(fn)]
if len(clause) > 0:
yield clause
# Don't install any package that has a feature that wasn't requested.
for fn in self.find_matches(ms):
if fn in dists and self.features(fn) - features:
yield [-v[fn]]
# finally, ensure a matching package itself is installed
# numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
# numpy-1.7-py27[mkl] OR ...
clause = [v[fn] for fn in self.find_matches(ms)
if fn in dists]
assert len(clause) >= 1, ms
yield clause
def generate_version_eq(self, v, dists, include0=False):
groups = defaultdict(list) # map name to list of filenames
for fn in sorted(dists):
groups[self.index[fn]['name']].append(fn)
eq = []
max_rhs = 0
for filenames in sorted(itervalues(groups)):
pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
i = 0
prev = pkgs[0]
for pkg in pkgs:
try:
if (dists[pkg].name, dists[pkg].norm_version,
dists[pkg].build_number) != (dists[prev].name,
dists[prev].norm_version, dists[prev].build_number):
i += 1
except TypeError:
i += 1
if i or include0:
eq += [(i, v[pkg])]
prev = pkg
max_rhs += i
return eq, max_rhs
def get_dists(self, specs, max_only=False):
dists = {}
for spec in specs:
found = False
notfound = []
for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
if pkg.fn in dists:
found = True
continue
try:
dists.update(self.all_deps(pkg.fn, max_only=max_only))
except NoPackagesFound as e:
# Ignore any package that has nonexisting dependencies.
if e.pkg not in notfound:
notfound.append(e.pkg)
else:
dists[pkg.fn] = pkg
found = True
if not found:
raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
return dists
def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
log.debug("Solving for %s" % str(specs))
# First try doing it the "old way", i.e., just look at the most recent
# version of each package from the specs. This doesn't handle the more
# complicated cases that the pseudo-boolean solver does, but it's also
# much faster when it does work.
try:
dists = self.get_dists(specs, max_only=True)
except NoPackagesFound:
# Handle packages that are not included because some dependencies
# couldn't be found.
pass
else:
v = {} # map fn to variable number
w = {} # map variable number to fn
i = -1 # in case the loop doesn't run
for i, fn in enumerate(sorted(dists)):
v[fn] = i + 1
w[i + 1] = fn
m = i + 1
dotlog.debug("Solving using max dists only")
clauses = self.gen_clauses(v, dists, specs, features)
solutions = min_sat(clauses)
if len(solutions) == 1:
ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
if returnall:
return [ret]
return ret
dists = self.get_dists(specs)
v = {} # map fn to variable number
w = {} # map variable number to fn
i = -1 # in case the loop doesn't run
for i, fn in enumerate(sorted(dists)):
v[fn] = i + 1
w[i + 1] = fn
m = i + 1
clauses = list(self.gen_clauses(v, dists, specs, features))
if not clauses:
if returnall:
return [[]]
return []
eq, max_rhs = self.generate_version_eq(v, dists)
# Check the common case first
dotlog.debug("Building the constraint with rhs: [0, 0]")
constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))
# Only relevant for build_BDD
if constraints and constraints[0] == [false]:
# XXX: This should *never* happen. build_BDD only returns false
# when the linear constraint is unsatisfiable, but any linear
# constraint can equal 0, by setting all the variables to 0.
solution = []
else:
if constraints and constraints[0] == [true]:
constraints = []
dotlog.debug("Checking for solutions with rhs: [0, 0]")
solution = sat(clauses + constraints)
if not solution:
# Second common case, check if it's unsatisfiable
dotlog.debug("Checking for unsatisfiability")
solution = sat(clauses)
if not solution:
if guess:
stderrlog.info('\nError: Unsatisfiable package '
'specifications.\nGenerating hint: ')
sys.exit(self.guess_bad_solve(specs, features))
raise RuntimeError("Unsatisfiable package specifications")
def version_constraints(lo, hi):
return list(generate_constraints(eq, m, [lo, hi], alg=alg))
log.debug("Bisecting the version constraint")
constraints = bisect_constraints(0, max_rhs, clauses, version_constraints)
dotlog.debug("Finding the minimal solution")
solutions = min_sat(clauses + constraints, N=m+1)
assert solutions, (specs, features)
if len(solutions) > 1:
print('Warning:', len(solutions), "possible package resolutions:")
for sol in solutions:
print('\t', [w[lit] for lit in sol if 0 < lit <= m])
if returnall:
return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
def guess_bad_solve(self, specs, features):
# TODO: Check features as well
hint = []
# Try to find the largest satisfiable subset
found = False
for i in range(len(specs), 0, -1):
if found:
break
for comb in combinations(specs, i):
try:
self.solve2(comb, features, guess=False)
except RuntimeError:
pass
else:
rem = set(specs) - set(comb)
rem.discard('conda')
if len(rem) == 1:
hint.append("%s" % rem.pop())
else:
hint.append("%s" % ' and '.join(rem))
found = True
if not hint:
return ''
if len(hint) == 1:
return ("\nHint: %s has a conflict with the remaining packages" %
hint[0])
return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
- %s""" % '\n - '.join(hint))
def explicit(self, specs):
"""
Given the specifications, return:
A. if one explicit specification (strictness=3) is given, and
all dependencies of this package are explicit as well ->
return the filenames of those dependencies (as well as the
explicit specification)
B. if not one explicit specifications are given ->
return the filenames of those (not thier dependencies)
C. None in all other cases
"""
if len(specs) == 1:
ms = MatchSpec(specs[0])
fn = ms.to_filename()
if fn is None:
return None
res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
res.append(fn)
else:
res = [MatchSpec(spec).to_filename() for spec in specs
if spec != 'conda']
if None in res:
return None
res.sort()
log.debug('explicit(%r) finished' % specs)
return res
@memoize
def sum_matches(self, fn1, fn2):
return sum(ms.match(fn2) for ms in self.ms_depends(fn1))
def find_substitute(self, installed, features, fn, max_only=False):
"""
Find a substitute package for `fn` (given `installed` packages)
which does *NOT* have `features`. If found, the substitute will
have the same package name and version and its dependencies will
match the installed packages as closely as possible.
If no substitute is found, None is returned.
"""
name, version, unused_build = fn.rsplit('-', 2)
candidates = {}
for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
fn1 = pkg.fn
if self.features(fn1).intersection(features):
continue
key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
candidates[key] = fn1
if candidates:
maxkey = max(candidates)
return candidates[maxkey]
else:
return None
def installed_features(self, installed):
"""
Return the set of all features of all `installed` packages,
"""
res = set()
for fn in installed:
try:
res.update(self.features(fn))
except KeyError:
pass
return res
def update_with_features(self, fn, features):
with_features = self.index[fn].get('with_features_depends')
if with_features is None:
return
key = ''
for fstr in with_features:
fs = set(fstr.split())
if fs <= features and len(fs) > len(set(key.split())):
key = fstr
if not key:
return
d = {ms.name: ms for ms in self.ms_depends(fn)}
for spec in with_features[key]:
ms = MatchSpec(spec)
d[ms.name] = ms
self.msd_cache[fn] = d.values()
def solve(self, specs, installed=None, features=None, max_only=False):
if installed is None:
installed = []
if features is None:
features = self.installed_features(installed)
for spec in specs:
ms = MatchSpec(spec)
for pkg in self.get_pkgs(ms, max_only=max_only):
fn = pkg.fn
features.update(self.track_features(fn))
log.debug('specs=%r features=%r' % (specs, features))
for spec in specs:
for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
fn = pkg.fn
self.update_with_features(fn, features)
stdoutlog.info("Solving package specifications: ")
try:
return self.explicit(specs) or self.solve2(specs, features)
except RuntimeError:
stdoutlog.info('\n')
raise
if __name__ == '__main__':
import json
from pprint import pprint
from optparse import OptionParser
from conda.cli.common import arg2spec
with open('../tests/index.json') as fi:
r = Resolve(json.load(fi))
p = OptionParser(usage="usage: %prog [options] SPEC(s)")
p.add_option("--mkl", action="store_true")
opts, args = p.parse_args()
features = set(['mkl']) if opts.mkl else set()
specs = [arg2spec(arg) for arg in args]
pprint(r.solve(specs, [], features))
<|code_end|>
| conda/resolve.py
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
version = version.replace('rc', '.dev99999')
if version.endswith('.dev'):
version += '0'
try:
return verlib.NormalizedVersion(version)
except verlib.IrrationalVersionError:
return version
class NoPackagesFound(RuntimeError):
def __init__(self, msg, pkg):
super(NoPackagesFound, self).__init__(msg)
self.pkg = pkg
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')
def ver_eval(version, constraint):
"""
return the Boolean result of a comparison between two versions, where the
second argument includes the comparison operator. For example,
ver_eval('1.2', '>=1.1') will return True.
"""
a = version
m = const_pat.match(constraint)
if m is None:
raise RuntimeError("Did not recognize version specification: %r" %
constraint)
op, b = m.groups()
na = normalized_version(a)
nb = normalized_version(b)
if op == '==':
try:
return na == nb
except TypeError:
return a == b
elif op == '>=':
try:
return na >= nb
except TypeError:
return a >= b
elif op == '<=':
try:
return na <= nb
except TypeError:
return a <= b
elif op == '>':
try:
return na > nb
except TypeError:
return a > b
elif op == '<':
try:
return na < nb
except TypeError:
return a < b
elif op == '!=':
try:
return na != nb
except TypeError:
return a != b
else:
raise RuntimeError("Did not recognize version comparison operator: %r" %
constraint)
class VersionSpec(object):
    """One version pattern from a spec string (no '|' alternatives).

    Specs beginning with a comparison character ('=', '<', '>', '!')
    are treated as comma-separated constraints evaluated by ver_eval();
    anything else is a glob-style pattern where '*' matches any run of
    characters and the whole version must match.
    """
    def __init__(self, spec):
        assert '|' not in spec
        if spec.startswith(('=', '<', '>', '!')):
            self.regex = False
            self.constraints = spec.split(',')
        else:
            self.regex = True
            escaped = spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(r'(%s)$' % escaped)
    def match(self, version):
        """Return True if *version* satisfies this spec."""
        if not self.regex:
            return all(ver_eval(version, c) for c in self.constraints)
        return bool(self.pat.match(version))
class MatchSpec(object):
    """A package specification string: 'name', 'name version', or
    'name version build'.

    The number of space-separated parts sets the strictness:
      1 -> match by name only
      2 -> name plus '|'-separated version patterns
      3 -> exact (name, version, build) triple
    """
    def __init__(self, spec):
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3
        self.name = parts[0]
        if self.strictness == 2:
            self.vspecs = [VersionSpec(s) for s in parts[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])
    def match(self, fn):
        """Return True if package filename *fn* satisfies this spec."""
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        if self.strictness == 3:
            return bool((version, build) == self.ver_build)
        # strictness 1: the name alone is enough
        return True
    def to_filename(self):
        """Return the exact filename for strictness-3 specs, else None."""
        if self.strictness != 3:
            return None
        return self.name + '-%s-%s.tar.bz2' % self.ver_build
    def __eq__(self, other):
        return self.spec == other.spec
    def __hash__(self):
        return hash(self.spec)
    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)
    def __str__(self):
        return self.spec
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """
    def __init__(self, fn, info):
        # fn is the package filename; info is its repodata record.
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        # Pre-computed for fast, PEP 386-aware ordering.
        self.norm_version = normalized_version(self.version)
    # http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp
    # def __cmp__(self, other):
    #     if self.name != other.name:
    #         raise ValueError('cannot compare packages with different '
    #                          'names: %r %r' % (self.fn, other.fn))
    #     try:
    #         return cmp((self.norm_version, self.build_number),
    #                    (other.norm_version, other.build_number))
    #     except TypeError:
    #         return cmp((self.version, self.build_number),
    #                    (other.version, other.build_number))
    def __lt__(self, other):
        # Packages are only ordered within a single name; mixing names
        # is a programming error.
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        try:
            # NOTE(review): 'other.build' appears in self's tuple and
            # 'self.build' in other's, inverting the build-string
            # tie-break relative to version/build_number -- confirm this
            # asymmetry is intentional.
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            # norm_version forms not mutually comparable; fall back to
            # comparing the raw version strings.
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))
    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))
    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))
    def __le__(self, other):
        return self < other or self == other
    def __ge__(self, other):
        return self > other or self == other
    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
    """Dependency resolver over a conda package index.

    ``index`` maps package filenames ('name-version-build.tar.bz2') to
    their repodata records.  solve()/solve2() turn match specifications
    into a concrete set of filenames by encoding the problem as a
    pseudo-boolean SAT instance (see conda.logic).
    """
    def __init__(self, index):
        self.index = index
        self.groups = defaultdict(list)  # map name to list of filenames
        for fn, info in iteritems(index):
            self.groups[info['name']].append(fn)
        # Manually managed cache for ms_depends(); tests reset it by
        # assigning a fresh {}.
        self.msd_cache = {}
    def find_matches(self, ms):
        """Yield (in sorted order) filenames matching MatchSpec *ms*."""
        for fn in sorted(self.groups[ms.name]):
            if ms.match(fn):
                yield fn
    def ms_depends(self, fn):
        """Return the list of MatchSpec dependencies of package *fn*.

        Raises NoPackagesFound when the index record lacks a 'depends'
        field (broken metadata), so bad builds can be excluded instead
        of aborting the whole solve.
        """
        # the reason we don't use @memoize here is to allow resetting the
        # cache using self.msd_cache = {}, which is used during testing
        try:
            res = self.msd_cache[fn]
        except KeyError:
            if not 'depends' in self.index[fn]:
                raise NoPackagesFound('Bad metadata for %s' % fn, fn)
            depends = self.index[fn]['depends']
            res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
        return res
    @memoize
    def features(self, fn):
        """Return the (possibly empty) set of features of package *fn*."""
        return set(self.index[fn].get('features', '').split())
    @memoize
    def track_features(self, fn):
        """Return the set of features that package *fn* tracks."""
        return set(self.index[fn].get('track_features', '').split())
    @memoize
    def get_pkgs(self, ms, max_only=False):
        """Return Package objects matching *ms*.

        With max_only=True, only builds sharing the maximal
        (name, norm_version, build_number) are returned.  Raises
        NoPackagesFound when nothing matches at all.
        """
        pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
        if not pkgs:
            raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
        if max_only:
            maxpkg = max(pkgs)
            ret = []
            for pkg in pkgs:
                try:
                    if (pkg.name, pkg.norm_version, pkg.build_number) == \
                       (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                        ret.append(pkg)
                except TypeError:
                    # They are not equal
                    pass
            return ret
        return pkgs
    def get_max_dists(self, ms):
        """Yield filenames of the maximal packages matching *ms*."""
        pkgs = self.get_pkgs(ms, max_only=True)
        if not pkgs:
            raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
        for pkg in pkgs:
            yield pkg.fn
    def all_deps(self, root_fn, max_only=False):
        """Return {filename: Package} for the transitive dependencies of
        *root_fn*.

        Candidates whose own dependencies cannot be found are skipped
        rather than aborting; NoPackagesFound propagates only when an
        entire MatchSpec has no usable candidate at all.
        """
        res = {}
        def add_dependents(fn1, max_only=False):
            for ms in self.ms_depends(fn1):
                found = False
                notfound = []
                for pkg2 in self.get_pkgs(ms, max_only=max_only):
                    if pkg2.fn in res:
                        found = True
                        continue
                    try:
                        if ms.strictness < 3:
                            add_dependents(pkg2.fn, max_only=max_only)
                    except NoPackagesFound as e:
                        # This build has a broken dependency: remember it
                        # and try the next build instead of bailing out.
                        if e.pkg not in notfound:
                            notfound.append(e.pkg)
                    else:
                        found = True
                        res[pkg2.fn] = pkg2
                if not found:
                    raise NoPackagesFound("Could not find some dependencies "
                                          "for %s: %s" % (ms, ', '.join(notfound)), str(ms))
        add_dependents(root_fn, max_only=max_only)
        return res
    def gen_clauses(self, v, dists, specs, features):
        """Yield CNF clauses (lists of signed variable numbers) encoding
        the install problem over the candidate set *dists*.

        *v* maps filename -> SAT variable number.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)
        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield [-v1, -v2]
        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield clause
                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield clause
        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [v[fn] for fn in self.find_matches(ms)
                          if fn in dists and feat in self.features(fn)]
                if len(clause) > 0:
                    yield clause
            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield [-v[fn]]
            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms)
                      if fn in dists]
            assert len(clause) >= 1, ms
            yield clause
    def generate_version_eq(self, v, dists, include0=False):
        """Build the pseudo-boolean objective penalizing older versions.

        Returns (eq, max_rhs) where eq is a list of (coefficient,
        variable) pairs -- the coefficient grows by one at each distinct
        (name, norm_version, build_number), newest first -- and max_rhs
        is the largest possible objective value.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)
        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                            dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # Unorderable version forms are treated as distinct.
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i
        return eq, max_rhs
    def get_dists(self, specs, max_only=False):
        """Return {filename: Package} candidates for *specs* and all of
        their transitive dependencies.

        Candidates with unresolvable dependencies are excluded;
        NoPackagesFound is raised only when a spec ends up with no
        usable candidate at all.
        """
        dists = {}
        for spec in specs:
            found = False
            notfound = []
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                if pkg.fn in dists:
                    found = True
                    continue
                try:
                    dists.update(self.all_deps(pkg.fn, max_only=max_only))
                except NoPackagesFound as e:
                    # Ignore any package that has nonexisting dependencies.
                    if e.pkg not in notfound:
                        notfound.append(e.pkg)
                else:
                    dists[pkg.fn] = pkg
                    found = True
            if not found:
                raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), spec)
        return dists
    def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
        """Solve *specs* (+ *features*) to a list of filenames.

        Tries a fast path first using only the maximal build of each
        candidate; if that yields a unique solution it is returned.
        Otherwise the full candidate set is encoded as CNF plus the
        version-preference objective, whose bound is bisected to find
        an optimal solution.  With guess=True an unsatisfiable problem
        exits the process with a hint instead of raising RuntimeError.
        """
        log.debug("Solving for %s" % str(specs))
        # First try doing it the "old way", i.e., just look at the most recent
        # version of each package from the specs. This doesn't handle the more
        # complicated cases that the pseudo-boolean solver does, but it's also
        # much faster when it does work.
        try:
            dists = self.get_dists(specs, max_only=True)
        except NoPackagesFound:
            # Handle packages that are not included because some dependencies
            # couldn't be found.
            pass
        else:
            v = {}  # map fn to variable number
            w = {}  # map variable number to fn
            i = -1  # in case the loop doesn't run
            for i, fn in enumerate(sorted(dists)):
                v[fn] = i + 1
                w[i + 1] = fn
            m = i + 1
            dotlog.debug("Solving using max dists only")
            clauses = self.gen_clauses(v, dists, specs, features)
            solutions = min_sat(clauses)
            if len(solutions) == 1:
                ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
                if returnall:
                    return [ret]
                return ret
        dists = self.get_dists(specs)
        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1
        clauses = list(self.gen_clauses(v, dists, specs, features))
        if not clauses:
            if returnall:
                return [[]]
            return []
        eq, max_rhs = self.generate_version_eq(v, dists)
        # Check the common case first
        dotlog.debug("Building the constraint with rhs: [0, 0]")
        constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))
        # Only relevant for build_BDD
        if constraints and constraints[0] == [false]:
            # XXX: This should *never* happen. build_BDD only returns false
            # when the linear constraint is unsatisfiable, but any linear
            # constraint can equal 0, by setting all the variables to 0.
            solution = []
        else:
            if constraints and constraints[0] == [true]:
                constraints = []
            dotlog.debug("Checking for solutions with rhs: [0, 0]")
            solution = sat(clauses + constraints)
        if not solution:
            # Second common case, check if it's unsatisfiable
            dotlog.debug("Checking for unsatisfiability")
            solution = sat(clauses)
            if not solution:
                if guess:
                    stderrlog.info('\nError: Unsatisfiable package '
                                   'specifications.\nGenerating hint: ')
                    sys.exit(self.guess_bad_solve(specs, features))
                raise RuntimeError("Unsatisfiable package specifications")
            def version_constraints(lo, hi):
                # Constraints forcing lo <= objective <= hi.
                return list(generate_constraints(eq, m, [lo, hi], alg=alg))
            log.debug("Bisecting the version constraint")
            constraints = bisect_constraints(0, max_rhs, clauses, version_constraints)
        dotlog.debug("Finding the minimal solution")
        solutions = min_sat(clauses + constraints, N=m+1)
        assert solutions, (specs, features)
        if len(solutions) > 1:
            print('Warning:', len(solutions), "possible package resolutions:")
            for sol in solutions:
                print('\t', [w[lit] for lit in sol if 0 < lit <= m])
        if returnall:
            return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
        return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
    def guess_bad_solve(self, specs, features):
        """Return a human-readable hint naming the spec combinations
        that conflict, found by searching for the largest satisfiable
        subsets of *specs*."""
        # TODO: Check features as well
        hint = []
        # Try to find the largest satisfiable subset
        found = False
        for i in range(len(specs), 0, -1):
            if found:
                break
            for comb in combinations(specs, i):
                try:
                    self.solve2(comb, features, guess=False)
                except RuntimeError:
                    pass
                else:
                    rem = set(specs) - set(comb)
                    rem.discard('conda')
                    if len(rem) == 1:
                        hint.append("%s" % rem.pop())
                    else:
                        hint.append("%s" % ' and '.join(rem))
                    found = True
        if not hint:
            return ''
        if len(hint) == 1:
            return ("\nHint: %s has a conflict with the remaining packages" %
                    hint[0])
        return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
  - %s""" % '\n  - '.join(hint))
    def explicit(self, specs):
        """
        Given the specifications, return:
          A. if one explicit specification (strictness=3) is given, and
             all dependencies of this package are explicit as well ->
             return the filenames of those dependencies (as well as the
             explicit specification)
          B. if more than one explicit specification is given ->
             return the filenames of those (not their dependencies)
          C. None in all other cases
        """
        if len(specs) == 1:
            ms = MatchSpec(specs[0])
            fn = ms.to_filename()
            if fn is None:
                return None
            res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
            res.append(fn)
        else:
            res = [MatchSpec(spec).to_filename() for spec in specs
                   if spec != 'conda']
        if None in res:
            return None
        res.sort()
        log.debug('explicit(%r) finished' % specs)
        return res
    @memoize
    def sum_matches(self, fn1, fn2):
        """Count how many dependencies of *fn1* are satisfied by *fn2*."""
        return sum(ms.match(fn2) for ms in self.ms_depends(fn1))
    def find_substitute(self, installed, features, fn, max_only=False):
        """
        Find a substitute package for `fn` (given `installed` packages)
        which does *NOT* have `features`. If found, the substitute will
        have the same package name and version and its dependencies will
        match the installed packages as closely as possible.
        If no substitute is found, None is returned.
        """
        name, version, unused_build = fn.rsplit('-', 2)
        candidates = {}
        for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
            fn1 = pkg.fn
            if self.features(fn1).intersection(features):
                continue
            # Score each candidate by how many of its dependencies the
            # installed set already satisfies.
            key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
            candidates[key] = fn1
        if candidates:
            maxkey = max(candidates)
            return candidates[maxkey]
        else:
            return None
    def installed_features(self, installed):
        """
        Return the set of all features of all `installed` packages.
        """
        res = set()
        for fn in installed:
            try:
                res.update(self.features(fn))
            except KeyError:
                # Package not in the index; contributes no features.
                pass
        return res
    def update_with_features(self, fn, features):
        """Merge the 'with_features_depends' entries for *fn* into its
        cached dependency list, choosing the largest feature subset
        contained in *features*."""
        with_features = self.index[fn].get('with_features_depends')
        if with_features is None:
            return
        key = ''
        for fstr in with_features:
            fs = set(fstr.split())
            if fs <= features and len(fs) > len(set(key.split())):
                key = fstr
        if not key:
            return
        d = {ms.name: ms for ms in self.ms_depends(fn)}
        for spec in with_features[key]:
            ms = MatchSpec(spec)
            d[ms.name] = ms
        self.msd_cache[fn] = d.values()
    def solve(self, specs, installed=None, features=None, max_only=False):
        """Top-level entry point: resolve *specs* to a list of filenames.

        Features default to those of *installed*; tracked features of
        matching candidates are added before delegating to explicit()
        or solve2().
        """
        if installed is None:
            installed = []
        if features is None:
            features = self.installed_features(installed)
        for spec in specs:
            ms = MatchSpec(spec)
            for pkg in self.get_pkgs(ms, max_only=max_only):
                fn = pkg.fn
                features.update(self.track_features(fn))
        log.debug('specs=%r features=%r' % (specs, features))
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                fn = pkg.fn
                self.update_with_features(fn, features)
        stdoutlog.info("Solving package specifications: ")
        try:
            return self.explicit(specs) or self.solve2(specs, features)
        except RuntimeError:
            stdoutlog.info('\n')
            raise
if __name__ == '__main__':
    # Ad-hoc manual test driver: resolve specs given on the command line
    # against the test index and print the resulting filenames.
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec
    with open('../tests/index.json') as fi:
        r = Resolve(json.load(fi))
    p = OptionParser(usage="usage: %prog [options] SPEC(s)")
    p.add_option("--mkl", action="store_true")
    opts, args = p.parse_args()
    features = set(['mkl']) if opts.mkl else set()
    specs = [arg2spec(arg) for arg in args]
    pprint(r.solve(specs, [], features))
| conda/resolve.py
--- a/conda/resolve.py
+++ b/conda/resolve.py
@@ -278,12 +278,25 @@ def all_deps(self, root_fn, max_only=False):
def add_dependents(fn1, max_only=False):
for ms in self.ms_depends(fn1):
+ found = False
+ notfound = []
for pkg2 in self.get_pkgs(ms, max_only=max_only):
if pkg2.fn in res:
+ found = True
continue
- res[pkg2.fn] = pkg2
- if ms.strictness < 3:
- add_dependents(pkg2.fn, max_only=max_only)
+ try:
+ if ms.strictness < 3:
+ add_dependents(pkg2.fn, max_only=max_only)
+ except NoPackagesFound as e:
+ if e.pkg not in notfound:
+ notfound.append(e.pkg)
+ else:
+ found = True
+ res[pkg2.fn] = pkg2
+
+ if not found:
+ raise NoPackagesFound("Could not find some dependencies "
+ "for %s: %s" % (ms, ', '.join(notfound)), str(ms))
add_dependents(root_fn, max_only=max_only)
return res
@@ -394,7 +407,7 @@ def get_dists(self, specs, max_only=False):
dists[pkg.fn] = pkg
found = True
if not found:
- raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
+ raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), spec)
return dists
|
TLS does not appear to be verified
As far as I can tell, conda is just using urllib2, which doesn't verify SSL certificates at all in Python 2.x and doesn't do so by default in Python 3.x. This means that even for recipes which use an https link, if there is no md5 hash there is a simple MITM code-execution attack.
<!---
@huboard:{"order":158.5,"custom_state":""}
-->
| conda/connection.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
from conda.compat import PY3, string_types
from conda.compat import iteritems, input
from conda.config import get_proxy_servers
if PY3:
# Python 3.x
import urllib.request as urllib2
from urllib import parse as urlparse
else:
# Python 2.x
import urllib2
import urlparse
log = getLogger(__name__)
# Proxy support happens in three steps:
# 1. get proxies if needed -- one proxy per protocol
# 2. handle authentication
#    basic, digest, and ntlm (windows) authentications should be handled.
# 3. handle any protocol (typically http, https, ftp)
# urllib can only auto-detect proxies on windows and mac, so on linux (or
# when the user wants to override) the condarc setting takes precedence
# over any system proxies.
# The mapping is {protocol: proxyserver}, e.g. {'http': 'http://proxy:8080'}.
proxies_dict = get_proxy_servers() or urllib2.getproxies()
# Shared password manager holding proxy credentials for all handlers.
proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
def get_userandpass(proxytype='', realm=''):
    """Prompt on the terminal for proxy credentials.

    Returns a (username, password) tuple; the password is read without
    echoing.  Kept as a standalone function so a GUI prompt could
    replace it.
    """
    import getpass
    username = input(proxytype + ' proxy username:')
    password = getpass.getpass()
    return username, password
# a procedure that needs to be re-executed whenever the handlers change
def installopener():
    """(Re)install the global urllib2 opener with proxy + auth handlers."""
    opener = urllib2.build_opener(
        urllib2.ProxyHandler(proxies_dict),
        urllib2.ProxyBasicAuthHandler(proxypwdmgr),
        urllib2.ProxyDigestAuthHandler(proxypwdmgr),
        urllib2.HTTPHandler,
    )
    # digest auth may not work with all proxies
    # http://bugs.python.org/issue16095
    # could add windows/ntlm authentication here
    urllib2.install_opener(opener)
# True until the first credential prompt; afterwards credentials are
# only added for the specific protocol that failed.
firstconnection = True
# i made this func so i wouldn't alter the original code much
def connectionhandled_urlopen(request):
    """handles aspects of establishing the connection with the remote

    Opens *request* (a urllib2.Request or a URL string) through the
    proxy-aware opener.  On a 401/407 proxy-authentication failure the
    user is prompted for credentials and the request is retried; any
    other HTTPError is re-raised.
    """
    global firstconnection
    installopener()
    if isinstance(request, string_types):
        request = urllib2.Request(request)
    try:
        return urllib2.urlopen(request)
    except urllib2.HTTPError as HTTPErrorinst:
        if HTTPErrorinst.code in (407, 401):
            # proxy authentication error
            # ...(need to auth) or supplied creds failed
            if HTTPErrorinst.code == 401:
                log.debug('proxy authentication failed')
            # authenticate and retry
            uname, pword = get_userandpass()
            # assign same user+pwd to all protocols (a reasonable
            # assumption) to decrease user input. otherwise you'd need
            # to assign a user/pwd to each proxy type
            if firstconnection:
                for aprotocol, aproxy in iteritems(proxies_dict):
                    proxypwdmgr.add_password(None, aproxy, uname, pword)
                # BUG FIX: this was 'firstconnection == False', a no-op
                # comparison -- the flag was never cleared, so the
                # per-protocol branch below was unreachable.
                firstconnection = False
            else:
                # ...assign a uname/pwd for the specific protocol's proxy
                protocol = urlparse.urlparse(request.get_full_url()).scheme
                proxypwdmgr.add_password(None, proxies_dict[protocol],
                                         uname, pword)
            installopener()
            # retry from the top with the new credentials installed
            return connectionhandled_urlopen(request)
        raise
<|code_end|>
conda/fetch.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import sys
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, isdir, join
from conda import config
from conda.utils import memoized
from conda.connection import connectionhandled_urlopen
from conda.compat import PY3, itervalues, get_http_value
from conda.lock import Locked
if PY3:
import urllib.request as urllib2
else:
import urllib2
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
# When True, an unreachable/unknown channel host aborts the process
# instead of being skipped.
fail_unknown_host = False
# Number of download attempts before fetch_pkg() gives up.
retries = 3
def create_cache_dir():
    """Return the repodata cache directory, creating it if necessary."""
    path = join(config.pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        # Directory already exists (or is uncreatable); hand the path
        # back regardless -- callers handle subsequent I/O errors.
        pass
    return path
def cache_fn_url(url):
    """Return the cache filename for *url*: md5 of the URL plus '.json'."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest + '.json'
def add_http_value_to_dict(u, http_key, d, dict_key):
    """Copy HTTP header *http_key* from response *u* into d[dict_key].

    Nothing is stored when the header is absent or empty.
    """
    header = get_http_value(u, http_key)
    if not header:
        return
    d[dict_key] = header
def fetch_repodata(url, cache_dir=None, use_cache=False):
    """Fetch (or load from cache) the repodata for channel *url*.

    Uses Etag / If-Modified-Since headers for conditional requests; a
    304 response keeps the cached copy.  Returns the repodata dict
    (with '_url' recorded) or None when nothing could be obtained.
    """
    dotlog.debug("fetching repodata: %s ..." % url)
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        # No usable cache on disk; start from an empty index.
        cache = {'packages': {}}
    if use_cache:
        return cache
    request = urllib2.Request(url + 'repodata.json.bz2')
    if '_etag' in cache:
        request.add_header('If-None-Match', cache['_etag'])
    if '_mod' in cache:
        request.add_header('If-Modified-Since', cache['_mod'])
    try:
        u = connectionhandled_urlopen(request)
        data = u.read()
        u.close()
        cache = json.loads(bz2.decompress(data).decode('utf-8'))
        # Remember the validators for the next conditional request.
        add_http_value_to_dict(u, 'Etag', cache, '_etag')
        add_http_value_to_dict(u, 'Last-Modified', cache, '_mod')
    except ValueError:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url)
    except urllib2.HTTPError as e:
        msg = "HTTPError: %d %s %s\n" % (e.code, e.msg, url)
        log.debug(msg)
        # 304 Not Modified means the cached copy is still current.
        if e.code != 304:
            raise RuntimeError(msg)
    except urllib2.URLError as e:
        sys.stderr.write("Error: unknown host: %s (%r)\n" % (url, e))
        if fail_unknown_host:
            sys.exit(1)
    cache['_url'] = url
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        # Cache write failures are non-fatal.
        pass
    return cache or None
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Build the combined package index for *channel_urls*.

    Channels listed earlier take priority: the URLs are processed in
    reverse and dict.update overwrites duplicates.  With unknown=True,
    locally extracted packages absent from every channel are added from
    their info/index.json.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("\nError: URL '%s' not in allowed channels" % url)
        repodata = fetch_repodata(url, use_cache=use_cache)
        if repodata is None:
            continue
        new_index = repodata['packages']
        # Tag every record with the channel it came from.
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)
    stdoutlog.info('\n')
    if unknown:
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                # Records without dependency info are unusable for solving.
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta
    return index
def fetch_pkg(info, dst_dir=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`

    Downloads to a '.part' file, verifies the md5 checksum, then renames
    into place.  Retries up to `retries` times on transient I/O errors;
    raises RuntimeError on permanent failures.
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]
    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)
    pp = path + '.part'  # partial download; renamed on success
    with Locked(dst_dir):
        for x in range(retries):
            try:
                fi = connectionhandled_urlopen(url)
            except IOError:
                log.debug("attempt %d failed at urlopen" % x)
                continue
            if fi is None:
                log.debug("could not fetch (urlopen returned None)")
                continue
            n = 0
            h = hashlib.new('md5')
            getLogger('fetch.start').info((fn, info['size']))
            need_retry = False
            try:
                fo = open(pp, 'wb')
            except IOError:
                raise RuntimeError("Could not open %r for writing. "
                    "Permissions problem or missing directory?" % pp)
            while True:
                try:
                    chunk = fi.read(16384)
                except IOError:
                    # Transient network failure: retry the whole download.
                    need_retry = True
                    break
                if not chunk:
                    break
                try:
                    fo.write(chunk)
                except IOError:
                    raise RuntimeError("Failed to write to %r." % pp)
                h.update(chunk)
                n += len(chunk)
                getLogger('fetch.update').info(n)
            fo.close()
            if need_retry:
                continue
            fi.close()
            getLogger('fetch.stop').info(None)
            # Verify integrity before moving the file into place.
            if h.hexdigest() != info['md5']:
                raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)" % (fn, h.hexdigest(), info['md5']))
            try:
                os.rename(pp, path)
            except OSError:
                raise RuntimeError("Could not rename %r to %r." % (pp, path))
            try:
                # urls.txt keeps a best-effort record of fetched URLs.
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
            return
        raise RuntimeError("Could not locate '%s'" % url)
def download(url, dst_path):
    """Download *url* to *dst_path*, reporting progress through the
    'fetch.*' loggers when a Content-Length header is available.

    Raises RuntimeError if the URL cannot be opened; write errors
    propagate as IOError.
    """
    try:
        u = connectionhandled_urlopen(url)
    except IOError:
        raise RuntimeError("Could not open '%s'" % url)
    except ValueError as e:
        raise RuntimeError(e)
    size = get_http_value(u, 'Content-Length')
    if size:
        size = int(size)
        fn = basename(dst_path)
        getLogger('fetch.start').info((fn[:14], size))
    n = 0
    try:
        # 'with' guarantees the destination handle is closed even when a
        # read fails part-way through (the original leaked it on error);
        # the response is closed in 'finally' for the same reason.
        with open(dst_path, 'wb') as fo:
            while True:
                chunk = u.read(16384)
                if not chunk:
                    break
                fo.write(chunk)
                n += len(chunk)
                if size:
                    getLogger('fetch.update').info(n)
    finally:
        u.close()
    if size:
        getLogger('fetch.stop').info(None)
class TmpDownload(object):
    """
    Context manager to handle downloads to a tempfile

    A plain local path (no '://') is returned unchanged and nothing is
    downloaded; URLs are fetched into a fresh temporary directory that
    is removed on exit.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose
    def __enter__(self):
        # Local path: use it as-is, create no temporary directory.
        if '://' not in self.url:
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from conda.console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst
    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/connection.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
import re
import mimetypes
import os
import email
import base64
import ftplib
import cgi
from io import BytesIO
from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers
import requests
# Default number of times CondaSession retries an HTTP(S) request.
RETRIES = 3
log = getLogger(__name__)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests.Session preconfigured for conda.

    Applies condarc proxy settings, retries HTTP(S) requests (keyword
    'retries', default RETRIES), and mounts transport adapters so that
    file:// and ftp:// URLs work through the same interface.
    """
    # Default request timeout (None = no timeout); class attribute so
    # callers can override it globally.
    timeout = None
    def __init__(self, *args, **kwargs):
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        self.proxies = get_proxy_servers()
        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter serving file:// URLs from the local filesystem."""
    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        # Build a Response by hand: 200 with the open file as the raw
        # body, or 404 carrying the OSError when the path is missing.
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            resp.raw = open(pathname, "rb")
            # Closing the response must close the underlying file.
            resp.close = resp.raw.close
        return resp
    def close(self):
        pass
# Detects a Windows drive letter at the start of a path, in either the
# 'c:' or the URL-escaped 'c|' form.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    remainder = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(remainder):
        # Windows drive letter: normalize 'c|/...' or 'c:/...' to 'c:/...'.
        return remainder[0] + ':' + remainder[2:]
    return '/' + remainder
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''
    def __init__(self):
        super(FTPAdapter, self).__init__()
        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.  'GET' maps onto RETR so requests'
        # ordinary session.get() works over FTP.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr,}
    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)
        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)
        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)
        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)
        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            # Anonymous login when no credentials were supplied.
            self.conn.login()
        # Get the method and attempt to find the function to call.
        # NOTE(review): an unsupported HTTP method raises KeyError here.
        resp = self.func_table[request.method](path, request)
        # Return the response.
        return resp
    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op; each command method closes its own
        # FTP connection after use.
        pass
    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()
        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response
    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.

        Downloads the file at *path* into memory and wraps it in a
        binary response object.
        '''
        data = BytesIO()
        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close
        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response
def stor(self, path, request):
    '''Executes the FTP STOR command on the given path.'''
    # Recover the uploaded file from the multipart body. We assume
    # (bravely) a single file per URL and that the target filename is
    # carried in the URL itself rather than in the files argument.
    # Neither assumption is generally true, but both keep this simple.
    payload = parse_multipart_files(request)
    # The last URL component is the remote filename.
    directory, filename = os.path.split(path)
    # Switch directories and upload the data.
    self.conn.cwd(directory)
    status = self.conn.storbinary('STOR ' + filename, payload)
    # Close the connection and build the response.
    self.conn.close()
    return build_binary_response(request, BytesIO(), status)
def nlst(self, path, request):
    '''Executes the FTP NLST command on the given path.'''
    buffer = StringIO()
    # Alias close() as release_conn(); see list().
    buffer.release_conn = buffer.close
    self.conn.cwd(path)
    status = self.conn.retrbinary('NLST', data_callback_factory(buffer))
    # retrbinary has delivered the full name listing by the time it returns.
    reply = build_text_response(request, buffer, status)
    # Close the connection.
    self.conn.close()
    return reply
def get_username_password_from_header(self, request):
    '''Given a PreparedRequest object, reverse the process of adding HTTP
    Basic auth to obtain the username and password. Allows the FTP adapter
    to piggyback on the basic auth notation without changing the control
    flow.

    Returns a (username, password) tuple, or None when the request carries
    no Authorization header. Raises AuthError for non-Basic auth schemes.
    '''
    auth_header = request.headers.get('Authorization')
    if not auth_header:
        # No auth header. Return None.
        return None
    # The basic auth header is of the form 'Basic xyz'. We want the
    # second part. Check that we have the right kind of auth though.
    encoded_components = auth_header.split()[:2]
    if encoded_components[0] != 'Basic':
        raise AuthError('Invalid form of Authentication used.')
    encoded = encoded_components[1]
    # b64decode returns bytes on Python 3; decode so the split below and
    # the returned credentials are text. (The old code called
    # bytes.split(':'), which raises TypeError on Python 3.)
    decoded = base64.b64decode(encoded).decode('utf-8')
    # The string is of the form 'username:password'. Split on the first
    # colon only, since the password itself may legally contain colons.
    username, password = decoded.split(':', 1)
    return (username, password)
def get_host_and_path_from_url(self, request):
    '''Given a PreparedRequest object, split the URL in such a manner as to
    determine the host and the path. This is a separate method to wrap some
    of urlparse's craziness.

    Returns a (host, port, path) tuple; port is 0 when the URL does not
    specify one, and any single leading slash is removed from the path.
    '''
    parsed = urlparse.urlparse(request.url)
    path = parsed.path
    # If there is a slash on the front of the path, chuck it. startswith()
    # also copes with an empty path, where direct indexing (path[0]) would
    # raise IndexError.
    if path.startswith('/'):
        path = path[1:]
    host = parsed.hostname
    port = parsed.port or 0
    return (host, port, path)
def data_callback_factory(variable):
    '''Returns a callback suitable for use by the FTP library. Each chunk
    the callback receives is appended to `variable`, which should be a
    file-like object.'''
    def writer(chunk):
        variable.write(chunk)
    return writer
class AuthError(Exception):
    '''Raised when a request carries an unsupported authentication scheme.'''
def build_text_response(request, data, code):
    '''Build a response whose payload is known to be ASCII text.'''
    encoding = 'ascii'
    return build_response(request, data, code, encoding)
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    encoding = None  # unknown encoding: leave any decoding to the caller
    return build_response(request, data, code, encoding)
def build_response(request, data, code, encoding):
    '''Builds a requests.Response object from the data returned by ftplib,
    using the specified encoding.'''
    response = requests.Response()
    response.encoding = encoding
    # Wire up the fields callers of requests expect to find.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status lines look like '226 Transfer complete'; keep only the
    # leading numeric code (stored as a string, as before).
    response.status_code = code.split()[0]
    # Rewind the raw file-like object so callers can read from the start.
    response.raw.seek(0)
    # Give any registered response hooks a chance to run before returning.
    return requests.hooks.dispatch_hook('response', request.hooks, response)
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])
    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)
    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()
    # Get a BytesIO now, and write the file into it. parse_multipart hands
    # back a list of bytes chunks on Python 3, so join with a bytes
    # separator -- ''.join(filedata) would raise TypeError there.
    buf = BytesIO()
    buf.write(b''.join(filedata))
    buf.seek(0)
    return buf
conda/fetch.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, isdir, join
from conda import config
from conda.utils import memoized
from conda.connection import CondaSession
from conda.compat import itervalues, get_http_value
from conda.lock import Locked
import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    '''Return the directory used for caching repodata, creating it on
    demand.'''
    path = join(config.pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        pass  # already exists (or not creatable); return the path anyway
    return path
def cache_fn_url(url):
    '''Map a channel URL to its cache file name (md5 digest + ".json").'''
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest + '.json'
def add_http_value_to_dict(u, http_key, d, dict_key):
    '''Copy the HTTP header `http_key` from response `u` into d[dict_key],
    skipping missing or empty values.'''
    header = get_http_value(u, http_key)
    if header:
        d[dict_key] = header
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    '''Fetch repodata.json.bz2 from `url`, using a local JSON cache with
    HTTP conditional requests (ETag / If-Modified-Since) to avoid
    re-downloading unchanged indexes.

    Returns the (possibly cached) repodata dict, or None when nothing could
    be fetched and no cache exists.
    '''
    dotlog.debug("fetching repodata: %s ..." % url)
    session = session or CondaSession()
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        # Use a context manager so the cache file handle is not leaked.
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}
    if use_cache:
        return cache
    headers = {}
    # Bug fix: the key is '_etag', not '_tag'. The old check meant the
    # If-None-Match header was never sent (and would KeyError if it were).
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]
    try:
        resp = session.get(url + 'repodata.json.bz2', headers=headers)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            # Remember the validators so the next fetch can be conditional.
            if resp.headers.get('Etag'):
                cache['_etag'] = resp.headers['Etag']
            if resp.headers.get('Last-Modified'):
                cache['_mod'] = resp.headers['Last-Modified']
    except ValueError:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url)
    except requests.exceptions.HTTPError as e:
        msg = "HTTPError: %s: %s\n" % (e, url)
        log.debug(msg)
        raise RuntimeError(msg)
    except requests.exceptions.ConnectionError as e:
        # Best effort: fall back to cached data unless configured to fail.
        msg = "Connection error: %s: %s\n" % (e, url)
        stderrlog.info('Could not connect to %s\n' % url)
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)
    cache['_url'] = url
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        # Cache write failures are non-fatal.
        pass
    return cache or None
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
    '''Build a package index (filename -> metadata dict) from the given
    channel URLs. Later channels in `channel_urls` take priority (earlier
    entries are processed first and then overwritten). When `unknown` is
    true, locally cached packages missing from all channels are added.
    '''
    log.debug('channel_urls=' + repr(channel_urls))
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            # `sys` is not imported in this module, so sys.exit() here was a
            # NameError waiting to happen; raising SystemExit is equivalent.
            raise SystemExit("\nError: URL '%s' not in allowed channels" % url)
        repodata = fetch_repodata(url, use_cache=use_cache, session=session)
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)
    stdoutlog.info('\n')
    if unknown:
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    dst_dir = dst_dir if dst_dir is not None else config.pkgs_dirs[0]
    session = session or CondaSession()
    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    download(url, join(dst_dir, fn), session=session, md5=info['md5'],
             urlstxt=True)
def download(url, dst_path, session=None, md5=None, urlstxt=False):
    '''Download `url` to `dst_path`, streaming through a ".part" temp file.

    When `md5` is given the download is verified against it; when `urlstxt`
    is true the URL is appended to urls.txt in the destination directory.
    Raises RuntimeError on any network, I/O, or checksum failure.
    '''
    pp = dst_path + '.part'
    dst_dir = os.path.split(dst_path)[0]
    session = session or CondaSession()
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True)
            # requests does not raise on HTTP error statuses by itself;
            # without this a 404/500 would silently download the error page
            # and then fail the MD5 check with a confusing message.
            resp.raise_for_status()
        except IOError:
            raise RuntimeError("Could not open '%s'" % url)
        except requests.exceptions.HTTPError as e:
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))
        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    n += len(chunk)
                    if size:
                        getLogger('fetch.update').info(n)
        except IOError:
            raise RuntimeError("Could not open %r for writing. "
                        "Permissions problem or missing directory?" % pp)
        if size:
            getLogger('fetch.stop').info(None)
        if md5 and h.hexdigest() != md5:
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)" % (url, h.hexdigest(), md5))
        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" % (pp,
                dst_path, e))
        if urlstxt:
            try:
                # Best effort: record the URL for later bookkeeping.
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
class TmpDownload(object):
    """
    Context manager that downloads a URL into a temporary directory and
    removes that directory on exit. Plain filesystem paths (anything
    without '://') are returned untouched and nothing is cleaned up.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # A local path was given; nothing to download or clean up.
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from conda.console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        target = join(self.tmp_dir, basename(self.url))
        download(self.url, target)
        return target

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -7,106 +7,368 @@
from __future__ import print_function, division, absolute_import
from logging import getLogger
+import re
+import mimetypes
+import os
+import email
+import base64
+import ftplib
+import cgi
+from io import BytesIO
-from conda.compat import PY3, string_types
-from conda.compat import iteritems, input
+from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers
-if PY3:
- # Python 3.x
- import urllib.request as urllib2
- from urllib import parse as urlparse
-else:
- # Python 2.x
- import urllib2
- import urlparse
+import requests
+RETRIES = 3
log = getLogger(__name__)
-# 1. get proxies if needed. a proxy for each protocol
-# 2. handle authentication
-# basic, digest, and nltm (windows) authentications should be handled.
-# 3. handle any protocol
-# typically http, https, ftp
-
-# 1. get the proxies list
-# urllib can only get proxies on windows and mac. so on linux or if the user
-# wants to specify the proxy there has to be a way to do that. TODO get proxies
-# from condarc and overrwrite any system proxies
-# the proxies are in a dict {'http':'http://proxy:8080'}
-# protocol:proxyserver
-proxies_dict = get_proxy_servers() or urllib2.getproxies()
-
-#2. handle authentication
-
-proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
-
-
-def get_userandpass(proxytype='', realm=''):
- """a function to get username and password from terminal.
- can be replaced with anything like some gui"""
- import getpass
-
- uname = input(proxytype + ' proxy username:')
- pword = getpass.getpass()
- return uname, pword
-
-
-# a procedure that needs to be executed with changes to handlers
-def installopener():
- opener = urllib2.build_opener(
- urllib2.ProxyHandler(proxies_dict),
- urllib2.ProxyBasicAuthHandler(proxypwdmgr),
- urllib2.ProxyDigestAuthHandler(proxypwdmgr),
- urllib2.HTTPHandler,
- )
- # digest auth may not work with all proxies
- # http://bugs.python.org/issue16095
- # could add windows/nltm authentication here
- #opener=urllib2.build_opener(urllib2.ProxyHandler(proxies_dict), urllib2.HTTPHandler)
-
- urllib2.install_opener(opener)
-
-
-firstconnection = True
-#i made this func so i wouldn't alter the original code much
-def connectionhandled_urlopen(request):
- """handles aspects of establishing the connection with the remote"""
-
- installopener()
-
- if isinstance(request, string_types):
- request = urllib2.Request(request)
-
- try:
- return urllib2.urlopen(request)
-
- except urllib2.HTTPError as HTTPErrorinst:
- if HTTPErrorinst.code in (407, 401):
- # proxy authentication error
- # ...(need to auth) or supplied creds failed
- if HTTPErrorinst.code == 401:
- log.debug('proxy authentication failed')
- #authenticate and retry
- uname, pword = get_userandpass()
- #assign same user+pwd to all protocols (a reasonable assumption) to
- #decrease user input. otherwise you'd need to assign a user/pwd to
- #each proxy type
- if firstconnection == True:
- for aprotocol, aproxy in iteritems(proxies_dict):
- proxypwdmgr.add_password(None, aproxy, uname, pword)
- firstconnection == False
- else: #...assign a uname pwd for the specific protocol proxy type
- assert(firstconnection == False)
- protocol = urlparse.urlparse(request.get_full_url()).scheme
- proxypwdmgr.add_password(None, proxies_dict[protocol],
- uname, pword)
- installopener()
- # i'm uncomfortable with this
- # but i just want to exec to start from the top again
- return connectionhandled_urlopen(request)
- raise
-
- except:
- raise
+# Modified from code in pip/download.py:
+
+# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+class CondaSession(requests.Session):
+
+ timeout = None
+
+ def __init__(self, *args, **kwargs):
+ retries = kwargs.pop('retries', RETRIES)
+
+ super(CondaSession, self).__init__(*args, **kwargs)
+
+ self.proxies = get_proxy_servers()
+
+ # Configure retries
+ if retries:
+ http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
+ self.mount("http://", http_adapter)
+ self.mount("https://", http_adapter)
+
+ # Enable file:// urls
+ self.mount("file://", LocalFSAdapter())
+
+ # Enable ftp:// urls
+ self.mount("ftp://", FTPAdapter())
+
+class LocalFSAdapter(requests.adapters.BaseAdapter):
+
+ def send(self, request, stream=None, timeout=None, verify=None, cert=None,
+ proxies=None):
+ pathname = url_to_path(request.url)
+
+ resp = requests.models.Response()
+ resp.status_code = 200
+ resp.url = request.url
+
+ try:
+ stats = os.stat(pathname)
+ except OSError as exc:
+ resp.status_code = 404
+ resp.raw = exc
+ else:
+ modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
+ content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
+ resp.headers = requests.structures.CaseInsensitiveDict({
+ "Content-Type": content_type,
+ "Content-Length": stats.st_size,
+ "Last-Modified": modified,
+ })
+
+ resp.raw = open(pathname, "rb")
+ resp.close = resp.raw.close
+
+ return resp
+
+ def close(self):
+ pass
+
+def url_to_path(url):
+ """
+ Convert a file: URL to a path.
+ """
+ assert url.startswith('file:'), (
+ "You can only turn file: urls into filenames (not %r)" % url)
+ path = url[len('file:'):].lstrip('/')
+ path = urlparse.unquote(path)
+ if _url_drive_re.match(path):
+ path = path[0] + ':' + path[2:]
+ else:
+ path = '/' + path
+ return path
+
+_url_drive_re = re.compile('^([a-z])[:|]', re.I)
+
+# Taken from requests-ftp
+# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
+
+# Copyright 2012 Cory Benfield
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class FTPAdapter(requests.adapters.BaseAdapter):
+ '''A Requests Transport Adapter that handles FTP urls.'''
+ def __init__(self):
+ super(FTPAdapter, self).__init__()
+
+ # Build a dictionary keyed off the methods we support in upper case.
+ # The values of this dictionary should be the functions we use to
+ # send the specific queries.
+ self.func_table = {'LIST': self.list,
+ 'RETR': self.retr,
+ 'STOR': self.stor,
+ 'NLST': self.nlst,
+ 'GET': self.retr,}
+
+ def send(self, request, **kwargs):
+ '''Sends a PreparedRequest object over FTP. Returns a response object.
+ '''
+ # Get the authentication from the prepared request, if any.
+ auth = self.get_username_password_from_header(request)
+
+ # Next, get the host and the path.
+ host, port, path = self.get_host_and_path_from_url(request)
+
+ # Sort out the timeout.
+ timeout = kwargs.get('timeout', None)
+
+ # Establish the connection and login if needed.
+ self.conn = ftplib.FTP()
+ self.conn.connect(host, port, timeout)
+
+ if auth is not None:
+ self.conn.login(auth[0], auth[1])
+ else:
+ self.conn.login()
+
+ # Get the method and attempt to find the function to call.
+ resp = self.func_table[request.method](path, request)
+
+ # Return the response.
+ return resp
+
+ def close(self):
+ '''Dispose of any internal state.'''
+ # Currently this is a no-op.
+ pass
+
+ def list(self, path, request):
+ '''Executes the FTP LIST command on the given path.'''
+ data = StringIO()
+
+ # To ensure the StringIO gets cleaned up, we need to alias its close
+ # method to the release_conn() method. This is a dirty hack, but there
+ # you go.
+ data.release_conn = data.close
+
+ self.conn.cwd(path)
+ code = self.conn.retrbinary('LIST', data_callback_factory(data))
+
+ # When that call has finished executing, we'll have all our data.
+ response = build_text_response(request, data, code)
+
+ # Close the connection.
+ self.conn.close()
+
+ return response
+
+ def retr(self, path, request):
+ '''Executes the FTP RETR command on the given path.'''
+ data = BytesIO()
+
+ # To ensure the BytesIO gets cleaned up, we need to alias its close
+ # method. See self.list().
+ data.release_conn = data.close
+
+ code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
+
+ response = build_binary_response(request, data, code)
+
+ # Close the connection.
+ self.conn.close()
+
+ return response
+
+ def stor(self, path, request):
+ '''Executes the FTP STOR command on the given path.'''
+
+ # First, get the file handle. We assume (bravely)
+ # that there is only one file to be sent to a given URL. We also
+ # assume that the filename is sent as part of the URL, not as part of
+ # the files argument. Both of these assumptions are rarely correct,
+ # but they are easy.
+ data = parse_multipart_files(request)
+
+ # Split into the path and the filename.
+ path, filename = os.path.split(path)
+
+ # Switch directories and upload the data.
+ self.conn.cwd(path)
+ code = self.conn.storbinary('STOR ' + filename, data)
+
+ # Close the connection and build the response.
+ self.conn.close()
+
+ response = build_binary_response(request, BytesIO(), code)
+
+ return response
+
+ def nlst(self, path, request):
+ '''Executes the FTP NLST command on the given path.'''
+ data = StringIO()
+
+ # Alias the close method.
+ data.release_conn = data.close
+
+ self.conn.cwd(path)
+ code = self.conn.retrbinary('NLST', data_callback_factory(data))
+
+ # When that call has finished executing, we'll have all our data.
+ response = build_text_response(request, data, code)
+
+ # Close the connection.
+ self.conn.close()
+
+ return response
+
+ def get_username_password_from_header(self, request):
+ '''Given a PreparedRequest object, reverse the process of adding HTTP
+ Basic auth to obtain the username and password. Allows the FTP adapter
+ to piggyback on the basic auth notation without changing the control
+ flow.'''
+ auth_header = request.headers.get('Authorization')
+
+ if auth_header:
+ # The basic auth header is of the form 'Basic xyz'. We want the
+ # second part. Check that we have the right kind of auth though.
+ encoded_components = auth_header.split()[:2]
+ if encoded_components[0] != 'Basic':
+ raise AuthError('Invalid form of Authentication used.')
+ else:
+ encoded = encoded_components[1]
+
+ # Decode the base64 encoded string.
+ decoded = base64.b64decode(encoded)
+
+ # The string is of the form 'username:password'. Split on the
+ # colon.
+ components = decoded.split(':')
+ username = components[0]
+ password = components[1]
+ return (username, password)
+ else:
+ # No auth header. Return None.
+ return None
+
+ def get_host_and_path_from_url(self, request):
+ '''Given a PreparedRequest object, split the URL in such a manner as to
+ determine the host and the path. This is a separate method to wrap some
+ of urlparse's craziness.'''
+ url = request.url
+ # scheme, netloc, path, params, query, fragment = urlparse(url)
+ parsed = urlparse.urlparse(url)
+ path = parsed.path
+
+ # If there is a slash on the front of the path, chuck it.
+ if path[0] == '/':
+ path = path[1:]
+
+ host = parsed.hostname
+ port = parsed.port or 0
+
+ return (host, port, path)
+
+def data_callback_factory(variable):
+ '''Returns a callback suitable for use by the FTP library. This callback
+ will repeatedly save data into the variable provided to this function. This
+ variable should be a file-like structure.'''
+ def callback(data):
+ variable.write(data)
+ return
+
+ return callback
+
+class AuthError(Exception):
+ '''Denotes an error with authentication.'''
+ pass
+
+def build_text_response(request, data, code):
+ '''Build a response for textual data.'''
+ return build_response(request, data, code, 'ascii')
+
+def build_binary_response(request, data, code):
+ '''Build a response for data whose encoding is unknown.'''
+ return build_response(request, data, code, None)
+
+def build_response(request, data, code, encoding):
+ '''Builds a response object from the data returned by ftplib, using the
+ specified encoding.'''
+ response = requests.Response()
+
+ response.encoding = encoding
+
+ # Fill in some useful fields.
+ response.raw = data
+ response.url = request.url
+ response.request = request
+ response.status_code = code.split()[0]
+
+ # Make sure to seek the file-like raw object back to the start.
+ response.raw.seek(0)
+
+ # Run the response hook.
+ response = requests.hooks.dispatch_hook('response', request.hooks, response)
+ return response
+
+def parse_multipart_files(request):
+ '''Given a prepared reqest, return a file-like object containing the
+ original data. This is pretty hacky.'''
+ # Start by grabbing the pdict.
+ _, pdict = cgi.parse_header(request.headers['Content-Type'])
+
+ # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
+ buf = BytesIO()
+ buf.write(request.body)
+ buf.seek(0)
+
+ # Parse the data. Simply take the first file.
+ data = cgi.parse_multipart(buf, pdict)
+ _, filedata = data.popitem()
+ buf.close()
+
+ # Get a BytesIO now, and write the file into it.
+ buf = BytesIO()
+ buf.write(''.join(filedata))
+ buf.seek(0)
+
+ return buf
conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -8,7 +8,6 @@
import os
import bz2
-import sys
import json
import shutil
import hashlib
@@ -18,22 +17,18 @@
from conda import config
from conda.utils import memoized
-from conda.connection import connectionhandled_urlopen
-from conda.compat import PY3, itervalues, get_http_value
+from conda.connection import CondaSession
+from conda.compat import itervalues, get_http_value
from conda.lock import Locked
-if PY3:
- import urllib.request as urllib2
-else:
- import urllib2
-
+import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
+stderrlog = getLogger('stderrlog')
fail_unknown_host = False
-retries = 3
def create_cache_dir():
@@ -55,9 +50,11 @@ def add_http_value_to_dict(u, http_key, d, dict_key):
d[dict_key] = value
-def fetch_repodata(url, cache_dir=None, use_cache=False):
+def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
dotlog.debug("fetching repodata: %s ..." % url)
+ session = session or CondaSession()
+
cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
try:
cache = json.load(open(cache_path))
@@ -67,33 +64,32 @@ def fetch_repodata(url, cache_dir=None, use_cache=False):
if use_cache:
return cache
- request = urllib2.Request(url + 'repodata.json.bz2')
- if '_etag' in cache:
- request.add_header('If-None-Match', cache['_etag'])
- if '_mod' in cache:
- request.add_header('If-Modified-Since', cache['_mod'])
+ headers = {}
+ if "_tag" in cache:
+ headers["If-None-Match"] = cache["_etag"]
+ if "_mod" in cache:
+ headers["If-Modified-Since"] = cache["_mod"]
try:
- u = connectionhandled_urlopen(request)
- data = u.read()
- u.close()
- cache = json.loads(bz2.decompress(data).decode('utf-8'))
- add_http_value_to_dict(u, 'Etag', cache, '_etag')
- add_http_value_to_dict(u, 'Last-Modified', cache, '_mod')
+ resp = session.get(url + 'repodata.json.bz2', headers=headers)
+ resp.raise_for_status()
+ if resp.status_code != 304:
+ cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
except ValueError:
raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url)
- except urllib2.HTTPError as e:
- msg = "HTTPError: %d %s %s\n" % (e.code, e.msg, url)
+ except requests.exceptions.HTTPError as e:
+ msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
- if e.code != 304:
- raise RuntimeError(msg)
+ raise RuntimeError(msg)
- except urllib2.URLError as e:
- sys.stderr.write("Error: unknown host: %s (%r)\n" % (url, e))
+ except requests.exceptions.ConnectionError as e:
+ msg = "Connection error: %s: %s\n" % (e, url)
+ stderrlog.info('Could not connect to %s\n' % url)
+ log.debug(msg)
if fail_unknown_host:
- sys.exit(1)
+ raise RuntimeError(msg)
cache['_url'] = url
try:
@@ -104,16 +100,16 @@ def fetch_repodata(url, cache_dir=None, use_cache=False):
return cache or None
-
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
log.debug('channel_urls=' + repr(channel_urls))
index = {}
stdoutlog.info("Fetching package metadata: ")
+ session = CondaSession()
for url in reversed(channel_urls):
if config.allowed_channels and url not in config.allowed_channels:
sys.exit("\nError: URL '%s' not in allowed channels" % url)
- repodata = fetch_repodata(url, use_cache=use_cache)
+ repodata = fetch_repodata(url, use_cache=use_cache, session=session)
if repodata is None:
continue
new_index = repodata['packages']
@@ -141,107 +137,80 @@ def fetch_index(channel_urls, use_cache=False, unknown=False):
return index
-
-def fetch_pkg(info, dst_dir=None):
+def fetch_pkg(info, dst_dir=None, session=None):
'''
fetch a package given by `info` and store it into `dst_dir`
'''
if dst_dir is None:
dst_dir = config.pkgs_dirs[0]
+ session = session or CondaSession()
+
fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
url = info['channel'] + fn
log.debug("url=%r" % url)
path = join(dst_dir, fn)
- pp = path + '.part'
+
+ download(url, path, session=session, md5=info['md5'], urlstxt=True)
+
+def download(url, dst_path, session=None, md5=None, urlstxt=False):
+ pp = dst_path + '.part'
+ dst_dir = os.path.split(dst_path)[0]
+ session = session or CondaSession()
with Locked(dst_dir):
- for x in range(retries):
- try:
- fi = connectionhandled_urlopen(url)
- except IOError:
- log.debug("attempt %d failed at urlopen" % x)
- continue
- if fi is None:
- log.debug("could not fetch (urlopen returned None)")
- continue
- n = 0
- h = hashlib.new('md5')
- getLogger('fetch.start').info((fn, info['size']))
- need_retry = False
- try:
- fo = open(pp, 'wb')
- except IOError:
- raise RuntimeError("Could not open %r for writing. "
- "Permissions problem or missing directory?" % pp)
- while True:
- try:
- chunk = fi.read(16384)
- except IOError:
- need_retry = True
- break
- if not chunk:
- break
- try:
- fo.write(chunk)
- except IOError:
- raise RuntimeError("Failed to write to %r." % pp)
- h.update(chunk)
- n += len(chunk)
- getLogger('fetch.update').info(n)
+ try:
+ resp = session.get(url, stream=True)
+ except IOError:
+ raise RuntimeError("Could not open '%s'" % url)
+ except requests.exceptions.HTTPError as e:
+ msg = "HTTPError: %s: %s\n" % (e, url)
+ log.debug(msg)
+ raise RuntimeError(msg)
- fo.close()
- if need_retry:
- continue
+ size = resp.headers.get('Content-Length')
+ if size:
+ size = int(size)
+ fn = basename(dst_path)
+ getLogger('fetch.start').info((fn[:14], size))
+
+ n = 0
+ if md5:
+ h = hashlib.new('md5')
+ try:
+ with open(pp, 'wb') as fo:
+ for chunk in resp.iter_content(2**14):
+ try:
+ fo.write(chunk)
+ except IOError:
+ raise RuntimeError("Failed to write to %r." % pp)
+ if md5:
+ h.update(chunk)
+ n += len(chunk)
+ if size:
+ getLogger('fetch.update').info(n)
+ except IOError:
+ raise RuntimeError("Could not open %r for writing. "
+ "Permissions problem or missing directory?" % pp)
- fi.close()
+ if size:
getLogger('fetch.stop').info(None)
- if h.hexdigest() != info['md5']:
- raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)" % (fn, h.hexdigest(), info['md5']))
- try:
- os.rename(pp, path)
- except OSError:
- raise RuntimeError("Could not rename %r to %r." % (pp, path))
+
+ if md5 and h.hexdigest() != md5:
+ raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)" % (url, h.hexdigest(), md5))
+
+ try:
+ os.rename(pp, dst_path)
+ except OSError as e:
+ raise RuntimeError("Could not rename %r to %r: %r" % (pp,
+ dst_path, e))
+
+ if urlstxt:
try:
with open(join(dst_dir, 'urls.txt'), 'a') as fa:
fa.write('%s\n' % url)
except IOError:
pass
- return
-
- raise RuntimeError("Could not locate '%s'" % url)
-
-
-def download(url, dst_path):
- try:
- u = connectionhandled_urlopen(url)
- except IOError:
- raise RuntimeError("Could not open '%s'" % url)
- except ValueError as e:
- raise RuntimeError(e)
-
- size = get_http_value(u, 'Content-Length')
- if size:
- size = int(size)
- fn = basename(dst_path)
- getLogger('fetch.start').info((fn[:14], size))
-
- n = 0
- fo = open(dst_path, 'wb')
- while True:
- chunk = u.read(16384)
- if not chunk:
- break
- fo.write(chunk)
- n += len(chunk)
- if size:
- getLogger('fetch.update').info(n)
- fo.close()
-
- u.close()
- if size:
- getLogger('fetch.stop').info(None)
-
class TmpDownload(object):
""" |
Add ability to keep retrying with a lock error
The yum installer (IIRC) has a nice feature that it will keep trying every 10 seconds or so if there is a lock error. This could be useful for conda.
| conda/lock.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
"""
Tools for working with locks
A lock is just an empty directory. We use directories because this lets us use
the race condition-proof os.makedirs.
For now, there is one global lock for all of conda, because some things happen
globally (such as downloading packages).
We don't raise an error if the lock is named with the current PID
"""
import os
from os.path import join
import glob
LOCKFN = '.conda_lock'
class Locked(object):
    """
    Context manager to handle locks.

    The lock is the directory <path>/.conda_lock-<pid>; directories are
    used because os.makedirs is atomic, making creation race-proof.
    """
    def __init__(self, path):
        # path: the directory to lock (e.g. the package cache)
        self.path = path
        self.end = "-" + str(os.getpid())
        self.lock_path = join(self.path, LOCKFN + self.end)
        self.pattern = join(self.path, LOCKFN + '-*')
        # whether __exit__ should remove the lock (False when our own pid
        # already holds it -- the outer context manager will clean up)
        self.remove = True

    def __enter__(self):
        files = glob.glob(self.pattern)
        # NOTE(review): only files[0] is inspected; with several stale locks
        # from different pids ours may be hidden -- confirm acceptable.
        if files and not files[0].endswith(self.end):
            # Keep the string "LOCKERROR" in this string so that external
            # programs can look for it.
            raise RuntimeError("""\
LOCKERROR: It looks like conda is already doing something.
The lock %s was found. Wait for it to finish before continuing.
If you are sure that conda is not running, remove it and try again.
You can also use: $ conda clean --lock""" % self.lock_path)
        if not files:
            try:
                # atomic creation; losing a race just raises OSError here
                os.makedirs(self.lock_path)
            except OSError:
                pass
        else: # PID lock already here --- someone else will remove it.
            self.remove = False

    def __exit__(self, exc_type, exc_value, traceback):
        if self.remove:
            for path in self.lock_path, self.path:
                try:
                    os.rmdir(path)
                except OSError:
                    pass
<|code_end|>
| conda/lock.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
"""
Tools for working with locks
A lock is just an empty directory. We use directories because this lets us use
the race condition-proof os.makedirs.
For now, there is one global lock for all of conda, because some things happen
globally (such as downloading packages).
We don't raise an error if the lock is named with the current PID
"""
import os
from os.path import join
import glob
from time import sleep
LOCKFN = '.conda_lock'
class Locked(object):
    """
    Context manager to handle locks.

    If another process holds the lock, this version retries with
    exponential backoff instead of failing immediately.
    """
    def __init__(self, path):
        # path: the directory to lock; the lock itself is the directory
        # <path>/.conda_lock-<pid>
        self.path = path
        self.end = "-" + str(os.getpid())
        self.lock_path = join(self.path, LOCKFN + self.end)
        self.pattern = join(self.path, LOCKFN + '-*')
        self.remove = True

    def __enter__(self):
        # up to 10 attempts, sleeping 1, 2, 4, ... seconds between them
        retries = 10
        # Keep the string "LOCKERROR" in this string so that external
        # programs can look for it.
        lockstr = ("""\
    LOCKERROR: It looks like conda is already doing something.
    The lock %s was found. Wait for it to finish before continuing.
    If you are sure that conda is not running, remove it and try again.
    You can also use: $ conda clean --lock""" % self.lock_path)
        sleeptime = 1
        while retries:
            files = glob.glob(self.pattern)
            if files and not files[0].endswith(self.end):
                # somebody else's lock: wait and try again
                print(lockstr)
                print("Sleeping for %s seconds" % sleeptime)
                sleep(sleeptime)
                sleeptime *= 2
                retries -= 1
            else:
                break
        else:  # the while loop ran out of retries
            print("Exceeded max retries, giving up")
            raise RuntimeError(lockstr)

        if not files:
            try:
                # atomic creation via os.makedirs
                os.makedirs(self.lock_path)
            except OSError:
                pass
        else: # PID lock already here --- someone else will remove it.
            self.remove = False

    def __exit__(self, exc_type, exc_value, traceback):
        if self.remove:
            for path in self.lock_path, self.path:
                try:
                    os.rmdir(path)
                except OSError:
                    pass
| conda/lock.py
--- a/conda/lock.py
+++ b/conda/lock.py
@@ -19,7 +19,7 @@
import os
from os.path import join
import glob
-
+from time import sleep
LOCKFN = '.conda_lock'
@@ -36,15 +36,28 @@ def __init__(self, path):
self.remove = True
def __enter__(self):
- files = glob.glob(self.pattern)
- if files and not files[0].endswith(self.end):
- # Keep the string "LOCKERROR" in this string so that external
- # programs can look for it.
- raise RuntimeError("""\
-LOCKERROR: It looks like conda is already doing something.
-The lock %s was found. Wait for it to finish before continuing.
-If you are sure that conda is not running, remove it and try again.
-You can also use: $ conda clean --lock""" % self.lock_path)
+ retries = 10
+ # Keep the string "LOCKERROR" in this string so that external
+ # programs can look for it.
+ lockstr = ("""\
+ LOCKERROR: It looks like conda is already doing something.
+ The lock %s was found. Wait for it to finish before continuing.
+ If you are sure that conda is not running, remove it and try again.
+ You can also use: $ conda clean --lock""" % self.lock_path)
+ sleeptime = 1
+ while retries:
+ files = glob.glob(self.pattern)
+ if files and not files[0].endswith(self.end):
+ print(lockstr)
+ print("Sleeping for %s seconds" % sleeptime)
+ sleep(sleeptime)
+ sleeptime *= 2
+ retries -= 1
+ else:
+ break
+ else:
+ print("Exceeded max retries, giving up")
+ raise RuntimeError(lockstr)
if not files:
try: |
conda does not prompt for proxy username and password
```
[ COMPLETE ] |#################################################| 100%
The batch file cannot be found.
C:\Code>conda update conda
Fetching package metadata: .Error: HTTPError: 407 Client Error: Proxy Authentication Required: http://repo.continuum.io/pkgs/pro/win-64/
```
| conda/config.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import logging
from platform import machine
from os.path import abspath, expanduser, isfile, isdir, join
from conda.compat import urlparse
from conda.utils import try_write
log = logging.getLogger(__name__)
default_python = '%d.%d' % sys.version_info[:2]
# ----- operating system and architecture -----
_sys_map = {'linux2': 'linux', 'linux': 'linux',
'darwin': 'osx', 'win32': 'win'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if platform == 'linux' and machine() == 'armv6l':
subdir = 'linux-armv6l'
arch_name = 'armv6l'
else:
subdir = '%s-%d' % (platform, bits)
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs'
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.binstar.org/'
rc_bool_keys = [
'always_yes',
'allow_softlinks',
'changeps1',
'use_pip',
'binstar_upload',
'binstar_personal',
'show_channel_urls',
'allow_other_channels',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
'root_dir',
'channel_alias',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
def get_rc_path():
    """Return the path of the active .condarc file, or None.

    Resolution order: the CONDARC environment variable, then the user rc
    file, then the system-wide rc file.
    """
    path = os.getenv('CONDARC')
    if path == ' ':
        # NOTE(review): a single-space CONDARC appears to be the sentinel
        # for "use no rc file at all" -- confirm before changing.
        return None
    if path:
        return path
    for path in user_rc_path, sys_rc_path:
        if isfile(path):
            return path
    return None
rc_path = get_rc_path()
def load_condarc(path):
    """Parse the YAML condarc file at *path*.

    A falsy *path* (no rc file found) yields an empty dict, as does an
    rc file with no content.
    """
    if not path:
        return {}
    try:
        import yaml
    except ImportError:
        # yaml is an optional dependency; bail out with a clear message
        sys.exit('Error: could not import yaml (required to read .condarc '
                 'config file: %s)' % path)
    parsed = yaml.load(open(path))
    return parsed if parsed else {}
rc = load_condarc(rc_path)
# ----- local directories -----
# root_dir should only be used for testing, which is why we don't mention it
# in the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
rc.get('root_dir', sys.prefix))))
root_writable = try_write(root_dir)
root_env_name = 'root'
def _default_envs_dirs():
    """Default list of directories in which environments are created."""
    lst = [join(root_dir, 'envs')]
    if not root_writable:
        # root install is read-only: prefer a per-user envs directory first
        lst.insert(0, '~/envs')
    return lst
def _pathsep_env(name):
    """Split env var *name* on os.pathsep into a list of paths.

    The literal entry 'DEFAULTS' expands to the rc file's envs_dirs (or
    the built-in defaults).  Returns [] when the variable is unset.
    """
    x = os.getenv(name)
    if x is None:
        return []
    res = []
    for path in x.split(os.pathsep):
        if path == 'DEFAULTS':
            for p in rc.get('envs_dirs') or _default_envs_dirs():
                res.append(p)
        else:
            res.append(path)
    return res
envs_dirs = [abspath(expanduser(path)) for path in (
_pathsep_env('CONDA_ENVS_PATH') or
rc.get('envs_dirs') or
_default_envs_dirs()
)]
def pkgs_dir_from_envs_dir(envs_dir):
    """Return the package cache dir for *envs_dir*.

    The root envs dir shares the root 'pkgs' cache; any other envs dir
    gets its own hidden '.pkgs' cache.
    """
    if abspath(envs_dir) == abspath(join(root_dir, 'envs')):
        return join(root_dir, 'pkgs')
    else:
        return join(envs_dir, '.pkgs')
pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]
# ----- default environment prefix -----
_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, root_env_name):
default_prefix = root_dir
elif os.sep in _default_env:
default_prefix = abspath(_default_env)
else:
for envs_dir in envs_dirs:
default_prefix = join(envs_dir, _default_env)
if isdir(default_prefix):
break
else:
default_prefix = join(envs_dirs[0], _default_env)
# ----- channels -----
# Note, get_default_urls() and get_rc_urls() return unnormalized urls.
def get_default_urls():
    """Return the built-in default (unnormalized) channel URLs."""
    return [
        'http://repo.continuum.io/pkgs/free',
        'http://repo.continuum.io/pkgs/pro',
    ]
def get_rc_urls():
    """Return the channel list from the rc file ('system' is not allowed)."""
    channels = rc['channels']
    if 'system' in channels:
        raise RuntimeError("system cannot be used in .condarc")
    return channels
def is_url(url):
    """True when *url* carries an explicit scheme (http://, file://, ...)."""
    return bool(urlparse.urlparse(url).scheme)
def normalize_urls(urls, platform=None):
    """Expand special channel names into fully-qualified platform URLs.

    'defaults' expands to the built-in channels, 'system' to the rc
    file's channels (or the defaults when no rc file exists), and a bare
    name is resolved against channel_alias.  Every returned URL ends
    with '<platform>/'.
    """
    platform = platform or subdir
    newurls = []
    for url in urls:
        if url == "defaults":
            newurls.extend(normalize_urls(get_default_urls(),
                                          platform=platform))
        elif url == "system":
            if not rc_path:
                newurls.extend(normalize_urls(get_default_urls(),
                                              platform=platform))
            else:
                newurls.extend(normalize_urls(get_rc_urls(),
                                              platform=platform))
        elif not is_url(url):
            # bare channel name: resolve against the channel alias
            moreurls = normalize_urls([rc.get('channel_alias',
                DEFAULT_CHANNEL_ALIAS) + url], platform=platform)
            newurls.extend(moreurls)
        else:
            newurls.append('%s/%s/' % (url.rstrip('/'), platform))
    return newurls
def get_channel_urls(platform=None):
    """Return the normalized channel URLs to fetch from.

    The CIO_TEST environment variable redirects everything to the
    internal test file server; otherwise the rc file's channels (or the
    built-in defaults) are used.
    """
    if os.getenv('CIO_TEST'):
        base_urls = ['http://filer/pkgs/pro',
                     'http://filer/pkgs/free']
        if os.getenv('CIO_TEST') == '2':
            base_urls.insert(0, 'http://filer/test-pkgs')
    elif 'channels' not in rc:
        base_urls = get_default_urls()
    else:
        base_urls = get_rc_urls()
    return normalize_urls(base_urls, platform=platform)
def canonical_channel_name(channel):
    """Map a full channel URL back to its short, user-facing name."""
    if channel is None:
        return '<unknown>'
    channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
    if channel.startswith(channel_alias):
        # binstar channel: the first path segment after the alias
        return channel[len(channel_alias):].split('/')[0]
    if any(channel.startswith(url) for url in get_default_urls()):
        return 'defaults'
    if channel.startswith('http://filer/'):
        return 'filer'
    return channel
# ----- allowed channels -----
def get_allowed_channels():
    """Return the channel whitelist from the system rc file, or None.

    None means no restriction: either no system rc file exists or it
    does not set allow_other_channels to False.
    """
    if not isfile(sys_rc_path):
        return None
    sys_rc = load_condarc(sys_rc_path)
    if sys_rc.get('allow_other_channels', True):
        return None
    if 'channels' in sys_rc:
        base_urls = sys_rc['channels']
    else:
        base_urls = get_default_urls()
    return normalize_urls(base_urls)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """Return the rc file's proxy_servers mapping (or None when unset)."""
    servers = rc.get('proxy_servers')
    if servers is not None and not isinstance(servers, dict):
        sys.exit("Error: proxy_servers setting not a mapping")
    return servers
# ----- foreign -----
try:
with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
foreign = fi.read().split()
except IOError:
foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']
# ----- misc -----
always_yes = bool(rc.get('always_yes', False))
changeps1 = bool(rc.get('changeps1', True))
use_pip = bool(rc.get('use_pip', True))
binstar_upload = rc.get('binstar_upload', None) # None means ask
binstar_personal = bool(rc.get('binstar_personal', True))
allow_softlinks = bool(rc.get('allow_softlinks', True))
self_update = bool(rc.get('self_update', True))
# show channel URLs when displaying what is going to be downloaded
show_channel_urls = bool(rc.get('show_channel_urls', False))
# set packages disallowed to be installed
disallow = set(rc.get('disallow', []))
# packages which are added to a newly created environment by default
create_default_packages = list(rc.get('create_default_packages', []))
try:
track_features = set(rc['track_features'].split())
except KeyError:
track_features = None
<|code_end|>
conda/connection.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
import re
import mimetypes
import os
import email
import base64
import ftplib
import cgi
from io import BytesIO
from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers
import requests
RETRIES = 3
log = getLogger(__name__)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests.Session preconfigured for conda downloads."""

    # default per-request timeout; None means wait indefinitely
    timeout = None

    def __init__(self, *args, **kwargs):
        """Apply rc proxy settings, http(s) retries, and extra adapters.

        retries (keyword only, default RETRIES) sets the HTTPAdapter
        max_retries for http:// and https:// URLs.
        """
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        self.proxies = get_proxy_servers()
        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves file:// URLs from the local disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Build a requests.Response for a local file (404 when missing)."""
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # file does not exist (or is unreadable): report it as a 404
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            # hand the open file object to requests; closing the response
            # closes the file
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close
        return resp

    def close(self):
        pass
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    path = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(path):
        # Windows drive letter written as 'c|' or 'c:' at the front
        path = path[0] + ':' + path[2:]
    else:
        path = '/' + path
    return path
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''
    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr,}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Get the method and attempt to find the function to call.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))

        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''

        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

            # Decode the base64 encoded string.
            # NOTE(review): b64decode returns bytes on Python 3, so the
            # .split(':') below assumes Python 2 str -- confirm on py3.
            decoded = base64.b64decode(encoded)

            # The string is of the form 'username:password'. Split on the
            # colon.
            components = decoded.split(':')
            username = components[0]
            password = components[1]
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)

        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    '''Returns a callback suitable for use by the FTP library. This callback
    will repeatedly save data into the variable provided to this function. This
    variable should be a file-like structure.'''
    def callback(chunk):
        # append each received chunk to the caller's buffer
        variable.write(chunk)
    return callback
class AuthError(Exception):
    '''Raised when an Authorization header is not valid HTTP Basic auth.'''
def build_text_response(request, data, code):
    '''Build a response for textual data.

    Thin wrapper around build_response() with a fixed 'ascii' encoding.
    '''
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.

    Thin wrapper around build_response() with encoding=None.
    '''
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()

    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # NOTE(review): ftplib status lines look like '226 Transfer complete';
    # status_code is therefore left as a *string* here, unlike requests'
    # usual int codes -- confirm callers tolerate this.
    response.status_code = code.split()[0]

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)

    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # NOTE(review): ''.join assumes the parsed parts are str; under
    # Python 3 cgi.parse_multipart can hand back bytes -- confirm the
    # py3 upload path before relying on this.
    buf.write(''.join(filedata))
    buf.seek(0)

    return buf
<|code_end|>
conda/fetch.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, isdir, join
import sys
from multiprocessing.pool import ThreadPool
from conda import config
from conda.utils import memoized
from conda.connection import CondaSession
from conda.compat import itervalues, get_http_value
from conda.lock import Locked
import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Ensure the repodata cache directory exists and return its path."""
    path = join(config.pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        # already exists (or cannot be created); callers surface IO errors
        pass
    return path
def cache_fn_url(url):
    """Return the cache file name for a channel URL: md5(url) + '.json'."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest + '.json'
def add_http_value_to_dict(u, http_key, d, dict_key):
    """Copy header *http_key* from response *u* into d[dict_key].

    Falsy (missing or empty) header values are skipped.
    """
    value = get_http_value(u, http_key)
    if value:
        d[dict_key] = value
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (or re-validate) repodata.json.bz2 for channel *url*.

    A local JSON cache is consulted first; its stored ETag/Last-Modified
    values are sent as conditional request headers so that an unchanged
    index answers 304 and the cached copy is reused.  Connection errors
    fall back to the cache unless fail_unknown_host is set.

    Returns the repodata dict, or None when nothing could be fetched and
    no usable cache exists.  Raises RuntimeError on HTTP errors or a
    corrupt index file.
    """
    dotlog.debug("fetching repodata: %s ..." % url)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    # BUG FIX: this used to test for "_tag" while reading "_etag", which
    # both risked a KeyError and disabled conditional requests entirely.
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2', headers=headers)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
    except ValueError:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url)
    except requests.exceptions.HTTPError as e:
        msg = "HTTPError: %s: %s\n" % (e, url)
        log.debug(msg)
        raise RuntimeError(msg)
    except requests.exceptions.ConnectionError as e:
        # best effort: fall through and serve whatever cache we have
        msg = "Connection error: %s: %s\n" % (e, url)
        stderrlog.info('Could not connect to %s\n' % url)
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = url
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Build the package index (fn -> record) for the given channel URLs.

    Later channels in *channel_urls* win on conflicts, every channel is
    checked against the system-wide whitelist, and with unknown=True any
    locally cached package absent from all channels is added as well.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
- %s
""" % (url, '\n - '.join(config.allowed_channels)))
    # NOTE: a ThreadPool(5) used to be created here but was never used;
    # repodata is fetched sequentially.
    repodatas = [(url, fetch_repodata(url, use_cache=use_cache,
                                      session=session))
                 for url in reversed(channel_urls)]
    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            # remember which channel every record came from
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    Download the package described by the index record *info* into
    *dst_dir* (the first package cache by default), verifying its md5
    and recording the url in urls.txt.
    '''
    session = session or CondaSession()
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]
    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    download(url, join(dst_dir, fn), session=session, md5=info['md5'],
             urlstxt=True)
def download(url, dst_path, session=None, md5=None, urlstxt=False):
    """Download *url* to *dst_path*, atomically via a '.part' file.

    When *md5* is given, the digest of the received bytes is verified
    before the rename; with urlstxt=True the url is appended to
    urls.txt in the destination directory.  The destination directory
    is locked for the duration.  Raises RuntimeError on any failure.
    """
    pp = dst_path + '.part'
    dst_dir = os.path.split(dst_path)[0]
    session = session or CondaSession()

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True)
            resp.raise_for_status()
        except IOError:
            raise RuntimeError("Could not open '%s'" % url)
        except requests.exceptions.HTTPError as e:
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            # progress-bar handlers listen on these loggers
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    n += len(chunk)
                    if size:
                        getLogger('fetch.update').info(n)
        except IOError:
            raise RuntimeError("Could not open %r for writing. "
                               "Permissions problem or missing directory?" % pp)

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)" % (url, h.hexdigest(), md5))

        try:
            # atomic: the finished file only ever appears complete
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" % (pp,
                dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
class TmpDownload(object):
    """
    Context manager that downloads *url* into a temporary directory and
    removes that directory on exit.  A plain local path (no '://') is
    returned unchanged and nothing is downloaded or cleaned up.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # a local file was given directly; no tmp dir is created
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from conda.console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/config.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import logging
from platform import machine
from os.path import abspath, expanduser, isfile, isdir, join
from conda.compat import urlparse
from conda.utils import try_write
log = logging.getLogger(__name__)
default_python = '%d.%d' % sys.version_info[:2]
# ----- operating system and architecture -----
_sys_map = {'linux2': 'linux', 'linux': 'linux',
'darwin': 'osx', 'win32': 'win'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if platform == 'linux' and machine() == 'armv6l':
subdir = 'linux-armv6l'
arch_name = 'armv6l'
else:
subdir = '%s-%d' % (platform, bits)
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs'
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.binstar.org/'
rc_bool_keys = [
'always_yes',
'allow_softlinks',
'changeps1',
'use_pip',
'binstar_upload',
'binstar_personal',
'show_channel_urls',
'allow_other_channels',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
'root_dir',
'channel_alias',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
def get_rc_path():
path = os.getenv('CONDARC')
if path == ' ':
return None
if path:
return path
for path in user_rc_path, sys_rc_path:
if isfile(path):
return path
return None
rc_path = get_rc_path()
def load_condarc(path):
if not path:
return {}
try:
import yaml
except ImportError:
sys.exit('Error: could not import yaml (required to read .condarc '
'config file: %s)' % path)
return yaml.load(open(path)) or {}
rc = load_condarc(rc_path)
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
rc.get('root_dir', sys.prefix))))
root_writable = try_write(root_dir)
root_env_name = 'root'
def _default_envs_dirs():
    """Default environment directories; ~/envs is tried first when the
    root prefix is not writable by this user."""
    dirs = [join(root_dir, 'envs')]
    if not root_writable:
        dirs.insert(0, '~/envs')
    return dirs
def _pathsep_env(name):
    """Split the environment variable *name* on the path separator,
    expanding the literal entry 'DEFAULTS' to the configured defaults."""
    raw = os.getenv(name)
    if raw is None:
        return []
    paths = []
    for entry in raw.split(os.pathsep):
        if entry == 'DEFAULTS':
            paths.extend(rc.get('envs_dirs') or _default_envs_dirs())
        else:
            paths.append(entry)
    return paths
# Search order: CONDA_ENVS_PATH env var, then condarc, then the defaults.
envs_dirs = [abspath(expanduser(path)) for path in (
    _pathsep_env('CONDA_ENVS_PATH') or
    rc.get('envs_dirs') or
    _default_envs_dirs()
    )]
def pkgs_dir_from_envs_dir(envs_dir):
    """Map an envs directory to its package cache: the root envs dir uses
    <root>/pkgs, any other envs dir uses a hidden .pkgs inside itself."""
    if abspath(envs_dir) == abspath(join(root_dir, 'envs')):
        return join(root_dir, 'pkgs')
    return join(envs_dir, '.pkgs')
# One package cache per envs directory.
pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]

# ----- default environment prefix -----

_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, root_env_name):
    default_prefix = root_dir
elif os.sep in _default_env:
    # CONDA_DEFAULT_ENV holds a path rather than an environment name.
    default_prefix = abspath(_default_env)
else:
    # Pick the first envs dir that already contains the named environment,
    # falling back to the first envs dir when none does.
    for envs_dir in envs_dirs:
        default_prefix = join(envs_dir, _default_env)
        if isdir(default_prefix):
            break
    else:
        default_prefix = join(envs_dirs[0], _default_env)
# ----- channels -----
# Note, get_default_urls() and get_rc_urls() return unnormalized urls.
def get_default_urls():
    """Unnormalized base URLs of the default package channels."""
    return [
        'http://repo.continuum.io/pkgs/free',
        'http://repo.continuum.io/pkgs/pro',
    ]
def get_rc_urls():
    """Unnormalized channel URLs from the condarc; 'system' is reserved."""
    channels = rc['channels']
    if 'system' in channels:
        raise RuntimeError("system cannot be used in .condarc")
    return channels
def is_url(url):
    """True when *url* carries an explicit scheme (http://, file://, ...)."""
    return bool(urlparse.urlparse(url).scheme)
def normalize_urls(urls, platform=None):
    """Expand channel shorthands into platform-specific URLs.

    'defaults' and 'system' expand recursively; a bare name (not a URL)
    is prefixed with the configured channel alias; full URLs get the
    '<url>/<platform>/' suffix.
    """
    platform = platform or subdir
    result = []
    for url in urls:
        if url == "defaults":
            result.extend(normalize_urls(get_default_urls(),
                                         platform=platform))
        elif url == "system":
            base = get_rc_urls() if rc_path else get_default_urls()
            result.extend(normalize_urls(base, platform=platform))
        elif not is_url(url):
            alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
            result.extend(normalize_urls([alias + url], platform=platform))
        else:
            result.append('%s/%s/' % (url.rstrip('/'), platform))
    return result
def get_channel_urls(platform=None):
    """Normalized channel URLs, honoring the CIO_TEST override."""
    cio_test = os.getenv('CIO_TEST')
    if cio_test:
        base_urls = ['http://filer/pkgs/pro',
                     'http://filer/pkgs/free']
        if cio_test == '2':
            base_urls.insert(0, 'http://filer/test-pkgs')
    elif 'channels' in rc:
        base_urls = get_rc_urls()
    else:
        base_urls = get_default_urls()
    return normalize_urls(base_urls, platform=platform)
def canonical_channel_name(channel):
    """Short human-readable name for a channel URL."""
    if channel is None:
        return '<unknown>'
    alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
    if channel.startswith(alias):
        # Aliased channels reduce to their account name.
        return channel.split(alias, 1)[1].split('/')[0]
    if any(channel.startswith(base) for base in get_default_urls()):
        return 'defaults'
    if channel.startswith('http://filer/'):
        return 'filer'
    return channel
# ----- allowed channels -----
def get_allowed_channels():
    """Channels permitted by the system condarc, or None when unrestricted."""
    if not isfile(sys_rc_path):
        return None
    sys_rc = load_condarc(sys_rc_path)
    if sys_rc.get('allow_other_channels', True):
        return None
    if 'channels' in sys_rc:
        base_urls = sys_rc['channels']
    else:
        base_urls = get_default_urls()
    return normalize_urls(base_urls)
# None means any channel is allowed.
allowed_channels = get_allowed_channels()

# ----- proxy -----
def get_proxy_servers():
    """Proxy mapping from the condarc, falling back to the environment
    proxies discovered by requests."""
    configured = rc.get('proxy_servers')
    if configured is None:
        import requests
        return requests.utils.getproxies()
    if isinstance(configured, dict):
        return configured
    sys.exit("Error: proxy_servers setting not a mapping")
# ----- foreign -----

try:
    with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
        foreign = fi.read().split()
except IOError:
    # No 'foreign' file: in an existing root nothing is foreign; in a not
    # yet materialized root, assume python is provided externally.
    foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']

# ----- misc -----

always_yes = bool(rc.get('always_yes', False))
changeps1 = bool(rc.get('changeps1', True))
use_pip = bool(rc.get('use_pip', True))
binstar_upload = rc.get('binstar_upload', None) # None means ask
binstar_personal = bool(rc.get('binstar_personal', True))
allow_softlinks = bool(rc.get('allow_softlinks', True))
self_update = bool(rc.get('self_update', True))
# show channel URLs when displaying what is going to be downloaded
show_channel_urls = bool(rc.get('show_channel_urls', False))
# set packages disallowed to be installed
disallow = set(rc.get('disallow', []))
# packages which are added to a newly created environment by default
create_default_packages = list(rc.get('create_default_packages', []))
try:
    track_features = set(rc['track_features'].split())
except KeyError:
    # Key absent from the condarc.
    track_features = None
conda/connection.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
import re
import mimetypes
import os
import email
import base64
import ftplib
import cgi
from io import BytesIO
from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers
import requests
# Number of automatic retries for HTTP(S) requests.
RETRIES = 3

log = getLogger(__name__)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests session preconfigured with conda's proxy servers, a
    retry policy, and file:// / ftp:// transport adapters."""

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        self.proxies = get_proxy_servers()

        # Configure automatic retries for transient HTTP(S) failures.
        if retries:
            adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            for scheme in ("http://", "https://"):
                self.mount(scheme, adapter)

        # Non-HTTP transports: local files and FTP.
        self.mount("file://", LocalFSAdapter())
        self.mount("ftp://", FTPAdapter())
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Serves file:// URLs straight off the local filesystem."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        filename = url_to_path(request.url)

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(filename)
        except OSError as exc:
            # A missing/unreadable file becomes a 404 whose body is the error.
            resp.status_code = 404
            resp.raw = exc
        else:
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": mimetypes.guess_type(filename)[0] or "text/plain",
                "Content-Length": stats.st_size,
                "Last-Modified": email.utils.formatdate(stats.st_mtime,
                                                        usegmt=True),
            })
            resp.raw = open(filename, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass
def url_to_path(url):
    """
    Convert a file: URL to a local filesystem path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    stripped = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(stripped):
        # Windows drive letter, e.g. 'c|/temp' or 'c:/temp' -> 'c:/temp'.
        return stripped[0] + ':' + stripped[2:]
    return '/' + stripped
# Leading Windows drive letter followed by ':' or '|' (RFC 1738 style),
# e.g. 'c|/temp' or 'C:/temp'.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''
    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr,}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            # Anonymous login.
            self.conn.login()

        # Get the method and attempt to find the function to call.
        # NOTE: an unsupported method raises KeyError here.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))

        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''

        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

            # Decode the base64 encoded string.
            decoded = base64.b64decode(encoded)

            # The string is of the form 'username:password'. Split on the
            # colon.
            # NOTE(review): on Python 3, b64decode returns bytes, so
            # decoded.split(':') would raise TypeError — confirm this path
            # is exercised under Python 3 before relying on it.
            components = decoded.split(':')
            username = components[0]
            password = components[1]
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    '''Return an ftplib retrieval callback that appends every received
    chunk to *variable*, a file-like object.'''
    def callback(chunk):
        variable.write(chunk)
    return callback
class AuthError(Exception):
    '''Raised when the Authorization header carries an unsupported or
    malformed authentication scheme.'''
    pass
def build_text_response(request, data, code):
    '''Build a response whose payload is ASCII text.'''
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for binary data whose encoding is unknown.'''
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()

    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # NOTE(review): ftplib returns e.g. '226 Transfer complete', so this
    # status_code is the leading token as a *string*, not an int — confirm
    # no caller compares it numerically.
    response.status_code = code.split()[0]

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)

    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # NOTE(review): joining with a str looks Python-2-only; on Python 3 the
    # parsed parts are bytes and ''.join would raise TypeError — confirm
    # before relying on FTP STOR under Python 3.
    buf.write(''.join(filedata))
    buf.seek(0)

    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url string.

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ... '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U

    # "is not None" so that empty strings (or port 0) are still emitted.
    pieces = []
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)

    return ''.join(pieces)
conda/fetch.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, isdir, join
import sys
import getpass
# from multiprocessing.pool import ThreadPool
from conda import config
from conda.utils import memoized
from conda.connection import CondaSession, unparse_url
from conda.compat import itervalues, get_http_value, input
from conda.lock import Locked
import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')

# When True, failing to reach a channel raises instead of being skipped.
fail_unknown_host = False
def create_cache_dir():
    """Ensure the repodata cache directory exists and return its path."""
    path = join(config.pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        # Already present (or not creatable); callers cope either way.
        pass
    return path
def cache_fn_url(url):
    """Cache filename for *url*: the md5 hex digest of the URL + '.json'."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest + '.json'
def add_http_value_to_dict(u, http_key, d, dict_key):
    """Copy the HTTP header *http_key* from response *u* into *d* under
    *dict_key*, when the header is present and non-empty."""
    header = get_http_value(u, http_key)
    if header:
        d[dict_key] = header
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (and locally cache) the repodata index for the channel *url*.

    Returns the parsed repodata dict, the cached copy on a 304 response or
    a (tolerated) connection failure, or None when nothing is available.
    """
    dotlog.debug("fetching repodata: %s ..." % url)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        cache = json.load(open(cache_path))
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    # Send conditional-request headers so an unchanged index yields a 304.
    # (Bug fix: this previously tested for "_tag", so If-None-Match was
    # never sent, and a stray "_tag" key would raise KeyError.)
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2', headers=headers,
                           proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
        # Remember the validators so the next fetch can be conditional
        # (otherwise the _etag/_mod keys checked above are never written).
        add_http_value_to_dict(resp, 'Etag', cache, '_etag')
        add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (url, e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407: # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        msg = "HTTPError: %s: %s\n" % (e, url)
        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e): # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        msg = "Connection error: %s: %s\n" % (e, url)
        stderrlog.info('Could not connect to %s\n' % url)
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = url
    try:
        # Best effort: a read-only cache directory is not fatal.
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
def handle_proxy_407(url, session):
    """Prompt for proxy credentials and embed them in the session's proxy
    URL for the scheme of *url*."""
    # HTTPProxyAuth is not used because it does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    user, pwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], user, pwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with 'username:passwd' inserted as its auth component."""
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    parts[1] = '%s:%s' % (username, passwd)
    return unparse_url(parts)
def get_proxy_username_and_pass(scheme):
    """Interactively ask the user for proxy credentials for *scheme*."""
    user = input("\n%s proxy username: " % scheme)
    pwd = getpass.getpass("Password:")
    return user, pwd
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Build the package index (filename -> info dict) from all channels.

    Later channels in *channel_urls* take priority: the list is iterated in
    reverse and later updates overwrite earlier entries. When *unknown* is
    true, packages present only in the local package cache are added too.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    # Enforce the system condarc's channel whitelist before fetching.
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
- %s
""" % (url, '\n - '.join(config.allowed_channels)))

    repodatas = map(lambda url: (url, fetch_repodata(url,
                    use_cache=use_cache, session=session)),
                    reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        # Tag each record with its origin channel before merging.
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')

    if unknown:
        # Fold in packages that only exist in the local package cache.
        for pkgs_dir in config.pkgs_dirs:
            if not isdir(pkgs_dir):
                continue
            for dn in os.listdir(pkgs_dir):
                fn = dn + '.tar.bz2'
                if fn in index:
                    continue
                try:
                    with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                        meta = json.load(fi)
                except IOError:
                    continue
                if 'depends' not in meta:
                    continue
                log.debug("adding cached pkg to index: %s" % fn)
                index[fn] = meta

    return index
def fetch_pkg(info, dst_dir=None, session=None):
    """Download the package described by *info* into *dst_dir* (the first
    package cache by default), verifying its md5 and recording the URL."""
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]
    session = session or CondaSession()

    filename = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + filename
    log.debug("url=%r" % url)

    download(url, join(dst_dir, filename), session=session,
             md5=info['md5'], urlstxt=True)
def download(url, dst_path, session=None, md5=None, urlstxt=False):
    """Download *url* to *dst_path* atomically (via a '.part' file).

    Verifies *md5* when given, shows progress via the fetch.* loggers,
    retries once with credentials on HTTP 407, and optionally appends the
    URL to urls.txt in the destination directory.
    """
    pp = dst_path + '.part'
    dst_dir = os.path.split(dst_path)[0]
    session = session or CondaSession()

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407: # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives this
            # error and http gives the above error. Also, there is no status_code
            # attribute here. We have to just check if it looks like 407. See
            # https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e): # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt)
            # Bug fix: a non-407 connection error used to be silently
            # swallowed here, after which `resp` was unbound below.
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    n += len(chunk)
                    if size:
                        getLogger('fetch.update').info(n)
        except IOError:
            raise RuntimeError("Could not open %r for writing. "
                               "Permissions problem or missing directory?" % pp)

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        # Atomically move the completed download into place.
        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" % (pp,
                dst_path, e))

        if urlstxt:
            # Best effort: record where this package came from.
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
class TmpDownload(object):
    """
    Context manager to handle downloads to a tempfile
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # Already a local path: nothing to download or clean up.
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from conda.console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/config.py
--- a/conda/config.py
+++ b/conda/config.py
@@ -244,7 +244,10 @@ def get_allowed_channels():
def get_proxy_servers():
res = rc.get('proxy_servers')
- if res is None or isinstance(res, dict):
+ if res is None:
+ import requests
+ return requests.utils.getproxies()
+ if isinstance(res, dict):
return res
sys.exit("Error: proxy_servers setting not a mapping")
conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -372,3 +372,51 @@ def parse_multipart_files(request):
buf.seek(0)
return buf
+
+# Taken from urllib3 (actually
+# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
+# requests.packages.urllib3 we can just use that.
+
+
+def unparse_url(U):
+ """
+ Convert a :class:`.Url` into a url
+
+ The input can be any iterable that gives ['scheme', 'auth', 'host',
+ 'port', 'path', 'query', 'fragment']. Unused items should be None.
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+ returned url may not be exactly the same as the url inputted to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port).
+
+
+ Example: ::
+
+ >>> Url = parse_url('http://google.com/mail/')
+ >>> unparse_url(Url)
+ 'http://google.com/mail/'
+ >>> unparse_url(['http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment'])
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = U
+ url = ''
+
+ # We use "is not None" we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url = scheme + '://'
+ if auth is not None:
+ url += auth + '@'
+ if host is not None:
+ url += host
+ if port is not None:
+ url += ':' + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += '?' + query
+ if fragment is not None:
+ url += '#' + fragment
+
+ return url
conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -15,12 +15,13 @@
from logging import getLogger
from os.path import basename, isdir, join
import sys
-from multiprocessing.pool import ThreadPool
+import getpass
+# from multiprocessing.pool import ThreadPool
from conda import config
from conda.utils import memoized
-from conda.connection import CondaSession
-from conda.compat import itervalues, get_http_value
+from conda.connection import CondaSession, unparse_url
+from conda.compat import itervalues, get_http_value, input
from conda.lock import Locked
import requests
@@ -73,20 +74,34 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
headers["If-Modified-Since"] = cache["_mod"]
try:
- resp = session.get(url + 'repodata.json.bz2', headers=headers)
+ resp = session.get(url + 'repodata.json.bz2', headers=headers, proxies=session.proxies)
resp.raise_for_status()
if resp.status_code != 304:
cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
- except ValueError:
- raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url)
+ except ValueError as e:
+ raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
+ (url, e))
except requests.exceptions.HTTPError as e:
+ if e.response.status_code == 407: # Proxy Authentication Required
+ handle_proxy_407(url, session)
+ # Try again
+ return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
raise RuntimeError(msg)
except requests.exceptions.ConnectionError as e:
+ # requests isn't so nice here. For whatever reason, https gives this
+ # error and http gives the above error. Also, there is no status_code
+ # attribute here. We have to just check if it looks like 407. See
+ # https://github.com/kennethreitz/requests/issues/2061.
+ if "407" in str(e): # Proxy Authentication Required
+ handle_proxy_407(url, session)
+ # Try again
+ return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
+
msg = "Connection error: %s: %s\n" % (e, url)
stderrlog.info('Could not connect to %s\n' % url)
log.debug(msg)
@@ -102,10 +117,31 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
return cache or None
+def handle_proxy_407(url, session):
+ """
+ Prompts the user for the proxy username and password and modifies the
+ proxy in the session object to include it.
+ """
+ # We could also use HTTPProxyAuth, but this does not work with https
+ # proxies (see https://github.com/kennethreitz/requests/issues/2061).
+ scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
+ username, passwd = get_proxy_username_and_pass(scheme)
+ session.proxies[scheme] = add_username_and_pass_to_url(session.proxies[scheme], username, passwd)
+
+def add_username_and_pass_to_url(url, username, passwd):
+ urlparts = list(requests.packages.urllib3.util.url.parse_url(url))
+ urlparts[1] = username + ':' + passwd
+ return unparse_url(urlparts)
+
+def get_proxy_username_and_pass(scheme):
+ username = input("\n%s proxy username: " % scheme)
+ passwd = getpass.getpass("Password:")
+ return username, passwd
+
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
log.debug('channel_urls=' + repr(channel_urls))
- pool = ThreadPool(5)
+ # pool = ThreadPool(5)
index = {}
stdoutlog.info("Fetching package metadata: ")
session = CondaSession()
@@ -171,15 +207,29 @@ def download(url, dst_path, session=None, md5=None, urlstxt=False):
with Locked(dst_dir):
try:
- resp = session.get(url, stream=True)
+ resp = session.get(url, stream=True, proxies=session.proxies)
resp.raise_for_status()
- except IOError:
- raise RuntimeError("Could not open '%s'" % url)
except requests.exceptions.HTTPError as e:
+ if e.response.status_code == 407: # Proxy Authentication Required
+ handle_proxy_407(url, session)
+ # Try again
+ return download(url, dst_path, session=session, md5=md5, urlstxt=urlstxt)
msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
raise RuntimeError(msg)
+ except requests.exceptions.ConnectionError as e:
+ # requests isn't so nice here. For whatever reason, https gives this
+ # error and http gives the above error. Also, there is no status_code
+ # attribute here. We have to just check if it looks like 407. See
+ # https://github.com/kennethreitz/requests/issues/2061.
+ if "407" in str(e): # Proxy Authentication Required
+ handle_proxy_407(url, session)
+ # Try again
+ return download(url, dst_path, session=session, md5=md5, urlstxt=urlstxt)
+ except IOError as e:
+ raise RuntimeError("Could not open '%s': %s" % (url, e))
+
size = resp.headers.get('Content-Length')
if size:
size = int(size) |
`conda create --file deps.txt pkg1 pkg2 ... pkgn` doesn't work — package specs given on the command line in addition to `--file` are silently ignored (note that `sympy` is missing from the install plan below).
```
$ echo "scipy" > deps.txt
$ conda create -n test08 --file deps.txt sympy
Fetching package metadata: ..
Solving package specifications: .
Package plan for installation in environment /home/mateusz/py/envs/test08:
The following packages will be linked:
package | build
---------------------------|-----------------
numpy-1.8.1 | py27_0 hard-link
openssl-1.0.1h | 0 hard-link
python-2.7.8 | 0 hard-link
readline-6.2 | 2 hard-link
scipy-0.14.0 | np18py27_0 hard-link
sqlite-3.8.4.1 | 0 hard-link
system-5.8 | 1 hard-link
tk-8.5.15 | 0 hard-link
zlib-1.2.7 | 0 hard-link
Proceed ([y]/n)? n
```
| conda/cli/install.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
import conda.config as config
import conda.plan as plan
from conda.api import get_index
from conda.cli import pscheck
from conda.cli import common
from conda.misc import touch_nonadmin
from conda.resolve import Resolve, MatchSpec
import conda.install as ci
def install_tar(prefix, tar_path, verbose=False):
from conda.misc import install_local_packages
tmp_dir = tempfile.mkdtemp()
t = tarfile.open(tar_path, 'r')
t.extractall(path=tmp_dir)
t.close()
paths = []
for root, dirs, files in os.walk(tmp_dir):
for fn in files:
if fn.endswith('.tar.bz2'):
paths.append(join(root, fn))
install_local_packages(prefix, paths, verbose=verbose)
shutil.rmtree(tmp_dir)
def check_prefix(prefix):
from conda.config import root_env_name
name = basename(prefix)
if name.startswith('.'):
sys.exit("Error: environment name cannot start with '.': %s" % name)
if name == root_env_name:
sys.exit("Error: '%s' is a reserved environment name" % name)
if exists(prefix):
sys.exit("Error: prefix already exists: %s" % prefix)
def clone(src_arg, dst_prefix):
from conda.misc import clone_env
if os.sep in src_arg:
src_prefix = abspath(src_arg)
if not isdir(src_prefix):
sys.exit('Error: could such directory: %s' % src_arg)
else:
src_prefix = common.find_prefix_name(src_arg)
if src_prefix is None:
sys.exit('Error: could not find environment: %s' % src_arg)
print("src_prefix: %r" % src_prefix)
print("dst_prefix: %r" % dst_prefix)
clone_env(src_prefix, dst_prefix)
def print_activate(arg):
print("#")
print("# To activate this environment, use:")
if sys.platform == 'win32':
print("# > activate %s" % arg)
else:
print("# $ source activate %s" % arg)
print("#")
print("# To deactivate this environment, use:")
print("# $ source deactivate")
print("#")
def get_revision(arg):
try:
return int(arg)
except ValueError:
sys.exit("Error: expected revision number, not: '%s'" % arg)
def install(args, parser, command='install'):
"""
conda install, conda update, and conda create
"""
newenv = bool(command == 'create')
if newenv:
common.ensure_name_or_prefix(args, command)
prefix = common.get_prefix(args, search=not newenv)
if newenv:
check_prefix(prefix)
if command == 'update':
if args.all:
if args.packages:
sys.exit("""Error: --all cannot be used with packages""")
else:
if len(args.packages) == 0:
sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)
if command == 'update':
linked = ci.linked(prefix)
for name in args.packages:
common.arg2spec(name)
if '=' in name:
sys.exit("Invalid package name: '%s'" % (name))
if name not in set(ci.name_dist(d) for d in linked):
sys.exit("Error: package '%s' is not installed in %s" %
(name, prefix))
if newenv and args.clone:
if args.packages:
sys.exit('Error: did not expect any arguments for --clone')
clone(args.clone, prefix)
touch_nonadmin(prefix)
print_activate(args.name if args.name else prefix)
return
if newenv and not args.no_default_packages:
default_packages = config.create_default_packages[:]
# Override defaults if they are specified at the command line
for default_pkg in config.create_default_packages:
if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
default_packages.remove(default_pkg)
args.packages.extend(default_packages)
common.ensure_override_channels_requires_channel(args)
channel_urls = args.channel or ()
if args.file:
specs = common.specs_from_url(args.file)
elif getattr(args, 'all', False):
specs = []
linked = ci.linked(prefix)
for pkg in linked:
name, ver, build = pkg.rsplit('-', 2)
if name == 'python' and ver.startswith('2'):
# Oh Python 2...
specs.append('%s >=%s,<3' % (name, ver))
else:
specs.append('%s >=%s' % (name, ver))
else:
specs = common.specs_from_args(args.packages)
if command == 'install' and args.revision:
get_revision(args.revision)
else:
common.check_specs(prefix, specs)
if args.use_local:
from conda.fetch import fetch_index
from conda.utils import url_path
try:
from conda_build import config as build_config
except ImportError:
sys.exit("Error: you need to have 'conda-build' installed"
" to use the --use-local option")
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
fetch_index.cache = {}
index = get_index([url_path(build_config.croot)],
use_cache=args.use_index_cache,
unknown=args.unknown)
else:
index = get_index(channel_urls=channel_urls, prepend=not
args.override_channels,
use_cache=args.use_index_cache,
unknown=args.unknown)
# Don't update packages that are already up-to-date
if command == 'update' and not args.all:
r = Resolve(index)
orig_packages = args.packages[:]
for name in orig_packages:
installed_metadata = [ci.is_linked(prefix, dist)
for dist in linked]
vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
if dist.rsplit('-', 2)[0] == name]
build_inst = [m['build_number'] for m in installed_metadata if
m['name'] == name]
assert len(vers_inst) == 1, name
assert len(build_inst) == 1, name
pkgs = sorted(r.get_pkgs(MatchSpec(name)))
if not pkgs:
# Shouldn't happen?
continue
latest = pkgs[-1]
if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
args.packages.remove(name)
if not args.packages:
from conda.cli.main_list import list_packages
regex = '^(%s)$' % '|'.join(orig_packages)
print('# All requested packages already installed.')
list_packages(prefix, regex)
return
# handle tar file containing conda packages
if len(args.packages) == 1:
tar_path = args.packages[0]
if tar_path.endswith('.tar'):
install_tar(prefix, tar_path, verbose=not args.quiet)
return
# handle explicit installs of conda packages
if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
from conda.misc import install_local_packages
install_local_packages(prefix, args.packages, verbose=not args.quiet)
return
if any(s.endswith('.tar.bz2') for s in args.packages):
sys.exit("cannot mix specifications with conda package filenames")
if args.force:
args.no_deps = True
spec_names = set(s.split()[0] for s in specs)
if args.no_deps:
only_names = spec_names
else:
only_names = None
if not isdir(prefix) and not newenv:
if args.mkdir:
try:
os.makedirs(prefix)
except OSError:
sys.exit("Error: could not create directory: %s" % prefix)
else:
sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)
if command == 'install' and args.revision:
actions = plan.revert_actions(prefix, get_revision(args.revision))
else:
actions = plan.install_actions(prefix, index, specs, force=args.force,
only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint)
if plan.nothing_to_do(actions):
from conda.cli.main_list import list_packages
regex = '^(%s)$' % '|'.join(spec_names)
print('\n# All requested packages already installed.')
list_packages(prefix, regex)
return
print()
print("Package plan for installation in environment %s:" % prefix)
plan.display_actions(actions, index)
if command in {'install', 'update'}:
common.check_write(command, prefix)
if not pscheck.main(args):
common.confirm_yn(args)
plan.execute_actions(actions, index, verbose=not args.quiet)
if newenv:
touch_nonadmin(prefix)
print_activate(args.name if args.name else prefix)
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
try:
prefix = tempfile.mkdtemp('conda')
specs = common.specs_from_args(packages)
index = get_index(channel_urls=channel_urls, prepend=prepend,
platform=platform)
plan.install_actions(prefix, index, specs, pinned=False, minimal_hint=minimal_hint)
finally:
ci.rm_rf(prefix)
<|code_end|>
| conda/cli/install.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
import conda.config as config
import conda.plan as plan
from conda.api import get_index
from conda.cli import pscheck
from conda.cli import common
from conda.misc import touch_nonadmin
from conda.resolve import Resolve, MatchSpec
import conda.install as ci
def install_tar(prefix, tar_path, verbose=False):
from conda.misc import install_local_packages
tmp_dir = tempfile.mkdtemp()
t = tarfile.open(tar_path, 'r')
t.extractall(path=tmp_dir)
t.close()
paths = []
for root, dirs, files in os.walk(tmp_dir):
for fn in files:
if fn.endswith('.tar.bz2'):
paths.append(join(root, fn))
install_local_packages(prefix, paths, verbose=verbose)
shutil.rmtree(tmp_dir)
def check_prefix(prefix):
from conda.config import root_env_name
name = basename(prefix)
if name.startswith('.'):
sys.exit("Error: environment name cannot start with '.': %s" % name)
if name == root_env_name:
sys.exit("Error: '%s' is a reserved environment name" % name)
if exists(prefix):
sys.exit("Error: prefix already exists: %s" % prefix)
def clone(src_arg, dst_prefix):
from conda.misc import clone_env
if os.sep in src_arg:
src_prefix = abspath(src_arg)
if not isdir(src_prefix):
sys.exit('Error: could such directory: %s' % src_arg)
else:
src_prefix = common.find_prefix_name(src_arg)
if src_prefix is None:
sys.exit('Error: could not find environment: %s' % src_arg)
print("src_prefix: %r" % src_prefix)
print("dst_prefix: %r" % dst_prefix)
clone_env(src_prefix, dst_prefix)
def print_activate(arg):
print("#")
print("# To activate this environment, use:")
if sys.platform == 'win32':
print("# > activate %s" % arg)
else:
print("# $ source activate %s" % arg)
print("#")
print("# To deactivate this environment, use:")
print("# $ source deactivate")
print("#")
def get_revision(arg):
try:
return int(arg)
except ValueError:
sys.exit("Error: expected revision number, not: '%s'" % arg)
def install(args, parser, command='install'):
"""
conda install, conda update, and conda create
"""
newenv = bool(command == 'create')
if newenv:
common.ensure_name_or_prefix(args, command)
prefix = common.get_prefix(args, search=not newenv)
if newenv:
check_prefix(prefix)
if command == 'update':
if args.all:
if args.packages:
sys.exit("""Error: --all cannot be used with packages""")
else:
if len(args.packages) == 0:
sys.exit("""Error: no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)
if command == 'update':
linked = ci.linked(prefix)
for name in args.packages:
common.arg2spec(name)
if '=' in name:
sys.exit("Invalid package name: '%s'" % (name))
if name not in set(ci.name_dist(d) for d in linked):
sys.exit("Error: package '%s' is not installed in %s" %
(name, prefix))
if newenv and args.clone:
if args.packages:
sys.exit('Error: did not expect any arguments for --clone')
clone(args.clone, prefix)
touch_nonadmin(prefix)
print_activate(args.name if args.name else prefix)
return
if newenv and not args.no_default_packages:
default_packages = config.create_default_packages[:]
# Override defaults if they are specified at the command line
for default_pkg in config.create_default_packages:
if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
default_packages.remove(default_pkg)
args.packages.extend(default_packages)
common.ensure_override_channels_requires_channel(args)
channel_urls = args.channel or ()
specs = []
if args.file:
specs.extend(common.specs_from_url(args.file))
elif getattr(args, 'all', False):
linked = ci.linked(prefix)
for pkg in linked:
name, ver, build = pkg.rsplit('-', 2)
if name == 'python' and ver.startswith('2'):
# Oh Python 2...
specs.append('%s >=%s,<3' % (name, ver))
else:
specs.append('%s >=%s' % (name, ver))
specs.extend(common.specs_from_args(args.packages))
if command == 'install' and args.revision:
get_revision(args.revision)
else:
common.check_specs(prefix, specs)
if args.use_local:
from conda.fetch import fetch_index
from conda.utils import url_path
try:
from conda_build import config as build_config
except ImportError:
sys.exit("Error: you need to have 'conda-build' installed"
" to use the --use-local option")
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
fetch_index.cache = {}
index = get_index([url_path(build_config.croot)],
use_cache=args.use_index_cache,
unknown=args.unknown)
else:
index = get_index(channel_urls=channel_urls, prepend=not
args.override_channels,
use_cache=args.use_index_cache,
unknown=args.unknown)
# Don't update packages that are already up-to-date
if command == 'update' and not args.all:
r = Resolve(index)
orig_packages = args.packages[:]
for name in orig_packages:
installed_metadata = [ci.is_linked(prefix, dist)
for dist in linked]
vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
if dist.rsplit('-', 2)[0] == name]
build_inst = [m['build_number'] for m in installed_metadata if
m['name'] == name]
assert len(vers_inst) == 1, name
assert len(build_inst) == 1, name
pkgs = sorted(r.get_pkgs(MatchSpec(name)))
if not pkgs:
# Shouldn't happen?
continue
latest = pkgs[-1]
if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
args.packages.remove(name)
if not args.packages:
from conda.cli.main_list import list_packages
regex = '^(%s)$' % '|'.join(orig_packages)
print('# All requested packages already installed.')
list_packages(prefix, regex)
return
# handle tar file containing conda packages
if len(args.packages) == 1:
tar_path = args.packages[0]
if tar_path.endswith('.tar'):
install_tar(prefix, tar_path, verbose=not args.quiet)
return
# handle explicit installs of conda packages
if args.packages and all(s.endswith('.tar.bz2') for s in args.packages):
from conda.misc import install_local_packages
install_local_packages(prefix, args.packages, verbose=not args.quiet)
return
if any(s.endswith('.tar.bz2') for s in args.packages):
sys.exit("cannot mix specifications with conda package filenames")
if args.force:
args.no_deps = True
spec_names = set(s.split()[0] for s in specs)
if args.no_deps:
only_names = spec_names
else:
only_names = None
if not isdir(prefix) and not newenv:
if args.mkdir:
try:
os.makedirs(prefix)
except OSError:
sys.exit("Error: could not create directory: %s" % prefix)
else:
sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix)
if command == 'install' and args.revision:
actions = plan.revert_actions(prefix, get_revision(args.revision))
else:
actions = plan.install_actions(prefix, index, specs, force=args.force,
only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint)
if plan.nothing_to_do(actions):
from conda.cli.main_list import list_packages
regex = '^(%s)$' % '|'.join(spec_names)
print('\n# All requested packages already installed.')
list_packages(prefix, regex)
return
print()
print("Package plan for installation in environment %s:" % prefix)
plan.display_actions(actions, index)
if command in {'install', 'update'}:
common.check_write(command, prefix)
if not pscheck.main(args):
common.confirm_yn(args)
plan.execute_actions(actions, index, verbose=not args.quiet)
if newenv:
touch_nonadmin(prefix)
print_activate(args.name if args.name else prefix)
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
try:
prefix = tempfile.mkdtemp('conda')
specs = common.specs_from_args(packages)
index = get_index(channel_urls=channel_urls, prepend=prepend,
platform=platform)
plan.install_actions(prefix, index, specs, pinned=False, minimal_hint=minimal_hint)
finally:
ci.rm_rf(prefix)
| conda/cli/install.py
--- a/conda/cli/install.py
+++ b/conda/cli/install.py
@@ -143,10 +143,10 @@ def install(args, parser, command='install'):
common.ensure_override_channels_requires_channel(args)
channel_urls = args.channel or ()
+ specs = []
if args.file:
- specs = common.specs_from_url(args.file)
+ specs.extend(common.specs_from_url(args.file))
elif getattr(args, 'all', False):
- specs = []
linked = ci.linked(prefix)
for pkg in linked:
name, ver, build = pkg.rsplit('-', 2)
@@ -155,8 +155,7 @@ def install(args, parser, command='install'):
specs.append('%s >=%s,<3' % (name, ver))
else:
specs.append('%s >=%s' % (name, ver))
- else:
- specs = common.specs_from_args(args.packages)
+ specs.extend(common.specs_from_args(args.packages))
if command == 'install' and args.revision:
get_revision(args.revision) |
conda command-line tool provides a convenience command to run the Python executable from a specified conda environment
I often want to run the Python interpreter from a specific conda environment while knowing only the name of that environment. I know that `conda -e` gives the path to each conda environment, from which I can derive the path of an environment-specific Python interpreter or the `activate` shell script, but this is inconvenient extra step.
It would be convenient to have a conda command like `conda intepreter -n ${environment} -- [args]` that invokes the environment-specific Python interpreter, inherits STDIN and command-line arguments, and returns exit code of Python. Ideally, it would be a drop-in replacement for directly running the Python interpreter, as documented https://docs.python.org/2/tutorial/interpreter.html.
My shell-fu is weak, but I think that something like `"exec /path/to/environment/bin/python "$@"` might work.
conda command-line tool provides a convenience command to run the Python executable from a specified conda environment
I often want to run the Python interpreter from a specific conda environment while knowing only the name of that environment. I know that `conda -e` gives the path to each conda environment, from which I can derive the path of an environment-specific Python interpreter or the `activate` shell script, but this is inconvenient extra step.
It would be convenient to have a conda command like `conda intepreter -n ${environment} -- [args]` that invokes the environment-specific Python interpreter, inherits STDIN and command-line arguments, and returns exit code of Python. Ideally, it would be a drop-in replacement for directly running the Python interpreter, as documented https://docs.python.org/2/tutorial/interpreter.html.
My shell-fu is weak, but I think that something like `"exec /path/to/environment/bin/python "$@"` might work.
| conda/cli/main_run.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import sys
from conda.cli import common
descr = "Launches an application installed with Conda."
def configure_parser(sub_parsers):
p = sub_parsers.add_parser('run',
description = descr,
help = descr)
common.add_parser_prefix(p)
common.add_parser_json(p)
p.add_argument(
'package',
metavar = 'COMMAND',
action = "store",
nargs = '?',
help = "package to launch"
)
p.add_argument(
'arguments',
metavar = 'ARGUMENTS',
action = 'store',
nargs = '*',
help = "additional arguments to application"
)
p.set_defaults(func=execute)
def execute(args, parser):
if not args.package:
parser.print_help()
return
import conda.install
import conda.resolve
from conda.api import get_package_versions, app_is_installed
from conda.misc import launch
prefix = common.get_prefix(args)
if args.package.endswith('.tar.bz2'):
if app_is_installed(args.package, prefixes=[prefix]):
fn = args.package
else:
error_message = "Package {} not installed.".format(args.package)
common.error_and_exit(error_message, json=args.json,
error_type="PackageNotInstalled")
else:
installed = []
for pkg in get_package_versions(args.package):
if app_is_installed(pkg.fn, prefixes=[prefix]):
installed.append(pkg)
for pkg in conda.install.linked(prefix):
name, version, build = pkg.rsplit('-', 2)
if name == args.package:
installed = [conda.resolve.Package(pkg + '.tar.bz2',
conda.install.is_linked(prefix, pkg))]
if not installed:
error_message = "App {} not installed.".format(args.package)
common.error_and_exit(error_message, json=args.json,
error_type="AppNotInstalled")
package = max(installed)
fn = package.fn
try:
subprocess = launch(fn, prefix=prefix, additional_args=args.arguments)
if args.json:
common.stdout_json(dict(fn=fn, pid=subprocess.pid))
else:
print("Started app. Some apps may take a while to finish loading.")
except Exception as e:
common.exception_and_exit(e, json=args.json)
<|code_end|>
conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import subprocess
from collections import defaultdict
from distutils.spawn import find_executable
from os.path import (abspath, basename, dirname, expanduser, exists,
isdir, isfile, islink, join)
from conda import config
from conda import install
from conda.api import get_index
from conda.plan import (RM_EXTRACTED, EXTRACT, UNLINK, LINK,
ensure_linked_actions, execute_actions)
from conda.compat import iteritems
def conda_installed_files(prefix, exclude_self_build=False):
"""
Return the set of files which have been installed (using conda) into
a given prefix.
"""
res = set()
for dist in install.linked(prefix):
meta = install.is_linked(prefix, dist)
if exclude_self_build and 'file_hash' in meta:
continue
res.update(set(meta['files']))
return res
def rel_path(prefix, path):
res = path[len(prefix) + 1:]
if sys.platform == 'win32':
res = res.replace('\\', '/')
return res
def walk_prefix(prefix, ignore_predefined_files=True):
"""
Return the set of all files in a given prefix directory.
"""
res = set()
prefix = abspath(prefix)
ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
'users', 'LICENSE.txt', 'info', 'conda-recipes',
'.index', '.unionfs', '.nonadmin'}
binignore = {'conda', 'activate', 'deactivate'}
if sys.platform == 'darwin':
ignore.update({'python.app', 'Launcher.app'})
for fn in os.listdir(prefix):
if ignore_predefined_files:
if fn in ignore:
continue
if isfile(join(prefix, fn)):
res.add(fn)
continue
for root, dirs, files in os.walk(join(prefix, fn)):
for fn2 in files:
if ignore_predefined_files:
if root == join(prefix, 'bin') and fn2 in binignore:
continue
res.add(rel_path(prefix, join(root, fn2)))
for dn in dirs:
path = join(root, dn)
if islink(path):
res.add(rel_path(prefix, path))
return res
def untracked(prefix, exclude_self_build=False):
"""
Return (the set) of all untracked files for a given prefix.
"""
conda_files = conda_installed_files(prefix, exclude_self_build)
return {path for path in walk_prefix(prefix) - conda_files
if not (path.endswith('~') or
(sys.platform=='darwin' and path.endswith('.DS_Store')) or
(path.endswith('.pyc') and path[:-1] in conda_files))}
def which_prefix(path):
"""
given the path (to a (presumably) conda installed file) return the
environment prefix in which the file in located
"""
prefix = abspath(path)
while True:
if isdir(join(prefix, 'conda-meta')):
# we found the it, so let's return it
return prefix
if prefix == dirname(prefix):
# we cannot chop off any more directories, so we didn't find it
return None
prefix = dirname(prefix)
def which_package(path):
"""
given the path (of a (presumably) conda installed file) iterate over
the conda packages the file came from. Usually the iteration yields
only one package.
"""
path = abspath(path)
prefix = which_prefix(path)
if prefix is None:
raise RuntimeError("could not determine conda prefix from: %s" % path)
for dist in install.linked(prefix):
meta = install.is_linked(prefix, dist)
if any(abspath(join(prefix, f)) == path for f in meta['files']):
yield dist
def discard_conda(dists):
return [dist for dist in dists if not install.name_dist(dist) == 'conda']
def touch_nonadmin(prefix):
"""
Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
"""
if sys.platform == 'win32' and exists(join(config.root_dir, '.nonadmin')):
if not isdir(prefix):
os.makedirs(prefix)
with open(join(prefix, '.nonadmin'), 'w') as fo:
fo.write('')
def clone_env(prefix1, prefix2, verbose=True, quiet=False):
"""
clone existing prefix1 into new prefix2
"""
untracked_files = untracked(prefix1)
dists = discard_conda(install.linked(prefix1))
if verbose:
print('Packages: %d' % len(dists))
print('Files: %d' % len(untracked_files))
for f in untracked_files:
src = join(prefix1, f)
dst = join(prefix2, f)
dst_dir = dirname(dst)
if islink(dst_dir) or isfile(dst_dir):
os.unlink(dst_dir)
if not isdir(dst_dir):
os.makedirs(dst_dir)
try:
with open(src, 'rb') as fi:
data = fi.read()
except IOError:
continue
try:
s = data.decode('utf-8')
s = s.replace(prefix1, prefix2)
data = s.encode('utf-8')
except UnicodeDecodeError: # data is binary
pass
with open(dst, 'wb') as fo:
fo.write(data)
shutil.copystat(src, dst)
actions = ensure_linked_actions(dists, prefix2)
execute_actions(actions, index=get_index(), verbose=not quiet)
return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
# copy packages to pkgs dir
dists = []
for src_path in paths:
assert src_path.endswith('.tar.bz2')
fn = basename(src_path)
dists.append(fn[:-8])
dst_path = join(config.pkgs_dirs[0], fn)
if abspath(src_path) == abspath(dst_path):
continue
shutil.copyfile(src_path, dst_path)
actions = defaultdict(list)
actions['PREFIX'] = prefix
actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK
for dist in dists:
actions[RM_EXTRACTED].append(dist)
actions[EXTRACT].append(dist)
if install.is_linked(prefix, dist):
actions[UNLINK].append(dist)
actions[LINK].append(dist)
execute_actions(actions, verbose=verbose)
def launch(fn, prefix=config.root_dir, additional_args=None):
info = install.is_linked(prefix, fn[:-8])
if info is None:
return None
if not info.get('type') == 'app':
raise Exception('Not an application: %s' % fn)
# prepend the bin directory to the path
fmt = r'%s\Scripts;%s' if sys.platform == 'win32' else '%s/bin:%s'
env = {'PATH': fmt % (abspath(prefix), os.getenv('PATH'))}
# copy existing environment variables, but not anything with PATH in it
for k, v in iteritems(os.environ):
if k != 'PATH':
env[k] = v
# allow updating environment variables from metadata
if 'app_env' in info:
env.update(info['app_env'])
# call the entry command
args = info['app_entry'].split()
args = [a.replace('${PREFIX}', prefix) for a in args]
arg0 = find_executable(args[0], env['PATH'])
if arg0 is None:
raise Exception('Executable not found: %s' % args[0])
args[0] = arg0
cwd = abspath(expanduser('~'))
if additional_args:
args.extend(additional_args)
return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False)
def make_icon_url(info):
if 'channel' in info and 'icon' in info:
base_url = dirname(info['channel'].rstrip('/'))
icon_fn = info['icon']
#icon_cache_path = join(config.pkgs_dir, 'cache', icon_fn)
#if isfile(icon_cache_path):
# return url_path(icon_cache_path)
return '%s/icons/%s' % (base_url, icon_fn)
return ''
def list_prefixes():
# Lists all the prefixes that conda knows about.
for envs_dir in config.envs_dirs:
if not isdir(envs_dir):
continue
for dn in sorted(os.listdir(envs_dir)):
if dn.startswith('.'):
continue
prefix = join(envs_dir, dn)
if isdir(prefix):
prefix = join(envs_dir, dn)
yield prefix
yield config.root_dir
if __name__ == '__main__':
from optparse import OptionParser
p = OptionParser(usage="usage: %prog [options] DIST/FN [ADDITIONAL ARGS]")
p.add_option('-p', '--prefix',
action="store",
default=sys.prefix,
help="prefix (defaults to %default)")
opts, args = p.parse_args()
if len(args) == 0:
p.error('at least one argument expected')
fn = args[0]
if not fn.endswith('.tar.bz2'):
fn += '.tar.bz2'
p = launch(fn, opts.prefix, args[1:])
print('PID:', p.pid)
<|code_end|>
| conda/cli/main_run.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import sys
import logging
from conda.cli import common
descr = "Launches an application installed with Conda."
def configure_parser(sub_parsers):
p = sub_parsers.add_parser('run',
description = descr,
help = descr)
common.add_parser_prefix(p)
common.add_parser_quiet(p)
common.add_parser_json(p)
p.add_argument(
'package',
metavar = 'COMMAND',
action = "store",
nargs = '?',
help = "package to launch"
)
p.add_argument(
'arguments',
metavar = 'ARGUMENTS',
action = 'store',
nargs = '*',
help = "additional arguments to application"
)
p.set_defaults(func=execute)
def execute(args, parser):
if not args.package:
parser.print_help()
return
import conda.install
import conda.resolve
from conda.api import get_package_versions, app_is_installed
from conda.misc import launch
prefix = common.get_prefix(args)
if args.quiet:
logging.disable(logging.CRITICAL)
if args.package.endswith('.tar.bz2'):
if app_is_installed(args.package, prefixes=[prefix]):
fn = args.package
else:
error_message = "Package {} not installed.".format(args.package)
common.error_and_exit(error_message, json=args.json,
error_type="PackageNotInstalled")
else:
installed = []
for pkg in get_package_versions(args.package):
if app_is_installed(pkg.fn, prefixes=[prefix]):
installed.append(pkg)
for pkg in conda.install.linked(prefix):
name, version, build = pkg.rsplit('-', 2)
if name == args.package:
installed = [conda.resolve.Package(pkg + '.tar.bz2',
conda.install.is_linked(prefix, pkg))]
break
if installed:
package = max(installed)
fn = package.fn
try:
subprocess = launch(fn, prefix=prefix,
additional_args=args.arguments,
background=args.json)
if args.json:
common.stdout_json(dict(fn=fn, pid=subprocess.pid))
elif not args.quiet:
print("Started app. Some apps may take a while to finish loading.")
except TypeError:
execute_command(args.package, prefix, args.arguments, args.json)
except Exception as e:
common.exception_and_exit(e, json=args.json)
else:
# Try interpreting it as a command
execute_command(args.package, prefix, args.arguments, args.json)
def execute_command(cmd, prefix, additional_args, json=False):
from conda.misc import execute_in_environment
try:
process = execute_in_environment(
cmd, prefix=prefix, additional_args=additional_args, inherit=not json)
if not json:
sys.exit(process.wait())
else:
common.stdout_json(dict(cmd=cmd, pid=process.pid))
except OSError:
error_message = "App {} not installed.".format(cmd)
common.error_and_exit(error_message, json=json,
error_type="AppNotInstalled")
conda/misc.py
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import subprocess
from collections import defaultdict
from distutils.spawn import find_executable
from os.path import (abspath, basename, dirname, expanduser, exists,
isdir, isfile, islink, join)
from conda import config
from conda import install
from conda.api import get_index
from conda.plan import (RM_EXTRACTED, EXTRACT, UNLINK, LINK,
ensure_linked_actions, execute_actions)
from conda.compat import iteritems
def conda_installed_files(prefix, exclude_self_build=False):
"""
Return the set of files which have been installed (using conda) into
a given prefix.
"""
res = set()
for dist in install.linked(prefix):
meta = install.is_linked(prefix, dist)
if exclude_self_build and 'file_hash' in meta:
continue
res.update(set(meta['files']))
return res
def rel_path(prefix, path):
res = path[len(prefix) + 1:]
if sys.platform == 'win32':
res = res.replace('\\', '/')
return res
def walk_prefix(prefix, ignore_predefined_files=True):
    """
    Return the set of all files in a given prefix directory.

    When ignore_predefined_files is True, conda's own bookkeeping
    entries (pkgs, envs, conda-meta, ...) and the conda entry-point
    scripts in bin/ are skipped.
    """
    res = set()
    prefix = abspath(prefix)
    # top-level entries that belong to conda itself, not to any package
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes',
              '.index', '.unionfs', '.nonadmin'}
    # entry points in bin/ that are provided by conda, not by packages
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files:
            if fn in ignore:
                continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            for fn2 in files:
                if ignore_predefined_files:
                    if root == join(prefix, 'bin') and fn2 in binignore:
                        continue
                res.add(rel_path(prefix, join(root, fn2)))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # record symlinked directories themselves as entries
                    res.add(rel_path(prefix, path))
    return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.
    """
    tracked = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - tracked:
        if path.endswith('~'):
            continue
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue
        # ignore byte-code files whose source is tracked
        if path.endswith('.pyc') and path[:-1] in tracked:
            continue
        result.add(path)
    return result
def which_prefix(path):
    """Return the conda environment prefix containing *path*.

    Walks up the directory tree until a ``conda-meta`` directory is
    found; returns None if the filesystem root is reached first.
    """
    prefix = abspath(path)
    while not isdir(join(prefix, 'conda-meta')):
        parent = dirname(prefix)
        if parent == prefix:
            # reached the filesystem root without finding conda-meta
            return None
        prefix = parent
    return prefix
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from. Usually the iteration yields
    only one package.
    """
    path = abspath(path)
    prefix = which_prefix(path)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % path)
    for dist in install.linked(prefix):
        meta = install.is_linked(prefix, dist)
        # a package "owns" the file if any of its recorded files resolves
        # to the queried absolute path
        if any(abspath(join(prefix, f)) == path for f in meta['files']):
            yield dist
def discard_conda(dists):
    """Return *dists* with any build of the 'conda' package removed."""
    kept = []
    for dist in dists:
        if install.name_dist(dist) != 'conda':
            kept.append(dist)
    return kept
def touch_nonadmin(prefix):
    """Create an empty $PREFIX/.nonadmin marker on Windows.

    Only done when the root environment itself carries a .nonadmin
    marker (i.e. conda was installed without admin rights).
    """
    if sys.platform != 'win32':
        return
    if not exists(join(config.root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    with open(join(prefix, '.nonadmin'), 'w') as marker:
        marker.write('')
def clone_env(prefix1, prefix2, verbose=True, quiet=False):
    """
    clone existing prefix1 into new prefix2

    Untracked files are copied over (occurrences of the old prefix are
    rewritten in files that decode as UTF-8), then every linked package
    is installed into the new prefix via the normal actions machinery.
    Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)
    dists = discard_conda(install.linked(prefix1))
    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        # a file/link occupying the destination directory path must go
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # unreadable source (e.g. dangling link): skip it
            continue
        try:
            s = data.decode('utf-8')
            # rewrite embedded references to the old prefix
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        # preserve timestamps and permission bits
        shutil.copystat(src, dst)
    actions = ensure_linked_actions(dists, prefix2)
    execute_actions(actions, index=get_index(), verbose=not quiet)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the given local .tar.bz2 package files into *prefix*.

    Each tarball is copied into the package cache (unless already
    there), then re-extracted and linked; a previously linked build of
    the same dist is unlinked first.
    """
    # copy packages to pkgs dir
    dists = []
    for src_path in paths:
        assert src_path.endswith('.tar.bz2')
        fn = basename(src_path)
        dists.append(fn[:-8])
        dst_path = join(config.pkgs_dirs[0], fn)
        if abspath(src_path) == abspath(dst_path):
            # already in the cache; nothing to copy
            continue
        shutil.copyfile(src_path, dst_path)
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_EXTRACTED, EXTRACT, UNLINK, LINK
    for dist in dists:
        # force a fresh extraction of the copied tarball
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_linked(prefix, dist):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    execute_actions(actions, verbose=verbose)
def environment_for_conda_environment(prefix=config.root_dir):
    """Return (binpath, env) for running programs out of *prefix*.

    *binpath* is the environment's script directory and *env* is a copy
    of the current environment with that directory prepended to PATH.
    """
    on_windows = sys.platform == 'win32'
    binpath = (r'%s\Scripts' if on_windows else '%s/bin') % abspath(prefix)
    sep = ';' if on_windows else ':'
    env = {'PATH': '%s%s%s' % (binpath, sep, os.getenv('PATH'))}
    # carry over every other environment variable unchanged
    for key, value in iteritems(os.environ):
        if key != 'PATH':
            env[key] = value
    return binpath, env
def launch(fn, prefix=config.root_dir, additional_args=None, background=False):
    """Start the app packaged as *fn* (a '<dist>.tar.bz2' name) in *prefix*.

    Returns the Popen object, or None when the package is not linked;
    raises TypeError when the linked package is not of type 'app'.
    """
    info = install.is_linked(prefix, fn[:-8])
    if info is None:
        return None
    if not info.get('type') == 'app':
        raise TypeError('Not an application: %s' % fn)
    binpath, env = environment_for_conda_environment(prefix)
    # allow updating environment variables from metadata
    if 'app_env' in info:
        env.update(info['app_env'])
    # call the entry command
    args = info['app_entry'].split()
    args = [a.replace('${PREFIX}', prefix) for a in args]
    arg0 = find_executable(args[0], env['PATH'])
    if arg0 is None:
        raise Exception('Executable not found: %s' % args[0])
    args[0] = arg0
    # run the app from the user's home directory
    cwd = abspath(expanduser('~'))
    if additional_args:
        args.extend(additional_args)
    if sys.platform == 'win32' and background:
        # detach into a new console so a JSON-mode caller is not blocked
        return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False,
                                creationflags=subprocess.CREATE_NEW_CONSOLE)
    else:
        return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False)
def execute_in_environment(cmd, prefix=config.root_dir, additional_args=None,
                           inherit=True):
    """Runs ``cmd`` in the specified environment.

    ``inherit`` specifies whether the child inherits stdio handles (for JSON
    output, we don't want to trample this process's stdout).
    Returns the subprocess.Popen object.
    """
    binpath, env = environment_for_conda_environment(prefix)
    if sys.platform == 'win32' and cmd == 'python':
        # python is located one directory up on Windows
        cmd = join(binpath, '..', cmd)
    else:
        cmd = join(binpath, cmd)
    args = [cmd]
    if additional_args:
        args.extend(additional_args)
    if inherit:
        # share this process's stdio with the child
        stdin, stdout, stderr = None, None, None
    else:
        stdin, stdout, stderr = subprocess.PIPE, subprocess.PIPE, subprocess.PIPE
    if sys.platform == 'win32' and not inherit:
        # give the non-inheriting child its own console on Windows
        return subprocess.Popen(args, env=env, close_fds=False,
                                stdin=stdin, stdout=stdout, stderr=stderr,
                                creationflags=subprocess.CREATE_NEW_CONSOLE)
    else:
        return subprocess.Popen(args, env=env, close_fds=False,
                                stdin=stdin, stdout=stdout, stderr=stderr)
def make_icon_url(info):
    """Build the icon URL for a package record, or '' when unavailable."""
    if 'channel' not in info or 'icon' not in info:
        return ''
    base_url = dirname(info['channel'].rstrip('/'))
    return '%s/icons/%s' % (base_url, info['icon'])
def list_prefixes():
    """Yield every environment prefix conda knows about, root last.

    Fix: the original computed ``prefix = join(envs_dir, dn)`` a second
    time inside the ``isdir`` check — a redundant duplicate assignment.
    """
    # Lists all the prefixes that conda knows about.
    for envs_dir in config.envs_dirs:
        if not isdir(envs_dir):
            continue
        for dn in sorted(os.listdir(envs_dir)):
            if dn.startswith('.'):
                # skip hidden entries such as lock files
                continue
            prefix = join(envs_dir, dn)
            if isdir(prefix):
                yield prefix
    yield config.root_dir
if __name__ == '__main__':
    # minimal manual CLI: launch an app package from a given prefix
    from optparse import OptionParser
    p = OptionParser(usage="usage: %prog [options] DIST/FN [ADDITIONAL ARGS]")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    opts, args = p.parse_args()
    if len(args) == 0:
        p.error('at least one argument expected')
    fn = args[0]
    if not fn.endswith('.tar.bz2'):
        # accept a bare dist name as well as a tarball filename
        fn += '.tar.bz2'
    p = launch(fn, opts.prefix, args[1:])
    print('PID:', p.pid)
| conda/cli/main_run.py
--- a/conda/cli/main_run.py
+++ b/conda/cli/main_run.py
@@ -7,6 +7,7 @@
from __future__ import print_function, division, absolute_import
import sys
+import logging
from conda.cli import common
@@ -17,6 +18,7 @@ def configure_parser(sub_parsers):
description = descr,
help = descr)
common.add_parser_prefix(p)
+ common.add_parser_quiet(p)
common.add_parser_json(p)
p.add_argument(
'package',
@@ -47,6 +49,9 @@ def execute(args, parser):
prefix = common.get_prefix(args)
+ if args.quiet:
+ logging.disable(logging.CRITICAL)
+
if args.package.endswith('.tar.bz2'):
if app_is_installed(args.package, prefixes=[prefix]):
fn = args.package
@@ -65,20 +70,38 @@ def execute(args, parser):
if name == args.package:
installed = [conda.resolve.Package(pkg + '.tar.bz2',
conda.install.is_linked(prefix, pkg))]
+ break
- if not installed:
- error_message = "App {} not installed.".format(args.package)
- common.error_and_exit(error_message, json=args.json,
- error_type="AppNotInstalled")
+ if installed:
+ package = max(installed)
+ fn = package.fn
- package = max(installed)
- fn = package.fn
+ try:
+ subprocess = launch(fn, prefix=prefix,
+ additional_args=args.arguments,
+ background=args.json)
+ if args.json:
+ common.stdout_json(dict(fn=fn, pid=subprocess.pid))
+ elif not args.quiet:
+ print("Started app. Some apps may take a while to finish loading.")
+ except TypeError:
+ execute_command(args.package, prefix, args.arguments, args.json)
+ except Exception as e:
+ common.exception_and_exit(e, json=args.json)
+ else:
+ # Try interpreting it as a command
+ execute_command(args.package, prefix, args.arguments, args.json)
+def execute_command(cmd, prefix, additional_args, json=False):
+ from conda.misc import execute_in_environment
try:
- subprocess = launch(fn, prefix=prefix, additional_args=args.arguments)
- if args.json:
- common.stdout_json(dict(fn=fn, pid=subprocess.pid))
+ process = execute_in_environment(
+ cmd, prefix=prefix, additional_args=additional_args, inherit=not json)
+ if not json:
+ sys.exit(process.wait())
else:
- print("Started app. Some apps may take a while to finish loading.")
- except Exception as e:
- common.exception_and_exit(e, json=args.json)
+ common.stdout_json(dict(cmd=cmd, pid=process.pid))
+ except OSError:
+ error_message = "App {} not installed.".format(cmd)
+ common.error_and_exit(error_message, json=json,
+ error_type="AppNotInstalled")
conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -199,21 +199,29 @@ def install_local_packages(prefix, paths, verbose=False):
execute_actions(actions, verbose=verbose)
-def launch(fn, prefix=config.root_dir, additional_args=None):
+def environment_for_conda_environment(prefix=config.root_dir):
+ # prepend the bin directory to the path
+ fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
+ binpath = fmt % abspath(prefix)
+ path = r'%s;%s' if sys.platform == 'win32' else '%s:%s'
+ path = path % (binpath, os.getenv('PATH'))
+ env = {'PATH': path}
+ # copy existing environment variables, but not anything with PATH in it
+ for k, v in iteritems(os.environ):
+ if k != 'PATH':
+ env[k] = v
+ return binpath, env
+
+
+def launch(fn, prefix=config.root_dir, additional_args=None, background=False):
info = install.is_linked(prefix, fn[:-8])
if info is None:
return None
if not info.get('type') == 'app':
- raise Exception('Not an application: %s' % fn)
+ raise TypeError('Not an application: %s' % fn)
- # prepend the bin directory to the path
- fmt = r'%s\Scripts;%s' if sys.platform == 'win32' else '%s/bin:%s'
- env = {'PATH': fmt % (abspath(prefix), os.getenv('PATH'))}
- # copy existing environment variables, but not anything with PATH in it
- for k, v in iteritems(os.environ):
- if k != 'PATH':
- env[k] = v
+ binpath, env = environment_for_conda_environment(prefix)
# allow updating environment variables from metadata
if 'app_env' in info:
env.update(info['app_env'])
@@ -229,7 +237,44 @@ def launch(fn, prefix=config.root_dir, additional_args=None):
cwd = abspath(expanduser('~'))
if additional_args:
args.extend(additional_args)
- return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False)
+ if sys.platform == 'win32' and background:
+ return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False,
+ creationflags=subprocess.CREATE_NEW_CONSOLE)
+ else:
+ return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False)
+
+
+def execute_in_environment(cmd, prefix=config.root_dir, additional_args=None,
+ inherit=True):
+ """Runs ``cmd`` in the specified environment.
+
+ ``inherit`` specifies whether the child inherits stdio handles (for JSON
+ output, we don't want to trample this process's stdout).
+ """
+ binpath, env = environment_for_conda_environment(prefix)
+
+ if sys.platform == 'win32' and cmd == 'python':
+ # python is located one directory up on Windows
+ cmd = join(binpath, '..', cmd)
+ else:
+ cmd = join(binpath, cmd)
+
+ args = [cmd]
+ if additional_args:
+ args.extend(additional_args)
+
+ if inherit:
+ stdin, stdout, stderr = None, None, None
+ else:
+ stdin, stdout, stderr = subprocess.PIPE, subprocess.PIPE, subprocess.PIPE
+
+ if sys.platform == 'win32' and not inherit:
+ return subprocess.Popen(args, env=env, close_fds=False,
+ stdin=stdin, stdout=stdout, stderr=stderr,
+ creationflags=subprocess.CREATE_NEW_CONSOLE)
+ else:
+ return subprocess.Popen(args, env=env, close_fds=False,
+ stdin=stdin, stdout=stdout, stderr=stderr)
def make_icon_url(info): |
Use rmtree workaround for write-protected files on Windows
See https://stackoverflow.com/questions/1889597/deleting-directory-in-python/1889686#1889686. Alternatively we can use rd /s.
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import time
import os
import json
import shutil
import stat
import sys
import subprocess
import tarfile
import traceback
import logging
import shlex
from os.path import abspath, basename, dirname, isdir, isfile, islink, join
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op stand-in: provides only the context-manager interface
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
on_win = bool(sys.platform == 'win32')

if on_win:
    import ctypes
    from ctypes import wintypes

    # on Windows we cannot update these packages in the root environment
    # because of the file lock problem
    win_ignore_root = set(['python', 'pycosat', 'psutil'])

    # bind the raw win32 link primitives via ctypes
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is not available on older Windows versions
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        # intentionally drop every record
        pass
    def emit(self, record):
        pass
    def createLock(self):
        # no lock needed since nothing is ever emitted
        self.lock = None
log.addHandler(NullHandler())

# integer codes for the three ways a file can be placed into an env
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# human-readable names for the link types (recorded in conda-meta)
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Make *dst* refer to *src* using the requested link type.

    Dispatches to the win32 ctypes helpers on Windows, or to
    os.link/os.symlink/shutil.copy2 elsewhere.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """onerror callback for shutil.rmtree: clear the read-only bit and retry.

    On Windows, rmtree fails with EACCES on write-protected files;
    clearing stat.S_IWRITE and re-invoking the failing function
    (os.unlink / os.rmdir) deletes them.  See
    https://stackoverflow.com/a/1889686.
    """
    os.chmod(path, stat.S_IWRITE)
    func(path)


def rm_rf(path, max_retries=5):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        os.unlink(path)
    elif isdir(path):
        for i in range(max_retries):
            try:
                # onerror works around write-protected files on Windows
                shutil.rmtree(path, onerror=_remove_readonly)
                return
            except OSError as e:
                log.debug("Unable to delete %s (%s): retrying after %s "
                          "seconds" % (path, e, i))
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path, onerror=_remove_readonly)
def rm_empty_dir(path):
    """Remove *path* if it is an empty directory; otherwise do nothing.

    Missing paths and non-empty directories are silently ignored.
    """
    try:
        os.rmdir(path)
    except OSError:
        # directory might not exist, or is not empty
        pass
def yield_lines(path):
    """Yield the stripped, non-empty, non-comment lines of a text file.

    Fix: the original iterated ``open(path)`` directly and relied on the
    garbage collector to close the handle; use a ``with`` block so the
    file is closed deterministically.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# build-time placeholder prefix embedded into relocatable packages
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # new-style line: "<placeholder> <mode> <filename>"
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # old-style line: just a filename; assume the default
                # placeholder and text mode
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # no has_prefix file -> nothing to replace
        pass
    return res
class PaddingError(Exception):
    # raised by binary_replace() when the replacement is longer than the
    # placeholder it must overwrite
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    import re

    def pad_match(match):
        count = match.group().count(a)
        padding = (len(a) - len(b)) * count
        if padding < 0:
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(pad_match, data)
    # the rewrite must never change the total size of the data
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """Replace the build-time *placeholder* prefix embedded in a file.

    mode 'text' does a plain byte substitution; mode 'binary' uses
    binary_replace() so the file size stays constant.  The file's
    permission bits are preserved across the rewrite.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # fix: the original used '"Invalid mode:" % mode' — a format string
        # with no conversion specifier, which raises TypeError instead of
        # exiting with the intended message
        sys.exit("Invalid mode: %r" % mode)
    if new_data == data:
        # nothing to rewrite
        return
    st = os.lstat(path)
    with open(path, 'wb') as fo:
        fo.write(new_data)
    # restore the original permission bits
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name part of a canonical dist string.

    e.g. 'numpy-1.6.2-py26_0' -> 'numpy'.
    """
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """Write <prefix>/conda-meta/<dist>.json for an installed package.

    The record is the package's own info/index.json overlaid with the
    install-time *extra_info* entries.
    """
    index_path = join(info_dir, 'index.json')
    with open(index_path) as fh:
        meta = json.load(fh)
    meta.update(extra_info)

    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    out_path = join(meta_dir, dist + '.json')
    with open(out_path, 'w') as fh:
        json.dump(meta, fh, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """Create (or remove, with remove=True) menu shortcuts for a package.

    Only packages in the default (root) environment get menu items; any
    'Menu/*.json' entries are handed to menuinst when it is importable.
    Failures are logged but never abort the install.
    """
    if abspath(prefix) != abspath(sys.prefix):
        # we currently only want to create menu items for packages
        # in default environment
        return
    menu_files = [f for f in files
                  if f.startswith('Menu/') and f.endswith('.json')]
    if not menu_files:
        return
    try:
        import menuinst
    except ImportError:
        # menuinst is optional; silently skip when unavailable
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # fix: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt; keep the best-effort logging otherwise
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # scripts live as <prefix>/bin/.<name>-<action>.sh (Scripts/.*.bat on win)
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        # nothing to run counts as success
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no cmd.exe available
            return False
    else:
        args = ['/bin/bash', path]
    env = os.environ
    # expose the target prefix and the package identity to the script
    env['PREFIX'] = env_prefix or prefix
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """Return the URL *dist* was downloaded from, or None when unknown.

    Scans <pkgs_dir>/urls.txt from the newest entry to the oldest.
    """
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fh:
            urls = fh.read().split()
    except IOError:
        return None
    suffix = '/%s.tar.bz2' % dist
    for url in reversed(urls):
        if url.endswith(suffix):
            return url
    return None
def read_no_link(info_dir):
    """Return the union of entries from info/no_link and info/no_softlink.

    Either file may be absent; blank lines and '#' comments are skipped.
    """
    entries = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            with open(join(info_dir, fn)) as fh:
                for raw in fh:
                    item = raw.strip()
                    if item and not item.startswith('#'):
                        entries.add(item)
        except IOError:
            pass
    return entries
# Should this be an API function?
def symlink_conda(prefix, root_dir):
    """Symlink the root env's conda/activate/deactivate scripts into *prefix*.

    Existing entries are left untouched; <prefix>/bin is created on demand.
    """
    bin_dir = join(prefix, 'bin')
    if not os.path.exists(bin_dir):
        os.makedirs(bin_dir)
    for name in ('conda', 'activate', 'deactivate'):
        target = join(root_dir, 'bin', name)
        link_path = join(bin_dir, name)
        if not os.path.exists(link_path):
            os.symlink(target, link_path)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Return True if hard-linking from the package cache into *prefix* works.

    Probes by hard-linking one known file (info/index.json) into the
    prefix, and always cleans up the probe link plus any directory it
    had to create.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src)
    assert not isfile(dst)
    if not isdir(prefix):
        os.makedirs(prefix)
    try:
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        # e.g. cross-device link, or a filesystem without hard links
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
    """Return the set of dist names with a tarball in the package cache."""
    if not isdir(pkgs_dir):
        return set()
    return {fn[:-8] for fn in os.listdir(pkgs_dir) if fn.endswith('.tar.bz2')}
def is_fetched(pkgs_dir, dist):
    """Return True when the tarball for *dist* exists in the cache."""
    return isfile(join(pkgs_dir, '%s.tar.bz2' % dist))
def rm_fetched(pkgs_dir, dist):
    """Delete the cached tarball for *dist* (a no-op when absent)."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()
    result = set()
    for dn in os.listdir(pkgs_dir):
        info = join(pkgs_dir, dn, 'info')
        # a package counts as extracted once both metadata files exist
        if isfile(join(info, 'files')) and isfile(join(info, 'index.json')):
            result.add(dn)
    return result
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files. However, we want root to be
            # the owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True when *dist* has been fully extracted into the cache."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    """Delete the extracted directory for *dist* from the package cache."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    return {fn[:-5] for fn in os.listdir(meta_dir) if fn.endswith('.json')}
def is_linked(prefix, dist):
    """Return the install metadata dict for *dist* in *prefix*, or None
    when the package is not linked there.
    """
    meta_path = join(prefix, 'conda-meta', '%s.json' % dist)
    try:
        with open(meta_path) as fh:
            return json.load(fh)
    except IOError:
        # not linked (or metadata unreadable)
        return None
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # files needing placeholder replacement, and files opting out of linking
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            # prefix-rewritten, no-link, and symlinked sources must be
            # real copies, never hard/soft links into the cache
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # the private '_cache' package is never recorded in conda-meta
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        # build and persist the conda-meta record for this install
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    '''
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
        # remove the meta-file last
        os.unlink(meta_path)
        # collect every parent directory up to (but excluding) the prefix
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # prune now-empty directories, deepest first
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt, then delete it.

    A missing file is silently ignored; the file is removed either way.
    """
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fh:
            sys.stdout.write(fh.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """Minimal low-level CLI: list, extract, link or unlink packages."""
    from pprint import pprint
    from optparse import OptionParser
    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    logging.basicConfig()
    # --list/--extract/--link-all take no positional argument; the other
    # actions require exactly one TARBALL/NAME
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')
    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)
    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # probe once whether hard-linking works, then use that answer
        # for every package
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        link(pkgs_dir, prefix, dist)
    elif opts.unlink:
        unlink(prefix, dist)


if __name__ == '__main__':
    main()
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import time
import os
import json
import shutil
import stat
import sys
import subprocess
import tarfile
import traceback
import logging
import shlex
from os.path import abspath, basename, dirname, isdir, isfile, islink, join
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        """No-op stand-in for conda.lock.Locked used when conda itself is
        not importable (standalone installer mode).  Provides the same
        context-manager interface but acquires nothing."""
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass
on_win = bool(sys.platform == 'win32')

if on_win:
    import ctypes
    from ctypes import wintypes

    # on Windows we cannot update these packages in the root environment
    # because of the file lock problem
    win_ignore_root = set(['python', 'pycosat', 'psutil'])

    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        # CreateSymbolicLinkW is not available on all Windows versions,
        # hence the AttributeError guard.
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        # third argument selects directory (truthy) vs file link
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')


class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None


# Attach a do-nothing handler so logging never warns about missing handlers.
log.addHandler(NullHandler())
# How a file may be placed into an environment from the package cache.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names used in metadata and log output.
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Place `src` at `dst` using the requested link type, dispatching to
    the win32 implementations on Windows.  Raises on an unknown
    `linktype`."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def rm_rf(path, max_retries=5):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        os.unlink(path)
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                # winerror 5 (access denied) is often transient on Windows
                # (AV scanners, indexers); fall back to the shell's `rd`.
                if on_win and e.args[0] == 5:
                    try:
                        subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
                        return
                    except subprocess.CalledProcessError as e1:
                        msg += '%s\n' % e1
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        if on_win and e.args[0] == 5:
            subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
        else:
            shutil.rmtree(path)
def rm_empty_dir(path):
    """Delete `path` only when it is an empty directory.

    A missing path, a non-directory, or a non-empty directory is left
    untouched and no error is raised.
    """
    try:
        os.rmdir(path)
    except OSError:
        # os.rmdir only removes an empty directory -- which is exactly the
        # behavior we want -- so we simply swallow its failures.
        pass
def yield_lines(path):
    """Iterate over the meaningful lines of the text file at `path`.

    Yields each line stripped of surrounding whitespace, skipping blank
    lines and '#' comment lines.

    Fix: the original iterated `open(path)` directly and left closing the
    file handle to the garbage collector; the `with` block now closes it
    deterministically when iteration ends.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Build-time placeholder prefix baked into relocatable packages.
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    Each line is either a `placeholder mode filename` triple, or (legacy
    format) a bare filename, which implies the default placeholder and
    'text' mode.  A missing file yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # posix=False keeps backslashes intact (Windows paths);
                # the strip removes the quotes shlex leaves in place.
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # legacy single-field line
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the replacement string is longer
    than the placeholder it must overwrite."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    import re

    def _pad_replacement(match):
        matched = match.group()
        count = matched.count(a)
        pad_len = (len(a) - len(b)) * count
        if pad_len < 0:
            raise PaddingError(a, b, pad_len)
        # NUL-pad so the overall length of the blob is unchanged
        return matched.replace(a, b) + b'\0' * pad_len

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_replacement, data)
    # the whole point: the file size must not change
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """Replace `placeholder` with `new_prefix` inside the file `path`.

    mode 'text' performs a straight byte substitution; mode 'binary' uses
    binary_replace() so the file length is preserved (NUL padding).  The
    file is only rewritten when something actually changed, and its
    permission bits are preserved.  Exits with an error on an unknown mode.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # bug fix: the original read `"Invalid mode:" % mode`, which raises
        # TypeError ("not all arguments converted") instead of exiting
        # with a message that names the offending mode
        sys.exit("Invalid mode: %r" % mode)

    if new_data == data:
        return
    st = os.lstat(path)
    with open(path, 'wb') as fo:
        fo.write(new_data)
    # preserve the original permission bits of the rewritten file
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name part of a canonical dist string,
    e.g. 'numpy-1.6.2-py26_0' -> 'numpy'.  Splitting from the right keeps
    hyphenated package names intact."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # start from the package's own info/index.json ...
    index_path = join(info_dir, 'index.json')
    with open(index_path) as fi:
        meta = json.load(fi)
    # ... and layer the install-time information on top of it
    meta.update(extra_info)
    # persist as <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    meta_path = join(meta_dir, '%s.json' % dist)
    with open(meta_path, 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """Create (or, with remove=True, remove) menu shortcuts for any
    Menu/*.json files among `files`, via the optional menuinst package.
    Best-effort: a missing menuinst or a menuinst failure is never fatal."""
    if abspath(prefix) != abspath(sys.prefix):
        # we currently only want to create menu items for packages
        # in default environment
        return
    menu_files = [f for f in files
                  if f.startswith('Menu/') and f.endswith('.json')]
    if not menu_files:
        return
    try:
        import menuinst
    except ImportError:
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # deliberately broad: shortcut failures must not abort install
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # the package ships no such script: trivially successful
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        args = ['/bin/bash', path]
    env = os.environ
    # expose the target prefix and the package identity to the script
    env['PREFIX'] = env_prefix or prefix
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """Return the URL the package `dist` was downloaded from, or None.

    Scans `pkgs_dir`/urls.txt from the newest entry backwards; a missing
    urls.txt simply yields None.

    Fix: the original opened urls.txt without ever closing it; the `with`
    block now releases the handle deterministically.
    """
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            urls = fi.read().split()
    except IOError:
        return None
    for url in urls[::-1]:
        if url.endswith('/%s.tar.bz2' % dist):
            return url
    return None
def read_no_link(info_dir):
    """Return the set of filenames listed in info/no_link and
    info/no_softlink; either file may be absent."""
    res = set()
    for fn in 'no_link', 'no_softlink':
        try:
            res.update(set(yield_lines(join(info_dir, fn))))
        except IOError:
            pass
    return res
# Should this be an API function?

def symlink_conda(prefix, root_dir):
    """Symlink the root environment's conda/activate/deactivate scripts
    into `prefix`/bin so they are reachable from the child environment.
    Existing links are left untouched.  POSIX only (uses os.symlink)."""
    root_conda = join(root_dir, 'bin', 'conda')
    root_activate = join(root_dir, 'bin', 'activate')
    root_deactivate = join(root_dir, 'bin', 'deactivate')
    prefix_conda = join(prefix, 'bin', 'conda')
    prefix_activate = join(prefix, 'bin', 'activate')
    prefix_deactivate = join(prefix, 'bin', 'deactivate')
    if not os.path.exists(join(prefix, 'bin')):
        os.makedirs(join(prefix, 'bin'))
    if not os.path.exists(prefix_conda):
        os.symlink(root_conda, prefix_conda)
    if not os.path.exists(prefix_activate):
        os.symlink(root_activate, prefix_activate)
    if not os.path.exists(prefix_deactivate):
        os.symlink(root_deactivate, prefix_deactivate)
# ========================== begin API functions =========================

def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard-linking from the package cache into `prefix`
    works (it fails across filesystems).  The probe link, and the prefix
    directory when it was created just for the probe and stayed empty,
    are always cleaned up."""
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src)
    assert not isfile(dst)
    if not isdir(prefix):
        os.makedirs(prefix)
    try:
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched

def fetched(pkgs_dir):
    """Return the set of canonical dist names whose tarballs are present
    in the package cache `pkgs_dir` (empty set if the cache is missing)."""
    if not isdir(pkgs_dir):
        return set()
    suffix = '.tar.bz2'
    return {fn[:-len(suffix)] for fn in os.listdir(pkgs_dir)
            if fn.endswith(suffix)}
def is_fetched(pkgs_dir, dist):
    """True when the tarball for `dist` exists in the package cache."""
    tarball = join(pkgs_dir, '%s.tar.bz2' % dist)
    return isfile(tarball)
def rm_fetched(pkgs_dir, dist):
    """Remove the cached tarball for `dist`, holding the cache lock."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted

def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()
    result = set()
    for dn in os.listdir(pkgs_dir):
        info = join(pkgs_dir, dn, 'info')
        # a directory counts as extracted only when both metadata files exist
        if isfile(join(info, 'files')) and isfile(join(info, 'index.json')):
            result.add(dn)
    return result
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed packages is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """True when `dist` has been fully extracted into the package cache
    (both info/files and info/index.json are present)."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    """Remove the extracted tree for `dist` from the package cache,
    holding the cache lock."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages

def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    suffix = '.json'
    return {fn[:-len(suffix)] for fn in os.listdir(meta_dir)
            if fn.endswith(suffix)}
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', '%s.json' % dist)
    try:
        with open(meta_path) as fp:
            return json.load(fp)
    except IOError:
        # missing meta file => the package is not linked here
        return None
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the pre-link script, links/copies every file listed in
    info/files, rewrites embedded prefixes, creates menu shortcuts, runs
    the post-link script, and finally writes conda-meta/<dist>.json.
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            # prefix-bearing files, no-link files and symlinks must be
            # copied; they cannot share an inode with the cache
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        # rewrite the build-time placeholder prefix in each flagged file
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes every file recorded in the
    package's conda-meta entry, then the meta entry itself, and finally
    prunes any directories that became empty.
    '''
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)

        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)

        # remove the meta-file last
        os.unlink(meta_path)

        # collect every ancestor directory (inside the prefix) of a
        # removed file so empty ones can be pruned, deepest first
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the accumulated .messages.txt from the prefix (if any) to
    stdout, then delete it."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================

def main():
    """Minimal CLI wrapper around the API functions above, used by the
    standalone installer: list/extract/link/unlink packages in a prefix."""
    from pprint import pprint
    from optparse import OptionParser

    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    logging.basicConfig()

    # list/extract-all/link-all take no positional argument; the other
    # modes require exactly one TARBALL/NAME
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')

    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)

    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # probe once whether hard links work, then use the same link type
        # for every package
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        link(pkgs_dir, prefix, dist)
    elif opts.unlink:
        unlink(prefix, dist)


if __name__ == '__main__':
    main()
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -155,11 +155,20 @@ def rm_rf(path, max_retries=5):
shutil.rmtree(path)
return
except OSError as e:
- log.debug("Unable to delete %s (%s): retrying after %s "
- "seconds" % (path, e, i))
+ msg = "Unable to delete %s\n%s\n" % (path, e)
+ if on_win and e.args[0] == 5:
+ try:
+ subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
+ return
+ except subprocess.CalledProcessError as e1:
+ msg += '%s\n' % e1
+ log.debug(msg + "Retrying after %s seconds..." % i)
time.sleep(i)
# Final time. pass exceptions to caller.
- shutil.rmtree(path)
+ if on_win and e.args[0] == 5:
+ subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path])
+ else:
+ shutil.rmtree(path)
def rm_empty_dir(path):
""" |
Only try updating outdated packages with update --all
conda update --all tends to fail frequently because it requires the whole environment to become satisfiable, without downgrading any packages. Perhaps a better solution would be to try installing only those packages that are known to be outdated.
Another idea would be to relax the downgrade restriction and just have it essentially "reinstall" the environment. This could lead to some surprises when packages do get downgraded, but it would also reduce the number of unsatisfiable-package issues, as those usually stem from the version specification.
| conda/cli/install.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
from difflib import get_close_matches
import conda.config as config
import conda.plan as plan
import conda.instructions as inst
import conda.misc as misc
from conda.api import get_index
from conda.cli import pscheck
from conda.cli import common
from conda.cli.find_commands import find_executable
from conda.resolve import NoPackagesFound, Resolve, MatchSpec
import conda.install as ci
def install_tar(prefix, tar_path, verbose=False):
    """Install every conda package bundled inside a plain .tar archive.

    The archive is unpacked into a temporary directory, every *.tar.bz2
    found (recursively) is installed into `prefix`, and the dependency
    specs reported by install_local_packages are returned.

    Fix: the temporary directory is now removed even when extraction or
    installation raises, and the tarfile handle is closed via
    try/finally (the original leaked both on error).
    """
    from conda.misc import install_local_packages
    tmp_dir = tempfile.mkdtemp()
    try:
        t = tarfile.open(tar_path, 'r')
        try:
            t.extractall(path=tmp_dir)
        finally:
            t.close()
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            for fn in files:
                if fn.endswith('.tar.bz2'):
                    paths.append(join(root, fn))
        return install_local_packages(prefix, paths, verbose=verbose)
    finally:
        shutil.rmtree(tmp_dir)
def check_prefix(prefix, json=False):
    """Validate a prefix for a brand-new environment and abort (via
    common.error_and_exit) when it is unusable: name starting with '.',
    a reserved environment name, or an already-existing prefix.
    The checks are evaluated in order and the last failing one wins,
    matching the original behavior."""
    from conda.config import root_env_name
    name = basename(prefix)
    checks = (
        (name.startswith('.'),
         "environment name cannot start with '.': %s" % name),
        (name == root_env_name,
         "'%s' is a reserved environment name" % name),
        (exists(prefix),
         "prefix already exists: %s" % prefix),
    )
    error = None
    for failed, message in checks:
        if failed:
            error = message
    if error:
        common.error_and_exit(error, json=json, error_type="ValueError")
def clone(src_arg, dst_prefix, json=False, quiet=False):
    """Clone an existing environment into `dst_prefix`.

    `src_arg` is either a path (when it contains os.sep) or an
    environment name resolved via common.find_prefix_name().  Emits a
    JSON success record when `json` is true.
    """
    from conda.misc import clone_env
    if os.sep in src_arg:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)
    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet)
    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """Print shell-appropriate instructions for activating and
    deactivating the freshly created environment `arg`."""
    if sys.platform == 'win32':
        activate_line = "# > activate %s" % arg
    else:
        activate_line = "# $ source activate %s" % arg
    for line in ("#",
                 "# To activate this environment, use:",
                 activate_line,
                 "#",
                 "# To deactivate this environment, use:",
                 "# $ source deactivate",
                 "#"):
        print(line)
def get_revision(arg, json=False):
    """Parse `arg` as an integer revision number; abort with a friendly
    message (via common.error_and_exit) when it is not an integer."""
    try:
        revision = int(arg)
    except ValueError:
        common.error_and_exit("expected revision number, not: '%s'" % arg,
                              json=json,
                              error_type="ValueError")
    else:
        return revision
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared driver for the three commands: validates arguments, builds the
    spec list, fetches the index, computes a plan and executes it.  When
    `update --all` hits packages that can no longer be found, they are
    recorded in args._skip and the function recurses without them instead
    of bailing out.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    # ---- argument validation specific to `conda update` ----
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    # ---- `conda create --clone` short-circuits everything else ----
    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # ---- build the list of specs to satisfy ----
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            # packages recorded in args._skip could not be found on a
            # previous pass and are excluded from the solve
            if name in getattr(args, '_skip', []):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s >=%s' % (name, ver))
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # handle tar file containing conda packages
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            from conda.misc import install_local_packages
            depends = install_local_packages(prefix, args.packages,
                                             verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True

    # ---- fetch the package index (optionally including local builds) ----
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    # ---- compute the plan ----
    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs, force=args.force,
                                           only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint)
            if args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]

        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', [])
            args._skip.extend([i.split()[0] for i in e.pkgs])
            return install(args, parser, command=command)
        else:
            # suggest near-miss names and Binstar search hints
            packages = {index[fn]['name'] for fn in index}

            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += "\n\nDid you mean one of these?\n\n %s" % (', '.join(close))
                error_message += '\n\nYou can search for this package on Binstar with'
                error_message += '\n\n binstar search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'

            binstar = find_executable('binstar', include_others=False)
            if not binstar:
                error_message += '\n\nYou may need to install the Binstar command line client with'
                error_message += '\n\n conda install binstar'
            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    # ---- confirmation / dry-run handling ----
    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck and
                not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without --force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)

    # ---- execute the plan and record history ----
    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# %s specs: %s\n' % (command, specs))
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
    """Dry-run dependency resolution for `packages` against the index,
    display the resulting plan, and return the actions dict.  A throwaway
    prefix is used and always cleaned up.

    Fix: `prefix` is now created before entering the try block; previously
    a failure inside tempfile.mkdtemp would trigger a NameError in the
    finally clause, masking the original exception.
    """
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
<|code_end|>
| conda/cli/install.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
from difflib import get_close_matches
import conda.config as config
import conda.plan as plan
import conda.instructions as inst
import conda.misc as misc
from conda.api import get_index
from conda.cli import pscheck
from conda.cli import common
from conda.cli.find_commands import find_executable
from conda.resolve import NoPackagesFound, Resolve, MatchSpec
import conda.install as ci
def install_tar(prefix, tar_path, verbose=False):
from conda.misc import install_local_packages
tmp_dir = tempfile.mkdtemp()
t = tarfile.open(tar_path, 'r')
t.extractall(path=tmp_dir)
t.close()
paths = []
for root, dirs, files in os.walk(tmp_dir):
for fn in files:
if fn.endswith('.tar.bz2'):
paths.append(join(root, fn))
depends = install_local_packages(prefix, paths, verbose=verbose)
shutil.rmtree(tmp_dir)
return depends
def check_prefix(prefix, json=False):
    """
    Validate that *prefix* is usable for a new environment; exit with a
    ValueError-style message otherwise.
    """
    from conda.config import root_env_name
    name = basename(prefix)
    # The original evaluated all checks and kept the last failing message;
    # checking in reverse priority order with if/elif yields the same one.
    error = None
    if exists(prefix):
        error = "prefix already exists: %s" % prefix
    elif name == root_env_name:
        error = "'%s' is a reserved environment name" % name
    elif name.startswith('.'):
        error = "environment name cannot start with '.': %s" % name
    if error is not None:
        common.error_and_exit(error, json=json, error_type="ValueError")
def clone(src_arg, dst_prefix, json=False, quiet=False):
    """
    Clone the environment identified by *src_arg* (a path if it contains a
    path separator, otherwise an environment name) into *dst_prefix*.

    Exits with NoEnvironmentFound when the source cannot be located; emits a
    JSON success record when *json* is true.
    """
    from conda.misc import clone_env
    # A path separator means the user gave a filesystem path, not a name.
    if os.sep in src_arg:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)
    # Progress bars are suppressed in quiet JSON mode.
    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet)
    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """Print shell instructions for activating/deactivating environment *arg*."""
    # Windows uses `activate`; POSIX shells source the activate script.
    if sys.platform == 'win32':
        activate_line = "# > activate %s" % arg
    else:
        activate_line = "# $ source activate %s" % arg
    banner = [
        "#",
        "# To activate this environment, use:",
        activate_line,
        "#",
        "# To deactivate this environment, use:",
        "# $ source deactivate",
        "#",
    ]
    print("\n".join(banner))
def get_revision(arg, json=False):
    """Parse *arg* as an integer revision number; exit on invalid input."""
    try:
        revision = int(arg)
    except ValueError:
        # Non-numeric input is a user error, reported through the common
        # exit helper (honours --json output mode).
        common.error_and_exit("expected revision number, not: '%s'" % arg,
                              json=json,
                              error_type="ValueError")
    else:
        return revision
def install(args, parser, command='install'):
    """
    Shared implementation behind ``conda install``, ``conda update`` and
    ``conda create`` (*command* selects the behaviour).

    Validates arguments, resolves the target prefix, builds the list of
    package specs, fetches the channel index, computes an action plan and
    executes it (unless --dry-run).
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    # --- argument validation specific to `conda update` -------------------
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")
    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            # `conda update` takes bare names only; version pins are invalid.
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")
    # --- `conda create --clone` short-circuits the normal plan ------------
    if newenv and args.clone:
        if args.packages:
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return
    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    # --- build the list of specs ------------------------------------------
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            # _skip holds packages that previously raised NoPackagesFound
            # (see the handler below); 'anaconda' is always skipped.
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))
    if command == 'install' and args.revision:
        # Validate the revision argument early (exits on bad input).
        get_revision(args.revision, json=args.json)
    else:
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))
    # handle tar file containing conda packages
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            from conda.misc import install_local_packages
            depends = install_local_packages(prefix, args.packages,
                                             verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True
    # --- fetch the channel index ------------------------------------------
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      unknown=args.unknown,
                                      json=args.json,
                                      offline=args.offline)
    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]
            try:
                # Exactly one installed version/build is expected per name.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if latest.version == vers_inst[0] and latest.build_number == build_inst[0]:
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(message='All requested packages already installed.')
            return
    if args.force:
        args.no_deps = True
    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None
    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")
    # --- compute the action plan ------------------------------------------
    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            actions = plan.install_actions(prefix, index, specs, force=args.force,
                                           only_names=only_names, pinned=args.pinned, minimal_hint=args.alt_hint)
            if args.copy:
                # Rewrite LINK actions to copy instead of hard-link.
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]
        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            args._skip.extend([i.split()[0] for i in e.pkgs])
            # Retry the whole operation with the missing packages skipped.
            return install(args, parser, command=command)
        else:
            # Suggest close name matches and Binstar search hints.
            packages = {index[fn]['name'] for fn in index}
            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += "\n\nDid you mean one of these?\n\n    %s" % (', '.join(close))
                error_message += '\n\nYou can search for this package on Binstar with'
                error_message += '\n\n    binstar search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'
            binstar = find_executable('binstar', include_others=False)
            if not binstar:
                error_message += '\n\nYou may need to install the Binstar command line client with'
                error_message += '\n\n    conda install binstar'
            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)
    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(message='All requested packages already installed.')
        return
    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)
    if command in {'install', 'update'}:
        common.check_write(command, prefix)
    # --- confirmation / process checks ------------------------------------
    if not args.json:
        if not pscheck.main(args):
            common.confirm_yn(args)
    else:
        if (sys.platform == 'win32' and not args.force_pscheck and
                not pscheck.check_processes(verbose=False)):
            common.error_and_exit("Cannot continue operation while processes "
                                  "from packages are running without --force-pscheck.",
                                  json=True,
                                  error_type="ProcessesStillRunning")
        elif args.dry_run:
            common.stdout_json_success(actions=actions, dry_run=True)
            sys.exit(0)
    # --- execute the plan --------------------------------------------------
    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                # Record the request in the environment's history file.
                with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                    f.write('# %s specs: %s\n' % (command, specs))
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)
    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
    if args.json:
        common.stdout_json_success(actions=actions)
def check_install(packages, platform=None, channel_urls=(), prepend=True, minimal_hint=False):
    """
    Dry-run an installation of *packages* into a throwaway prefix and
    display/return the resulting action plan.

    The scratch prefix is always removed, even when planning fails.
    """
    # Create the scratch prefix *before* entering the try block: if
    # mkdtemp() itself raised, `prefix` would be unbound and the finally
    # clause would mask the real error with a NameError.
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        # Always clean up the scratch prefix.
        ci.rm_rf(prefix)
| conda/cli/install.py
--- a/conda/cli/install.py
+++ b/conda/cli/install.py
@@ -191,13 +191,13 @@ def install(args, parser, command='install'):
"prefix %s" % prefix)
for pkg in linked:
name, ver, build = pkg.rsplit('-', 2)
- if name in getattr(args, '_skip', []):
+ if name in getattr(args, '_skip', ['anaconda']):
continue
if name == 'python' and ver.startswith('2'):
# Oh Python 2...
specs.append('%s >=%s,<3' % (name, ver))
else:
- specs.append('%s >=%s' % (name, ver))
+ specs.append('%s' % name)
specs.extend(common.specs_from_args(args.packages, json=args.json))
if command == 'install' and args.revision:
@@ -345,7 +345,7 @@ def install(args, parser, command='install'):
else:
# Not sure what to do here
pass
- args._skip = getattr(args, '_skip', [])
+ args._skip = getattr(args, '_skip', ['anaconda'])
args._skip.extend([i.split()[0] for i in e.pkgs])
return install(args, parser, command=command)
else: |
conda clean -t fails with FileNotFoundError
```
[root@localhost conda-recipes]# conda clean -t
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Include the output of the command 'conda info' in your report.
Traceback (most recent call last):
File "/root/anaconda/bin/conda", line 5, in <module>
sys.exit(main())
File "/home/aaronmeurer/conda/conda/cli/main.py", line 202, in main
args_func(args, p)
File "/home/aaronmeurer/conda/conda/cli/main.py", line 207, in args_func
args.func(args, p)
File "/home/aaronmeurer/conda/conda/cli/main_clean.py", line 317, in execute
pkgs_dir, rmlist, totalsize = find_tarballs()
File "/home/aaronmeurer/conda/conda/cli/main_clean.py", line 108, in find_tarballs
for fn in os.listdir(pkgs_dir):
FileNotFoundError: [Errno 2] No such file or directory: '/root/.conda/envs/.pkgs'
```
| conda/cli/main_clean.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from argparse import RawDescriptionHelpFormatter
import os
import sys
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Register the `conda clean` sub-command and its options."""
    p = sub_parsers.add_parser(
        'clean',
        formatter_class = RawDescriptionHelpFormatter,
        description = descr,
        help = descr,
        epilog = example,
    )
    # Shared --yes / --json flags.
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action = "store_true",
        help = "remove index cache",
    )
    p.add_argument(
        "-l", "--lock",
        action = "store_true",
        help = "remove all conda lock files",
    )
    p.add_argument(
        "-t", "--tarballs",
        action = "store_true",
        help = "remove cached package tarballs",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""remove unused cached packages. Warning: this does not check
for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""remove files from the source cache of conda build""",
    )
    # Dispatch to execute() when this sub-command is selected.
    p.set_defaults(func=execute)
def find_lock():
    """
    Yield the paths of all conda lock directories found in the package
    caches, the root prefix, every environment, and (when conda-build is
    installed) the conda-build root.
    """
    from os.path import join
    from conda.lock import LOCKFN
    # Copy before extending: the original aliased the global list
    # (`lock_dirs = config.pkgs_dirs`) and then mutated it in place with
    # += and append(), corrupting config.pkgs_dirs for the rest of the
    # process.
    lock_dirs = list(config.pkgs_dirs)
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))
    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        # conda-build not installed; nothing extra to scan.
        pass
    for dir in lock_dirs:
        if not os.path.exists(dir):
            continue
        # Conda locks are directories whose names start with LOCKFN.
        for dn in os.listdir(dir):
            if os.path.isdir(join(dir, dn)) and dn.startswith(LOCKFN):
                path = join(dir, dn)
                yield path
def rm_lock(locks, verbose=True):
    """
    Remove every lock directory listed in *locks*.

    Each entry must be an (empty) directory; a message is printed for each
    removal unless *verbose* is false.
    """
    for lock_dir in locks:
        if verbose:
            print('removing: %s' % lock_dir)
        os.rmdir(lock_dir)
def find_tarballs():
    """
    Find cached package tarballs (*.tar.bz2 / *.tar.bz2.part) in the first
    package cache directory.

    Returns (pkgs_dir, rmlist, totalsize).
    """
    pkgs_dir = config.pkgs_dirs[0]
    rmlist = []
    # The package cache may not exist yet (e.g. a root install where
    # ~/.conda/envs/.pkgs was never created) — previously os.listdir()
    # raised FileNotFoundError here and `conda clean -t` crashed.
    if isdir(pkgs_dir):
        for fn in os.listdir(pkgs_dir):
            if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
                rmlist.append(fn)
    if not rmlist:
        return pkgs_dir, rmlist, 0
    totalsize = 0
    for fn in rmlist:
        size = getsize(join(pkgs_dir, fn))
        totalsize += size
    return pkgs_dir, rmlist, totalsize
def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True):
    """
    Delete the tarball filenames in *rmlist* from *pkgs_dir*, after printing
    a summary and asking for confirmation (unless --yes / --json).
    """
    if verbose:
        print('Cache location: %s' % pkgs_dir)
    if not rmlist:
        if verbose:
            print("There are no tarballs to remove")
        return
    if verbose:
        print("Will remove the following tarballs:")
        print()
        # Width of the separator line follows the longest filename.
        maxlen = len(max(rmlist, key=lambda x: len(str(x))))
        fmt = "%-40s %10s"
        for fn in rmlist:
            size = getsize(join(pkgs_dir, fn))
            print(fmt % (fn, human_bytes(size)))
        print('-' * (maxlen + 2 + 10))
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    # In JSON dry-run mode report only; do not delete anything.
    if args.json and args.dry_run:
        return
    for fn in rmlist:
        if verbose:
            print("removing %s" % fn)
        os.unlink(os.path.join(pkgs_dir, fn))
def find_pkgs():
    """
    Find extracted packages in the first package cache that are not
    hard-linked into any environment (i.e. safe to remove).

    Returns (pkgs_dir, rmlist, warnings, totalsize, pkgsizes).
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    pkgs_dir = config.pkgs_dirs[0]
    warnings = []
    rmlist = []
    # Guard against a missing cache directory (the same FileNotFoundError
    # that `conda clean -t` hit): treat it as "nothing cached".
    if isdir(pkgs_dir):
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
    else:
        pkgs = []
    for pkg in pkgs:
        breakit = False
        for root, dir, files in walk(join(pkgs_dir, pkg)):
            if breakit:
                break
            for fn in files:
                try:
                    stat = lstat(join(root, fn))
                except OSError as e:
                    warnings.append((fn, e))
                    continue
                if stat.st_nlink > 1:
                    # print('%s is installed: %s' % (pkg, join(root, fn)))
                    breakit = True
                    break
        # The original used for/else here, which wrongly appended a package
        # when the hard link was found in the *last* directory walked (the
        # loop ends without a `break`, so the else: clause still ran).
        if not breakit:
            rmlist.append(pkg)
    if not rmlist:
        return pkgs_dir, rmlist, warnings, 0, []
    totalsize = 0
    pkgsizes = []
    for pkg in rmlist:
        pkgsize = 0
        for root, dir, files in walk(join(pkgs_dir, pkg)):
            for fn in files:
                # We don't have to worry about counting things twice: by
                # definition these files all have a link count of 1!
                size = lstat(join(root, fn)).st_size
                totalsize += size
                pkgsize += size
        pkgsizes.append(pkgsize)
    return pkgs_dir, rmlist, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
            verbose=True):
    """
    Remove the unused extracted packages in *rmlist* from *pkgs_dir* after
    printing a size summary and confirming (unless --yes / --json).
    """
    if verbose:
        print('Cache location: %s' % pkgs_dir)
        # Surface any stat() failures encountered while scanning.
        for fn, exception in warnings:
            print(exception)
    if not rmlist:
        if verbose:
            print("There are no unused packages to remove")
        return
    if verbose:
        print("Will remove the following packages:")
        print()
        maxlen = len(max(rmlist, key=lambda x: len(str(x))))
        fmt = "%-40s %10s"
        for pkg, pkgsize in zip(rmlist, pkgsizes):
            print(fmt % (pkg, human_bytes(pkgsize)))
        print('-' * (maxlen + 2 + 10))
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    # In JSON dry-run mode report only; do not delete anything.
    if args.json and args.dry_run:
        return
    for pkg in rmlist:
        if verbose:
            print("removing %s" % pkg)
        rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Remove the repodata index cache from the first package cache dir."""
    # rm_rf is already imported at module level; the local re-import the
    # original carried was redundant.
    rm_rf(join(config.pkgs_dirs[0], 'cache'))
def find_source_cache():
    """
    Measure conda-build's source/git/hg/svn caches.

    Returns a dict with 'warnings', 'cache_dirs', 'cache_sizes' and
    'total_size'; when conda-build is missing, a warning entry is returned
    instead of raising.
    """
    try:
        import conda_build.source
    except ImportError:
        return {
            'warnings': ["conda-build is not installed; could not clean source cache"],
            'cache_dirs': [],
            'cache_sizes': {},
            'total_size': 0,
        }
    cache_dirs = {
        'source cache': conda_build.source.SRC_CACHE,
        'git cache': conda_build.source.GIT_CACHE,
        'hg cache': conda_build.source.HG_CACHE,
        'svn cache': conda_build.source.SVN_CACHE,
    }
    sizes = {}
    totalsize = 0
    # walk() silently yields nothing for non-existent cache directories.
    for cache_type, cache_dir in cache_dirs.items():
        dirsize = 0
        for root, d, files in walk(cache_dir):
            for fn in files:
                size = lstat(join(root, fn)).st_size
                totalsize += size
                dirsize += size
        sizes[cache_type] = dirsize
    return {
        'warnings': [],
        'cache_dirs': cache_dirs,
        'cache_sizes': sizes,
        'total_size': totalsize,
    }
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """
    Print a per-cache size report, confirm, and delete conda-build's
    source caches. Keyword arguments mirror find_source_cache()'s result.
    """
    verbose = not args.json
    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        # Warnings mean the caches could not be located; abort.
        return
    # NOTE(review): these print() calls run even in --json mode (`verbose`
    # is only consulted for the warnings above) — looks unintentional;
    # confirm whether JSON output should stay clean here.
    for cache_type in cache_dirs:
        print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
        print("%-40s %10s" % ("Size:",
                              human_bytes(cache_sizes[cache_type])))
        print()
    print("%-40s %10s" % ("Total:", human_bytes(total_size)))
    if not args.json:
        common.confirm_yn(args)
    # In JSON dry-run mode report only; do not delete anything.
    if args.json and args.dry_run:
        return
    for dir in cache_dirs.values():
        print("Removing %s" % dir)
        rm_rf(dir)
def execute(args, parser):
    """
    Entry point for `conda clean`: dispatch to the requested clean-up
    actions and emit a JSON summary when --json was given.
    """
    json_result = {
        'success': True
    }
    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)
    if args.tarballs:
        pkgs_dir, rmlist, totalsize = find_tarballs()
        json_result['tarballs'] = {
            'pkgs_dir': pkgs_dir,
            'files': rmlist,
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=not args.json)
    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()
    if args.packages:
        pkgs_dir, rmlist, warnings, totalsize, pkgsizes = find_pkgs()
        json_result['packages'] = {
            'pkgs_dir': pkgs_dir,
            'files': rmlist,
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': dict(zip(rmlist, pkgsizes))
        }
        rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
                verbose=not args.json)
    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])
    # At least one action flag is required.
    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")
    if args.json:
        common.stdout_json(json_result)
<|code_end|>
| conda/cli/main_clean.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from argparse import RawDescriptionHelpFormatter
import os
import sys
from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Register the `conda clean` sub-command and its options."""
    p = sub_parsers.add_parser(
        'clean',
        formatter_class = RawDescriptionHelpFormatter,
        description = descr,
        help = descr,
        epilog = example,
    )
    # Shared --yes / --json flags.
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action = "store_true",
        help = "remove index cache",
    )
    p.add_argument(
        "-l", "--lock",
        action = "store_true",
        help = "remove all conda lock files",
    )
    p.add_argument(
        "-t", "--tarballs",
        action = "store_true",
        help = "remove cached package tarballs",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""remove unused cached packages. Warning: this does not check
for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""remove files from the source cache of conda build""",
    )
    # Dispatch to execute() when this sub-command is selected.
    p.set_defaults(func=execute)
def find_lock():
    """
    Yield the paths of all conda lock directories found in the package
    caches, the root prefix, every environment, and (when conda-build is
    installed) the conda-build root.
    """
    from os.path import join
    from conda.lock import LOCKFN
    # Copy before extending: the original aliased the global list
    # (`lock_dirs = config.pkgs_dirs`) and then mutated it in place with
    # += and append(), corrupting config.pkgs_dirs for the rest of the
    # process.
    lock_dirs = list(config.pkgs_dirs)
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))
    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        # conda-build not installed; nothing extra to scan.
        pass
    for dir in lock_dirs:
        if not os.path.exists(dir):
            continue
        # Conda locks are directories whose names start with LOCKFN.
        for dn in os.listdir(dir):
            if os.path.isdir(join(dir, dn)) and dn.startswith(LOCKFN):
                path = join(dir, dn)
                yield path
def rm_lock(locks, verbose=True):
    """
    Remove every lock directory listed in *locks*.

    Each entry must be an (empty) directory; a message is printed for each
    removal unless *verbose* is false.
    """
    for lock_dir in locks:
        if verbose:
            print('removing: %s' % lock_dir)
        os.rmdir(lock_dir)
def find_tarballs():
    """
    Find cached package tarballs (*.tar.bz2 / *.tar.bz2.part) across all
    configured package caches.

    Returns (pkgs_dirs, totalsize) where pkgs_dirs maps each cache
    directory to the list of tarball filenames found there.
    """
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        # A cache directory may not exist (fixes the FileNotFoundError
        # from `conda clean -t`); skip it.
        if not isdir(pkgs_dir):
            continue
        for fn in os.listdir(pkgs_dir):
            if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
                pkgs_dirs[pkgs_dir].append(fn)
    totalsize = 0
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            size = getsize(join(pkgs_dir, fn))
            totalsize += size
    return pkgs_dirs, totalsize
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """
    Delete the tarballs listed in *pkgs_dirs* (a cache-dir -> filenames
    mapping) after printing a per-cache summary and confirming (unless
    --yes / --json).
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return
    if verbose:
        print("Will remove the following tarballs:")
        print()
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-'*len(pkgs_dir))
            fmt = "%-40s %10s"
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51) # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    # In JSON dry-run mode report only; do not delete anything.
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % fn)
            os.unlink(os.path.join(pkgs_dir, fn))
def find_pkgs():
    """
    Find extracted packages across all configured package caches that are
    not hard-linked into any environment (i.e. safe to remove).

    Returns (pkgs_dirs, warnings, totalsize, pkgsizes) where pkgs_dirs and
    pkgsizes map each cache directory to parallel lists of removable
    package names and their sizes.
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    warnings = []
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        # A cache directory may not exist — the same FileNotFoundError
        # class that `conda clean -t` hit; skip it instead of crashing.
        if not isdir(pkgs_dir):
            continue
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
        for pkg in pkgs:
            breakit = False
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                if breakit:
                    break
                for fn in files:
                    try:
                        stat = lstat(join(root, fn))
                    except OSError as e:
                        warnings.append((fn, e))
                        continue
                    if stat.st_nlink > 1:
                        # print('%s is installed: %s' % (pkg, join(root, fn)))
                        breakit = True
                        break
            # The original used for/else here, which wrongly kept a package
            # when the hard link was found in the *last* directory walked
            # (the loop ends without a `break`, so else: still ran).
            if not breakit:
                pkgs_dirs[pkgs_dir].append(pkg)
    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice: by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)
    return pkgs_dirs, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """
    Remove the unused extracted packages listed in *pkgs_dirs* (cache-dir
    -> package names, with *pkgsizes* holding the parallel sizes) after
    printing a summary and confirming (unless --yes / --json).
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
        # Surface any stat() failures encountered while scanning.
        for fn, exception in warnings:
            print(exception)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return
    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            fmt = "%-40s %10s"
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51) # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    # In JSON dry-run mode report only; do not delete anything.
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Remove the repodata index cache from the first package cache dir."""
    # rm_rf is already imported at module level; the local re-import the
    # original carried was redundant.
    rm_rf(join(config.pkgs_dirs[0], 'cache'))
def find_source_cache():
    """
    Measure conda-build's source/git/hg/svn caches.

    Returns a dict with 'warnings', 'cache_dirs', 'cache_sizes' and
    'total_size'; when conda-build is missing, a warning entry is returned
    instead of raising.
    """
    try:
        import conda_build.source
    except ImportError:
        return {
            'warnings': ["conda-build is not installed; could not clean source cache"],
            'cache_dirs': [],
            'cache_sizes': {},
            'total_size': 0,
        }
    cache_dirs = {
        'source cache': conda_build.source.SRC_CACHE,
        'git cache': conda_build.source.GIT_CACHE,
        'hg cache': conda_build.source.HG_CACHE,
        'svn cache': conda_build.source.SVN_CACHE,
    }
    sizes = {}
    totalsize = 0
    # walk() silently yields nothing for non-existent cache directories.
    for cache_type, cache_dir in cache_dirs.items():
        dirsize = 0
        for root, d, files in walk(cache_dir):
            for fn in files:
                size = lstat(join(root, fn)).st_size
                totalsize += size
                dirsize += size
        sizes[cache_type] = dirsize
    return {
        'warnings': [],
        'cache_dirs': cache_dirs,
        'cache_sizes': sizes,
        'total_size': totalsize,
    }
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """
    Print a per-cache size report, confirm, and delete conda-build's
    source caches. Keyword arguments mirror find_source_cache()'s result.
    """
    verbose = not args.json
    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        # Warnings mean the caches could not be located; abort.
        return
    # NOTE(review): these print() calls run even in --json mode (`verbose`
    # is only consulted for the warnings above) — looks unintentional;
    # confirm whether JSON output should stay clean here.
    for cache_type in cache_dirs:
        print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
        print("%-40s %10s" % ("Size:",
                              human_bytes(cache_sizes[cache_type])))
        print()
    print("%-40s %10s" % ("Total:", human_bytes(total_size)))
    if not args.json:
        common.confirm_yn(args)
    # In JSON dry-run mode report only; do not delete anything.
    if args.json and args.dry_run:
        return
    for dir in cache_dirs.values():
        print("Removing %s" % dir)
        rm_rf(dir)
def execute(args, parser):
    """
    Entry point for `conda clean`: dispatch to the requested clean-up
    actions and emit a JSON summary when --json was given.

    The JSON payload keeps the old single-cache-dir keys ('pkgs_dir',
    'files') alongside the new multi-cache ones for backwards
    compatibility.
    """
    json_result = {
        'success': True
    }
    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)
    if args.tarballs:
        pkgs_dirs, totalsize = find_tarballs()
        # First cache dir (sorted) stands in for the legacy single-dir API.
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first, # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first], # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()
    if args.packages:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first, # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first], # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)
    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])
    # At least one action flag is required.
    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")
    if args.json:
        common.stdout_json(json_result)
| conda/cli/main_clean.py
--- a/conda/cli/main_clean.py
+++ b/conda/cli/main_clean.py
@@ -8,6 +8,7 @@
from argparse import RawDescriptionHelpFormatter
import os
import sys
+from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
@@ -102,29 +103,29 @@ def rm_lock(locks, verbose=True):
def find_tarballs():
- pkgs_dir = config.pkgs_dirs[0]
-
- rmlist = []
- for fn in os.listdir(pkgs_dir):
- if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
- rmlist.append(fn)
-
- if not rmlist:
- return pkgs_dir, rmlist, 0
+ pkgs_dirs = defaultdict(list)
+ for pkgs_dir in config.pkgs_dirs:
+ if not isdir(pkgs_dir):
+ continue
+ for fn in os.listdir(pkgs_dir):
+ if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
+ pkgs_dirs[pkgs_dir].append(fn)
totalsize = 0
- for fn in rmlist:
- size = getsize(join(pkgs_dir, fn))
- totalsize += size
+ for pkgs_dir in pkgs_dirs:
+ for fn in pkgs_dirs[pkgs_dir]:
+ size = getsize(join(pkgs_dir, fn))
+ totalsize += size
- return pkgs_dir, rmlist, totalsize
+ return pkgs_dirs, totalsize
-def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True):
+def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
if verbose:
- print('Cache location: %s' % pkgs_dir)
+ for pkgs_dir in pkgs_dirs:
+ print('Cache location: %s' % pkgs_dir)
- if not rmlist:
+ if not any(pkgs_dirs[i] for i in pkgs_dirs):
if verbose:
print("There are no tarballs to remove")
return
@@ -133,12 +134,15 @@ def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True):
print("Will remove the following tarballs:")
print()
- maxlen = len(max(rmlist, key=lambda x: len(str(x))))
- fmt = "%-40s %10s"
- for fn in rmlist:
- size = getsize(join(pkgs_dir, fn))
- print(fmt % (fn, human_bytes(size)))
- print('-' * (maxlen + 2 + 10))
+ for pkgs_dir in pkgs_dirs:
+ print(pkgs_dir)
+ print('-'*len(pkgs_dir))
+ fmt = "%-40s %10s"
+ for fn in pkgs_dirs[pkgs_dir]:
+ size = getsize(join(pkgs_dir, fn))
+ print(fmt % (fn, human_bytes(size)))
+ print()
+ print('-' * 51) # From 40 + 1 + 10 in fmt
print(fmt % ('Total:', human_bytes(totalsize)))
print()
@@ -147,79 +151,82 @@ def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True):
if args.json and args.dry_run:
return
- for fn in rmlist:
- if verbose:
- print("removing %s" % fn)
- os.unlink(os.path.join(pkgs_dir, fn))
+ for pkgs_dir in pkgs_dirs:
+ for fn in pkgs_dirs[pkgs_dir]:
+ if verbose:
+ print("removing %s" % fn)
+ os.unlink(os.path.join(pkgs_dir, fn))
def find_pkgs():
# TODO: This doesn't handle packages that have hard links to files within
# themselves, like bin/python3.3 and bin/python3.3m in the Python package
- pkgs_dir = config.pkgs_dirs[0]
warnings = []
- rmlist = []
- pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
- # Only include actual packages
- isdir(join(pkgs_dir, i, 'info'))]
- for pkg in pkgs:
- breakit = False
- for root, dir, files in walk(join(pkgs_dir, pkg)):
- if breakit:
- break
- for fn in files:
- try:
- stat = lstat(join(root, fn))
- except OSError as e:
- warnings.append((fn, e))
- continue
- if stat.st_nlink > 1:
- # print('%s is installed: %s' % (pkg, join(root, fn)))
- breakit = True
+ pkgs_dirs = defaultdict(list)
+ for pkgs_dir in config.pkgs_dirs:
+ pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
+ # Only include actual packages
+ isdir(join(pkgs_dir, i, 'info'))]
+ for pkg in pkgs:
+ breakit = False
+ for root, dir, files in walk(join(pkgs_dir, pkg)):
+ if breakit:
break
- else:
- rmlist.append(pkg)
-
- if not rmlist:
- return pkgs_dir, rmlist, warnings, 0, []
+ for fn in files:
+ try:
+ stat = lstat(join(root, fn))
+ except OSError as e:
+ warnings.append((fn, e))
+ continue
+ if stat.st_nlink > 1:
+ # print('%s is installed: %s' % (pkg, join(root, fn)))
+ breakit = True
+ break
+ else:
+ pkgs_dirs[pkgs_dir].append(pkg)
totalsize = 0
- pkgsizes = []
- for pkg in rmlist:
- pkgsize = 0
- for root, dir, files in walk(join(pkgs_dir, pkg)):
- for fn in files:
- # We don't have to worry about counting things twice: by
- # definition these files all have a link count of 1!
- size = lstat(join(root, fn)).st_size
- totalsize += size
- pkgsize += size
- pkgsizes.append(pkgsize)
-
- return pkgs_dir, rmlist, warnings, totalsize, pkgsizes
-
-
-def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
+ pkgsizes = defaultdict(list)
+ for pkgs_dir in pkgs_dirs:
+ for pkg in pkgs_dirs[pkgs_dir]:
+ pkgsize = 0
+ for root, dir, files in walk(join(pkgs_dir, pkg)):
+ for fn in files:
+ # We don't have to worry about counting things twice: by
+ # definition these files all have a link count of 1!
+ size = lstat(join(root, fn)).st_size
+ totalsize += size
+ pkgsize += size
+ pkgsizes[pkgs_dir].append(pkgsize)
+
+ return pkgs_dirs, warnings, totalsize, pkgsizes
+
+
+def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
verbose=True):
if verbose:
- print('Cache location: %s' % pkgs_dir)
- for fn, exception in warnings:
- print(exception)
+ for pkgs_dir in pkgs_dirs:
+ print('Cache location: %s' % pkgs_dir)
+ for fn, exception in warnings:
+ print(exception)
- if not rmlist:
+ if not any(pkgs_dirs[i] for i in pkgs_dirs):
if verbose:
print("There are no unused packages to remove")
return
if verbose:
print("Will remove the following packages:")
- print()
- maxlen = len(max(rmlist, key=lambda x: len(str(x))))
- fmt = "%-40s %10s"
- for pkg, pkgsize in zip(rmlist, pkgsizes):
- print(fmt % (pkg, human_bytes(pkgsize)))
- print('-' * (maxlen + 2 + 10))
+ for pkgs_dir in pkgs_dirs:
+ print(pkgs_dir)
+ print('-' * len(pkgs_dir))
+ print()
+ fmt = "%-40s %10s"
+ for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
+ print(fmt % (pkg, human_bytes(pkgsize)))
+ print()
+ print('-' * 51) # 40 + 1 + 10 in fmt
print(fmt % ('Total:', human_bytes(totalsize)))
print()
@@ -228,10 +235,11 @@ def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
if args.json and args.dry_run:
return
- for pkg in rmlist:
- if verbose:
- print("removing %s" % pkg)
- rm_rf(join(pkgs_dir, pkg))
+ for pkgs_dir in pkgs_dirs:
+ for pkg in pkgs_dirs[pkgs_dir]:
+ if verbose:
+ print("removing %s" % pkg)
+ rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
@@ -314,13 +322,15 @@ def execute(args, parser):
rm_lock(locks, verbose=not args.json)
if args.tarballs:
- pkgs_dir, rmlist, totalsize = find_tarballs()
+ pkgs_dirs, totalsize = find_tarballs()
+ first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
json_result['tarballs'] = {
- 'pkgs_dir': pkgs_dir,
- 'files': rmlist,
+ 'pkgs_dir': first, # Backwards compabitility
+ 'pkgs_dirs': dict(pkgs_dirs),
+ 'files': pkgs_dirs[first], # Backwards compatibility
'total_size': totalsize
}
- rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=not args.json)
+ rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
if args.index_cache:
json_result['index_cache'] = {
@@ -329,15 +339,17 @@ def execute(args, parser):
rm_index_cache()
if args.packages:
- pkgs_dir, rmlist, warnings, totalsize, pkgsizes = find_pkgs()
+ pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
+ first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
json_result['packages'] = {
- 'pkgs_dir': pkgs_dir,
- 'files': rmlist,
+ 'pkgs_dir': first, # Backwards compatibility
+ 'pkgs_dirs': dict(pkgs_dirs),
+ 'files': pkgs_dirs[first], # Backwards compatibility
'total_size': totalsize,
'warnings': warnings,
- 'pkg_sizes': dict(zip(rmlist, pkgsizes))
+ 'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
}
- rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes,
+ rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
verbose=not args.json)
if args.source_cache: |
conda remove --dry-run actually removes the package
Is there anything set around here:
https://github.com/conda/conda/blob/ded940c3fa845bbb86b3492e4a7c883c1bcec10b/conda/cli/main_remove.py#L196
to actually exit before removing the package if --dry-run is set but --json is not?
I'm just running 3.11.0 and haven't grabbed and functionally tested master to see if this is still a problem.
| conda/cli/main_remove.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from os.path import join
import argparse
from argparse import RawDescriptionHelpFormatter
import errno
import logging
from conda import config
from conda import plan
from conda.cli import common
from conda.console import json_progress_bars
help = "%s a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution. Note: conda uninstall is an alias for conda remove.
"""
example = """
Examples:
conda %s -n myenv scipy
"""
uninstall_help = "Alias for conda remove. See conda remove --help."
log = logging.getLogger(__name__)
def configure_parser(sub_parsers, name='remove'):
if name == 'remove':
p = sub_parsers.add_parser(
name,
formatter_class=RawDescriptionHelpFormatter,
description=descr % name.capitalize(),
help=help % name.capitalize(),
epilog=example % name,
add_help=False,
)
else:
p = sub_parsers.add_parser(
name,
formatter_class=RawDescriptionHelpFormatter,
description=uninstall_help,
help=uninstall_help,
epilog=example % name,
add_help=False,
)
common.add_parser_help(p)
common.add_parser_yes(p)
common.add_parser_json(p)
p.add_argument(
"--all",
action="store_true",
help="%s all packages, i.e., the entire environment." % name.capitalize(),
)
p.add_argument(
"--features",
action="store_true",
help="%s features (instead of packages)." % name.capitalize(),
)
common.add_parser_no_pin(p)
common.add_parser_channels(p)
common.add_parser_prefix(p)
common.add_parser_quiet(p)
common.add_parser_use_index_cache(p)
common.add_parser_use_local(p)
common.add_parser_offline(p)
p.add_argument(
"--force-pscheck",
action="store_true",
help=("Force removal (when package process is running) (deprecated)"
if config.platform == 'win' else argparse.SUPPRESS)
)
p.add_argument(
'package_names',
metavar='package_name',
action="store",
nargs='*',
help="Package names to %s from the environment." % name,
).completer = common.InstalledPackages
p.set_defaults(func=execute)
@common.deprecation_warning
def execute(args, parser):
import sys
import conda.plan as plan
import conda.instructions as inst
from conda.install import rm_rf, linked
from conda import config
if not (args.all or args.package_names):
common.error_and_exit('no package names supplied,\n'
' try "conda remove -h" for more details',
json=args.json,
error_type="ValueError")
prefix = common.get_prefix(args)
if args.all and prefix == config.default_prefix:
common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
common.check_write('remove', prefix, json=args.json)
common.ensure_override_channels_requires_channel(args, json=args.json)
channel_urls = args.channel or ()
if args.use_local:
from conda.fetch import fetch_index
from conda.utils import url_path
try:
from conda_build.config import croot
except ImportError:
common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
" to use the --use-local option",
json=args.json,
error_type="RuntimeError")
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
fetch_index.cache = {}
index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
prepend=not args.override_channels,
use_cache=args.use_index_cache,
json=args.json,
offline=args.offline)
else:
index = common.get_index_trap(channel_urls=channel_urls,
prepend=not args.override_channels,
use_cache=args.use_index_cache,
json=args.json,
offline=args.offline)
specs = None
if args.features:
features = set(args.package_names)
actions = plan.remove_features_actions(prefix, index, features)
elif args.all:
if plan.is_root_prefix(prefix):
common.error_and_exit('cannot remove root environment,\n'
' add -n NAME or -p PREFIX option',
json=args.json,
error_type="CantRemoveRoot")
actions = {inst.PREFIX: prefix}
for dist in sorted(linked(prefix)):
plan.add_unlink(actions, dist)
else:
specs = common.specs_from_args(args.package_names)
if (plan.is_root_prefix(prefix) and
common.names_in_specs(common.root_no_rm, specs)):
common.error_and_exit('cannot remove %s from root environment' %
', '.join(common.root_no_rm),
json=args.json,
error_type="CantRemoveFromRoot")
actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)
if plan.nothing_to_do(actions):
if args.all:
rm_rf(prefix)
if args.json:
common.stdout_json({
'success': True,
'actions': actions
})
return
common.error_and_exit('no packages found to remove from '
'environment: %s' % prefix,
json=args.json,
error_type="PackageNotInstalled")
if not args.json:
print()
print("Package plan for package removal in environment %s:" % prefix)
plan.display_actions(actions, index)
if args.json and args.dry_run:
common.stdout_json({
'success': True,
'dry_run': True,
'actions': actions
})
return
if args.json and not args.quiet:
with json_progress_bars():
plan.execute_actions(actions, index, verbose=not args.quiet)
else:
plan.execute_actions(actions, index, verbose=not args.quiet)
if specs:
try:
with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
f.write('# remove specs: %s\n' % specs)
except IOError as e:
if e.errno == errno.EACCES:
log.debug("Can't write the history file")
else:
raise
if args.all:
rm_rf(prefix)
if args.json:
common.stdout_json({
'success': True,
'actions': actions
})
<|code_end|>
| conda/cli/main_remove.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from os.path import join
import argparse
from argparse import RawDescriptionHelpFormatter
import errno
import logging
from conda import config
from conda import plan
from conda.cli import common
from conda.console import json_progress_bars
help = "%s a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution. Note: conda uninstall is an alias for conda remove.
"""
example = """
Examples:
conda %s -n myenv scipy
"""
uninstall_help = "Alias for conda remove. See conda remove --help."
log = logging.getLogger(__name__)
def configure_parser(sub_parsers, name='remove'):
if name == 'remove':
p = sub_parsers.add_parser(
name,
formatter_class=RawDescriptionHelpFormatter,
description=descr % name.capitalize(),
help=help % name.capitalize(),
epilog=example % name,
add_help=False,
)
else:
p = sub_parsers.add_parser(
name,
formatter_class=RawDescriptionHelpFormatter,
description=uninstall_help,
help=uninstall_help,
epilog=example % name,
add_help=False,
)
common.add_parser_help(p)
common.add_parser_yes(p)
common.add_parser_json(p)
p.add_argument(
"--all",
action="store_true",
help="%s all packages, i.e., the entire environment." % name.capitalize(),
)
p.add_argument(
"--features",
action="store_true",
help="%s features (instead of packages)." % name.capitalize(),
)
common.add_parser_no_pin(p)
common.add_parser_channels(p)
common.add_parser_prefix(p)
common.add_parser_quiet(p)
common.add_parser_use_index_cache(p)
common.add_parser_use_local(p)
common.add_parser_offline(p)
p.add_argument(
"--force-pscheck",
action="store_true",
help=("Force removal (when package process is running) (deprecated)"
if config.platform == 'win' else argparse.SUPPRESS)
)
p.add_argument(
'package_names',
metavar='package_name',
action="store",
nargs='*',
help="Package names to %s from the environment." % name,
).completer = common.InstalledPackages
p.set_defaults(func=execute)
@common.deprecation_warning
def execute(args, parser):
import sys
import conda.plan as plan
import conda.instructions as inst
from conda.install import rm_rf, linked
from conda import config
if not (args.all or args.package_names):
common.error_and_exit('no package names supplied,\n'
' try "conda remove -h" for more details',
json=args.json,
error_type="ValueError")
prefix = common.get_prefix(args)
if args.all and prefix == config.default_prefix:
common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
common.check_write('remove', prefix, json=args.json)
common.ensure_override_channels_requires_channel(args, json=args.json)
channel_urls = args.channel or ()
if args.use_local:
from conda.fetch import fetch_index
from conda.utils import url_path
try:
from conda_build.config import croot
except ImportError:
common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
" to use the --use-local option",
json=args.json,
error_type="RuntimeError")
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
fetch_index.cache = {}
index = common.get_index_trap(channel_urls=[url_path(croot)] + list(channel_urls),
prepend=not args.override_channels,
use_cache=args.use_index_cache,
json=args.json,
offline=args.offline)
else:
index = common.get_index_trap(channel_urls=channel_urls,
prepend=not args.override_channels,
use_cache=args.use_index_cache,
json=args.json,
offline=args.offline)
specs = None
if args.features:
features = set(args.package_names)
actions = plan.remove_features_actions(prefix, index, features)
elif args.all:
if plan.is_root_prefix(prefix):
common.error_and_exit('cannot remove root environment,\n'
' add -n NAME or -p PREFIX option',
json=args.json,
error_type="CantRemoveRoot")
actions = {inst.PREFIX: prefix}
for dist in sorted(linked(prefix)):
plan.add_unlink(actions, dist)
else:
specs = common.specs_from_args(args.package_names)
if (plan.is_root_prefix(prefix) and
common.names_in_specs(common.root_no_rm, specs)):
common.error_and_exit('cannot remove %s from root environment' %
', '.join(common.root_no_rm),
json=args.json,
error_type="CantRemoveFromRoot")
actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)
if plan.nothing_to_do(actions):
if args.all:
rm_rf(prefix)
if args.json:
common.stdout_json({
'success': True,
'actions': actions
})
return
common.error_and_exit('no packages found to remove from '
'environment: %s' % prefix,
json=args.json,
error_type="PackageNotInstalled")
if not args.json:
print()
print("Package plan for package removal in environment %s:" % prefix)
plan.display_actions(actions, index)
if args.json and args.dry_run:
common.stdout_json({
'success': True,
'dry_run': True,
'actions': actions
})
return
if not args.json:
common.confirm_yn(args)
if args.json and not args.quiet:
with json_progress_bars():
plan.execute_actions(actions, index, verbose=not args.quiet)
else:
plan.execute_actions(actions, index, verbose=not args.quiet)
if specs:
try:
with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
f.write('# remove specs: %s\n' % specs)
except IOError as e:
if e.errno == errno.EACCES:
log.debug("Can't write the history file")
else:
raise
if args.all:
rm_rf(prefix)
if args.json:
common.stdout_json({
'success': True,
'actions': actions
})
| conda/cli/main_remove.py
--- a/conda/cli/main_remove.py
+++ b/conda/cli/main_remove.py
@@ -190,6 +190,9 @@ def execute(args, parser):
return
+ if not args.json:
+ common.confirm_yn(args)
+
if args.json and not args.quiet:
with json_progress_bars():
plan.execute_actions(actions, index, verbose=not args.quiet) |
https proxy username/password
I installed Anaconda using the command "bash Anaconda-2.3.0-Linux-x86_64.sh", added the path of bin/conda to .bashrc (accepting the installer's default prompt to prepend the bin path), and after closing and reopening the terminal I ran:
conda create -n dato-env python=2.7
which requires an https proxy username and password for metadata; the following is the error:
Password:
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:
```
https://github.com/conda/conda/issues
```
Include the output of the command 'conda info' in your report.
Traceback (most recent call last):
File "/home/mayank/anaconda/bin/conda", line 5, in <module>
sys.exit(main())
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 201, in main
args_func(args, p)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 208, in args_func
args.func(args, p)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/common.py", line 612, in inner
return func(args, parser)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 50, in execute
install.install(args, parser, 'create')
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/install.py", line 255, in install
offline=args.offline)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/common.py", line 549, in get_index_trap
return get_index(*args, **kwargs)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/api.py", line 42, in get_index
unknown=unknown)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/utils.py", line 119, in __call__
value = self.func(*args, **kw)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 255, in fetch_index
reversed(channel_urls))
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 254, in <lambda>
use_cache=use_cache, session=session)),
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 65, in func
res = f(*args, **kwargs)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 154, in fetch_repodata
handle_proxy_407(url, session)
File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 184, in handle_proxy_407
session.proxies[scheme], username, passwd)
KeyError: 'https'
Can you please help in this regard
| conda/fetch.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, dirname, isdir, join
import sys
import getpass
import warnings
from functools import wraps
from conda import config
from conda.utils import memoized
from conda.connection import CondaSession, unparse_url, RETRIES
from conda.compat import itervalues, input, urllib_quote
from conda.lock import Locked
import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
cache_dir = join(config.pkgs_dirs[0], 'cache')
try:
os.makedirs(cache_dir)
except OSError:
pass
return cache_dir
def cache_fn_url(url):
md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
return '%s.json' % (md5[:8],)
def add_http_value_to_dict(resp, http_key, d, dict_key):
value = resp.headers.get(http_key)
if value:
d[dict_key] = value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
def __init__(self, msg):
self.msg = msg
def __call__(self, f):
@wraps(f)
def func(*args, **kwargs):
res = f(*args, **kwargs)
dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
return res
return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
if not config.ssl_verify:
try:
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
except ImportError:
pass
else:
warnings.simplefilter('ignore', InsecureRequestWarning)
session = session or CondaSession()
cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
try:
with open(cache_path) as f:
cache = json.load(f)
except (IOError, ValueError):
cache = {'packages': {}}
if use_cache:
return cache
headers = {}
if "_etag" in cache:
headers["If-None-Match"] = cache["_etag"]
if "_mod" in cache:
headers["If-Modified-Since"] = cache["_mod"]
try:
resp = session.get(url + 'repodata.json.bz2',
headers=headers, proxies=session.proxies,
verify=config.ssl_verify)
resp.raise_for_status()
if resp.status_code != 304:
cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
add_http_value_to_dict(resp, 'Etag', cache, '_etag')
add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
except ValueError as e:
raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
(config.remove_binstar_tokens(url), e))
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return fetch_repodata(url, cache_dir=cache_dir,
use_cache=use_cache, session=session)
if e.response.status_code == 404:
if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
msg = ('Could not find Binstar user %s' %
config.remove_binstar_tokens(url).split(
config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
else:
if url.endswith('/noarch/'): # noarch directory might not exist
return None
msg = 'Could not find URL: %s' % config.remove_binstar_tokens(url)
elif e.response.status_code == 403 and url.endswith('/noarch/'):
return None
elif (e.response.status_code == 401 and config.rc.get('channel_alias',
config.DEFAULT_CHANNEL_ALIAS) in url):
# Note, this will not trigger if the binstar configured url does
# not match the conda configured one.
msg = ("Warning: you may need to login to binstar again with "
"'binstar login' to access private packages(%s, %s)" %
(config.hide_binstar_tokens(url), e))
stderrlog.info(msg)
return fetch_repodata(config.remove_binstar_tokens(url),
cache_dir=cache_dir,
use_cache=use_cache, session=session)
else:
msg = "HTTPError: %s: %s\n" % (e, config.remove_binstar_tokens(url))
log.debug(msg)
raise RuntimeError(msg)
except requests.exceptions.SSLError as e:
msg = "SSL Error: %s\n" % e
stderrlog.info("SSL verification error %s\n" % e.message)
log.debug(msg)
except requests.exceptions.ConnectionError as e:
# requests isn't so nice here. For whatever reason, https gives this
# error and http gives the above error. Also, there is no status_code
# attribute here. We have to just check if it looks like 407. See
# https://github.com/kennethreitz/requests/issues/2061.
if "407" in str(e): # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return fetch_repodata(url, cache_dir=cache_dir,
use_cache=use_cache, session=session)
msg = "Connection error: %s: %s\n" % (e, config.remove_binstar_tokens(url))
stderrlog.info('Could not connect to %s\n' % config.remove_binstar_tokens(url))
log.debug(msg)
if fail_unknown_host:
raise RuntimeError(msg)
cache['_url'] = config.remove_binstar_tokens(url)
try:
with open(cache_path, 'w') as fo:
json.dump(cache, fo, indent=2, sort_keys=True)
except IOError:
pass
return cache or None
def handle_proxy_407(url, session):
"""
Prompts the user for the proxy username and password and modifies the
proxy in the session object to include it.
"""
# We could also use HTTPProxyAuth, but this does not work with https
# proxies (see https://github.com/kennethreitz/requests/issues/2061).
scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
username, passwd = get_proxy_username_and_pass(scheme)
session.proxies[scheme] = add_username_and_pass_to_url(
session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
urlparts = list(requests.packages.urllib3.util.url.parse_url(url))
passwd = urllib_quote(passwd, '')
urlparts[1] = username + ':' + passwd
return unparse_url(urlparts)
@memoized
def get_proxy_username_and_pass(scheme):
username = input("\n%s proxy username: " % scheme)
passwd = getpass.getpass("Password:")
return username, passwd
def add_unknown(index):
for pkgs_dir in config.pkgs_dirs:
if not isdir(pkgs_dir):
continue
for dn in os.listdir(pkgs_dir):
fn = dn + '.tar.bz2'
if fn in index:
continue
try:
with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
meta = json.load(fi)
except IOError:
continue
if 'depends' not in meta:
meta['depends'] = []
log.debug("adding cached pkg to index: %s" % fn)
index[fn] = meta
def add_pip_dependency(index):
for info in itervalues(index):
if (info['name'] == 'python' and
info['version'].startswith(('2.', '3.'))):
info.setdefault('depends', []).append('pip')
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
log.debug('channel_urls=' + repr(channel_urls))
# pool = ThreadPool(5)
index = {}
stdoutlog.info("Fetching package metadata: ")
session = CondaSession()
for url in reversed(channel_urls):
if config.allowed_channels and url not in config.allowed_channels:
sys.exit("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
- %s
""" % (url, '\n - '.join(config.allowed_channels)))
try:
import concurrent.futures
from collections import OrderedDict
repodatas = []
with concurrent.futures.ThreadPoolExecutor(10) as executor:
future_to_url = OrderedDict([(executor.submit(
fetch_repodata, url, use_cache=use_cache,
session=session), url)
for url in reversed(channel_urls)])
for future in future_to_url:
url = future_to_url[future]
repodatas.append((url, future.result()))
except ImportError:
# concurrent.futures is only available in Python 3
repodatas = map(lambda url: (url, fetch_repodata(url,
use_cache=use_cache, session=session)),
reversed(channel_urls))
for url, repodata in repodatas:
if repodata is None:
continue
new_index = repodata['packages']
for info in itervalues(new_index):
info['channel'] = url
index.update(new_index)
stdoutlog.info('\n')
if unknown:
add_unknown(index)
add_pip_dependency(index)
return index
def fetch_pkg(info, dst_dir=None, session=None):
'''
fetch a package given by `info` and store it into `dst_dir`
'''
if dst_dir is None:
dst_dir = config.pkgs_dirs[0]
session = session or CondaSession()
fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
url = info['channel'] + fn
log.debug("url=%r" % url)
path = join(dst_dir, fn)
download(url, path, session=session, md5=info['md5'], urlstxt=True)
if info.get('sig'):
from conda.signature import verify, SignatureError
fn2 = fn + '.sig'
url = (info['channel'] if info['sig'] == '.' else
info['sig'].rstrip('/') + '/') + fn2
log.debug("signature url=%r" % url)
download(url, join(dst_dir, fn2), session=session)
try:
if verify(path):
return
except SignatureError as e:
sys.exit(str(e))
sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
retries=None):
pp = dst_path + '.part'
dst_dir = dirname(dst_path)
session = session or CondaSession()
if not config.ssl_verify:
try:
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
except ImportError:
pass
else:
warnings.simplefilter('ignore', InsecureRequestWarning)
if retries is None:
retries = RETRIES
with Locked(dst_dir):
try:
resp = session.get(url, stream=True, proxies=session.proxies,
verify=config.ssl_verify)
resp.raise_for_status()
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries)
msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
raise RuntimeError(msg)
except requests.exceptions.ConnectionError as e:
# requests isn't so nice here. For whatever reason, https gives
# this error and http gives the above error. Also, there is no
# status_code attribute here. We have to just check if it looks
# like 407.
# See: https://github.com/kennethreitz/requests/issues/2061.
if "407" in str(e): # Proxy Authentication Required
handle_proxy_407(url, session)
# try again
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries)
msg = "Connection error: %s: %s\n" % (e, url)
stderrlog.info('Could not connect to %s\n' % url)
log.debug(msg)
raise RuntimeError(msg)
except IOError as e:
raise RuntimeError("Could not open '%s': %s" % (url, e))
size = resp.headers.get('Content-Length')
if size:
size = int(size)
fn = basename(dst_path)
getLogger('fetch.start').info((fn[:14], size))
n = 0
if md5:
h = hashlib.new('md5')
try:
with open(pp, 'wb') as fo:
more = True
while more:
# Use resp.raw so that requests doesn't decode gz files
chunk = resp.raw.read(2**14)
if not chunk:
more = False
try:
fo.write(chunk)
except IOError:
raise RuntimeError("Failed to write to %r." % pp)
if md5:
h.update(chunk)
# update n with actual bytes read
n = resp.raw.tell()
if size and 0 <= n <= size:
getLogger('fetch.update').info(n)
except IOError as e:
if e.errno == 104 and retries: # Connection reset by pee
# try again
log.debug("%s, trying again" % e)
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries - 1)
raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))
if size:
getLogger('fetch.stop').info(None)
if md5 and h.hexdigest() != md5:
if retries:
# try again
log.debug("MD5 sums mismatch for download: %s (%s != %s), "
"trying again" % (url, h.hexdigest(), md5))
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries - 1)
raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
% (url, h.hexdigest(), md5))
try:
os.rename(pp, dst_path)
except OSError as e:
raise RuntimeError("Could not rename %r to %r: %r" %
(pp, dst_path, e))
if urlstxt:
try:
with open(join(dst_dir, 'urls.txt'), 'a') as fa:
fa.write('%s\n' % url)
except IOError:
pass
class TmpDownload(object):
    """
    Context manager to handle downloads to a tempfile.

    If *url* is a local path (no '://'), it is returned unchanged and no
    temporary directory is created; otherwise the file is downloaded into a
    fresh temp dir that is removed on exit.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # if we provide the file itself, no tmp dir is created
            self.tmp_dir = None
            return self.url
        else:
            if self.verbose:
                from conda.console import setup_handlers
                setup_handlers()
            self.tmp_dir = tempfile.mkdtemp()
            dst = join(self.tmp_dir, basename(self.url))
            download(self.url, dst)
            return dst

    def __exit__(self, exc_type, exc_value, traceback):
        # Only clean up if __enter__ actually created a temp dir.
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/fetch.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, dirname, isdir, join
import sys
import getpass
import warnings
from functools import wraps
from conda import config
from conda.utils import memoized
from conda.connection import CondaSession, unparse_url, RETRIES
from conda.compat import itervalues, input, urllib_quote
from conda.lock import Locked
import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Return the repodata cache directory, creating it if necessary."""
    cache_dir = join(config.pkgs_dirs[0], 'cache')
    try:
        os.makedirs(cache_dir)
    except OSError:
        # directory already exists (or is not creatable); callers will see
        # any real I/O problem when they try to use it
        pass
    return cache_dir


def cache_fn_url(url):
    """Map a channel URL to a short, stable cache file name."""
    md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
    return '%s.json' % (md5[:8],)


def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy HTTP header *http_key* from *resp* into d[dict_key], if present."""
    value = resp.headers.get(http_key)
    if value:
        d[dict_key] = value


# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator: log *msg* (plus call args) to the dot logger after the call."""
    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def func(*args, **kwargs):
            res = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return res
        return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """
    Fetch (or revalidate via ETag/Last-Modified) repodata.json.bz2 for *url*.

    Returns the parsed repodata dict, or None when the channel has no usable
    repodata (e.g. a missing noarch directory). Results are cached on disk.
    """
    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies,
                           verify=config.ssl_verify)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (config.remove_binstar_tokens(url), e))
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find Binstar user %s' %
                       config.remove_binstar_tokens(url).split(
                           config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % config.remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None
        elif (e.response.status_code == 401 and config.rc.get('channel_alias',
              config.DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to binstar again with "
                   "'binstar login' to access private packages(%s, %s)" %
                   (config.hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            return fetch_repodata(config.remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, config.remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)
    except requests.exceptions.SSLError as e:
        # SSL failures are reported but not fatal: fall through to return
        # whatever cache we have.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error %s\n" % e.message)
        log.debug(msg)
    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, config.remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % config.remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = config.remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        # a read-only cache dir is tolerated; we just lose the disk cache
        pass

    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        sys.exit("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/config.html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    username, passwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)


def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with ``username:passwd`` inserted as its auth component."""
    urlparts = list(requests.packages.urllib3.util.url.parse_url(url))
    passwd = urllib_quote(passwd, '')
    urlparts[1] = username + ':' + passwd
    return unparse_url(urlparts)


@memoized
def get_proxy_username_and_pass(scheme):
    # memoized so the user is prompted at most once per scheme per process
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index):
    """Add locally cached (extracted) packages missing from *index* in place."""
    for pkgs_dir in config.pkgs_dirs:
        if not isdir(pkgs_dir):
            continue
        for dn in os.listdir(pkgs_dir):
            fn = dn + '.tar.bz2'
            if fn in index:
                continue
            try:
                with open(join(pkgs_dir, dn, 'info', 'index.json')) as fi:
                    meta = json.load(fi)
            except IOError:
                # not an extracted package directory; skip it
                continue
            if 'depends' not in meta:
                meta['depends'] = []
            log.debug("adding cached pkg to index: %s" % fn)
            index[fn] = meta


def add_pip_dependency(index):
    """Make pip a dependency of every Python 2.x/3.x package in *index*."""
    for info in itervalues(index):
        if (info['name'] == 'python' and
                info['version'].startswith(('2.', '3.'))):
            info.setdefault('depends', []).append('pip')
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """
    Fetch and merge the repodata of all *channel_urls* into a single index
    mapping filename -> package record (with a 'channel' key added).
    Channels are iterated in reverse so that earlier channels win on update.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            # OrderedDict keeps the reversed-channel priority while the
            # fetches run in parallel.
            future_to_url = OrderedDict([(executor.submit(
                fetch_repodata, url, use_cache=use_cache,
                session=session), url)
                for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                        use_cache=use_cache, session=session)),
                        reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index)
    add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]

    session = session or CondaSession()

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from conda.signature import verify, SignatureError

        fn2 = fn + '.sig'
        # A sig value of '.' means the signature lives next to the package.
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/') + '/') + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """
    Download *url* to *dst_path*, streaming to a '.part' file first and
    renaming on success. Optionally verifies an MD5 checksum and records the
    URL in urls.txt. Retries on connection resets and checksum mismatches.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by pee
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                # urls.txt is best-effort bookkeeping only
                pass
class TmpDownload(object):
    """
    Context manager to handle downloads to a tempfile.

    A local path (no '://') is returned as-is with no temp dir; a URL is
    downloaded into a fresh temp dir that is removed on exit.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # if we provide the file itself, no tmp dir is created
            self.tmp_dir = None
            return self.url
        else:
            if self.verbose:
                from conda.console import setup_handlers
                setup_handlers()
            self.tmp_dir = tempfile.mkdtemp()
            dst = join(self.tmp_dir, basename(self.url))
            download(self.url, dst)
            return dst

    def __exit__(self, exc_type, exc_value, traceback):
        # Clean up only when __enter__ created a temp dir.
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -185,6 +185,10 @@ def handle_proxy_407(url, session):
# We could also use HTTPProxyAuth, but this does not work with https
# proxies (see https://github.com/kennethreitz/requests/issues/2061).
scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
+ if scheme not in session.proxies:
+ sys.exit("""Could not find a proxy for %r. See
+http://conda.pydata.org/docs/config.html#configure-conda-for-use-behind-a-proxy-server
+for more information on how to configure proxies.""" % scheme)
username, passwd = get_proxy_username_and_pass(scheme)
session.proxies[scheme] = add_username_and_pass_to_url(
session.proxies[scheme], username, passwd) |
Set the default CondaSession.verify default from the config setting
Currently, the .condarc config setting must be explicitly used when making a request with a CondaSession object. This means that anyone who wants to use CondaSession must check the .condarc ssl_verify value and appropriately interpret it, etc. (For example, see https://github.com/conda/conda/blob/47e300b0e2cd5aad1dfe18d26eada5995b058004/conda/fetch.py#L101)
I think it would be much cleaner for CondaSession itself to set the default verify value in its `__init__`: `self.verify = <code to get the .condarc ssl_verify setting>`. See https://github.com/kennethreitz/requests/blob/8b5e457b756b2ab4c02473f7a42c2e0201ecc7e9/requests/sessions.py#L314 to see that this is correct.
This change would mean that we don't need the verify argument here: https://github.com/conda/conda/blob/47e300b0e2cd5aad1dfe18d26eada5995b058004/conda/fetch.py#L101, and would also solve this issue: https://github.com/conda/conda-build/issues/523
| conda/connection.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
import re
import mimetypes
import os
import email
import base64
import ftplib
import cgi
from io import BytesIO
import tempfile
import conda
from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers
import requests
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """
    requests.Session preconfigured for conda: proxies from .condarc, retrying
    HTTP adapters, file://, ftp:// and s3:// support, and a conda User-Agent.
    """

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies

        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())

        # Enable s3:// urls
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = "conda/%s %s" % (
            conda.__version__, self.headers['User-Agent'])
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter serving s3:// URLs via boto, spooling to a temp file."""

    def __init__(self):
        super(S3Adapter, self).__init__()
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with: conda install boto\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_S3_info(request.url)

        try:
            bucket = conn.get_bucket(bucket_name)
        except boto.exception.S3ResponseError as exc:
            resp.status_code = 404
            resp.raw = exc
            return resp

        key = bucket.get_key(key_string)
        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            # Spool the object to disk so resp.raw is a real file object.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        if self._temp_file:
            os.remove(self._temp_file)


def url_to_S3_info(url):
    """
    Convert a S3 url to a tuple of bucket and key
    """
    parsed_url = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed_url.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    bucket, key = parsed_url.host, parsed_url.path
    return bucket, key
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter serving file:// URLs from the local filesystem."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass


def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    path = url[len('file:'):].lstrip('/')
    path = urlparse.unquote(path)
    if _url_drive_re.match(path):
        # Windows drive letter, e.g. 'c|/...' or 'c:/...'
        path = path[0] + ':' + path[2:]
    else:
        path = '/' + path
    return path

# matches a leading Windows drive letter like 'c:' or 'c|'
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''
    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr,}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Get the method and attempt to find the function to call.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()
        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()
        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close
        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()
        # Alias the close method.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()
        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

            # Decode the base64 encoded string.
            decoded = base64.b64decode(encoded)

            # The string is of the form 'username:password'. Split on the
            # colon.
            components = decoded.split(':')
            username = components[0]
            password = components[1]
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    '''Returns a callback suitable for use by the FTP library. This callback
    will repeatedly save data into the variable provided to this function. This
    variable should be a file-like structure.'''
    def callback(data):
        variable.write(data)
        return

    return callback


class AuthError(Exception):
    '''Denotes an error with authentication.'''
    pass


def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    return build_response(request, data, code, 'ascii')


def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    return build_response(request, data, code, None)


def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()

    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status lines look like '226 Transfer complete.'
    response.status_code = int(code.split()[0])

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)

    return response


def parse_multipart_files(request):
    '''Given a prepared reqest, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    buf.write(''.join(filedata))
    buf.seek(0)

    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ... '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U
    url = ''

    # We use "is not None" we want things to happen with empty strings (or 0 port)
    if scheme is not None:
        url = scheme + '://'
    if auth is not None:
        url += auth + '@'
    if host is not None:
        url += host
    if port is not None:
        url += ':' + str(port)
    if path is not None:
        url += path
    if query is not None:
        url += '?' + query
    if fragment is not None:
        url += '#' + fragment

    return url
<|code_end|>
conda/fetch.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, dirname, isdir, join
import sys
import getpass
import warnings
from functools import wraps
from conda import config
from conda.utils import memoized
from conda.connection import CondaSession, unparse_url, RETRIES
from conda.compat import itervalues, input, urllib_quote
from conda.lock import Locked
import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Return the repodata cache directory, creating it if necessary."""
    cache_dir = join(config.pkgs_dirs[0], 'cache')
    try:
        os.makedirs(cache_dir)
    except OSError:
        # directory already exists (or is not creatable)
        pass
    return cache_dir


def cache_fn_url(url):
    """Map a channel URL to a short, stable cache file name."""
    md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
    return '%s.json' % (md5[:8],)


def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy HTTP header *http_key* from *resp* into d[dict_key], if present."""
    value = resp.headers.get(http_key)
    if value:
        d[dict_key] = value


# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator: log *msg* (plus call args) to the dot logger after the call."""
    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def func(*args, **kwargs):
            res = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return res
        return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """
    Fetch (or revalidate via ETag/Last-Modified) repodata.json.bz2 for *url*.

    Returns the parsed repodata dict, or None when the channel has no usable
    repodata (e.g. a missing noarch directory). Results are cached on disk.
    """
    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies,
                           verify=config.ssl_verify)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (config.remove_binstar_tokens(url), e))
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find anaconda.org user %s' %
                       config.remove_binstar_tokens(url).split(
                           config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % config.remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None
        elif (e.response.status_code == 401 and config.rc.get('channel_alias',
              config.DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (config.hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            return fetch_repodata(config.remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, config.remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)
    except requests.exceptions.SSLError as e:
        # SSL failures are reported but not fatal: fall through and return
        # whatever cache we have.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error %s\n" % e.message)
        log.debug(msg)
    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, config.remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % config.remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = config.remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        # a read-only cache dir is tolerated; we just lose the disk cache
        pass

    return cache or None
def handle_proxy_407(url, session):
    """Ask the user for proxy credentials and embed them in the session.

    The proxy URL stored on ``session.proxies`` for the scheme of *url*
    is rewritten to carry the ``username:password`` pair, so subsequent
    requests through that proxy authenticate automatically.
    """
    # HTTPProxyAuth would be the obvious alternative, but it is broken for
    # https proxies (https://github.com/kennethreitz/requests/issues/2061),
    # so the credentials are spliced directly into the proxy URL instead.
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    user, pwd = get_proxy_username_and_pass(parsed.scheme)
    proxy_url = session.proxies[parsed.scheme]
    session.proxies[parsed.scheme] = add_username_and_pass_to_url(
        proxy_url, user, pwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with ``username:passwd`` inserted as its auth component.

    The password is percent-encoded so characters such as ``@`` or ``:``
    cannot corrupt the resulting URL.
    """
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    # Slot 1 of the parsed URL tuple is the "auth" component.
    parts[1] = '%s:%s' % (username, urllib_quote(passwd, ''))
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials for *scheme*.

    Memoized, so the user is asked at most once per scheme per process.
    """
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index):
    """Augment *index* with metadata for locally cached packages.

    Walks every package cache directory and, for each extracted package
    that is not already present in *index*, loads its ``info/index.json``
    and registers it under the ``<dist>.tar.bz2`` key.
    """
    cache_dirs = (d for d in config.pkgs_dirs if isdir(d))
    for pkgs_dir in cache_dirs:
        for dist_name in os.listdir(pkgs_dir):
            key = dist_name + '.tar.bz2'
            if key in index:
                continue  # repodata already covers this package
            meta_path = join(pkgs_dir, dist_name, 'info', 'index.json')
            try:
                with open(meta_path) as fh:
                    meta = json.load(fh)
            except IOError:
                # Not an extracted package directory; skip it.
                continue
            meta.setdefault('depends', [])
            log.debug("adding cached pkg to index: %s" % key)
            index[key] = meta
def add_pip_dependency(index):
    """Make every Python 2.x/3.x package record in *index* depend on pip."""
    for meta in itervalues(index):
        is_python = meta['name'] == 'python'
        if is_python and meta['version'].startswith(('2.', '3.')):
            deps = meta.setdefault('depends', [])
            deps.append('pip')
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata for every channel and merge it into one index.

    Returns a dict mapping '<name>-<version>-<build>.tar.bz2' to the
    package's metadata record (with a 'channel' key added).  Channels are
    processed in reverse order, so channels listed first win on update.
    When *unknown* is true, locally cached packages missing from the
    repodata are added as well.  Memoized per argument combination.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(
                        fetch_repodata, url, use_cache=use_cache,
                        session=session), url)
                                         for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                        use_cache=use_cache, session=session)),
                        reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            # Remember which channel each record came from.
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index)
    add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`

    *info* is a repodata record: it must provide name/version/build,
    'channel' and 'md5', and may provide 'sig' (signature location).
    Exits the process if a provided signature does not verify.
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]
    session = session or CondaSession()

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from conda.signature import verify, SignatureError

        fn2 = fn + '.sig'
        # A 'sig' value of '.' means the signature lives next to the
        # package in its channel; otherwise 'sig' is a base URL.
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/') + '/') + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """Stream *url* to *dst_path*, with retries and optional MD5 check.

    The file is written to '<dst_path>.part' and only renamed into place
    once it downloaded completely and (when *md5* is given) its checksum
    matched.  When *urlstxt* is true the URL is appended to urls.txt in
    the destination directory.  Raises RuntimeError on any unrecoverable
    failure.  407 responses trigger a proxy-credential prompt and retry.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407: # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e): # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries: # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
class TmpDownload(object):
    """Context manager that downloads a URL into a temporary directory.

    Entering yields a local path to the file; on exit the temporary
    directory (if one was created) is removed.  A plain filesystem path
    (anything without '://') is passed through untouched.
    """

    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # Already a local file: nothing to download, nothing to clean up.
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from conda.console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        target = join(self.tmp_dir, basename(self.url))
        download(self.url, target)
        return target

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/connection.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
import re
import mimetypes
import os
import email
import base64
import ftplib
import cgi
from io import BytesIO
import tempfile
import conda
from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers, ssl_verify
import requests
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """requests.Session preconfigured for conda.

    Adds proxy settings from the condarc, HTTP retry support, transport
    adapters for file://, ftp:// and s3:// channels, a conda User-Agent,
    and the configured SSL-verification policy.
    """

    timeout = None

    def __init__(self, *args, **kwargs):
        # 'retries' is our own kwarg; pop it before delegating to requests.
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies

        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())

        # Enable s3:// urls
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = "conda/%s %s" % (
            conda.__version__, self.headers['User-Agent'])

        # Honor the configured ssl_verify setting for every request made
        # through this session.
        self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Requests transport adapter that serves ``s3://`` URLs via boto.

    The requested key is downloaded to a temporary file which backs the
    response's ``raw`` stream; the file is removed when the adapter is
    closed.
    """

    def __init__(self):
        super(S3Adapter, self).__init__()
        # Path of the downloaded key, if any; removed in close().
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with: conda install boto\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_S3_info(request.url)

        # A missing bucket or key is reported as a plain 404 so callers
        # can treat S3 like any other channel.
        try:
            bucket = conn.get_bucket(bucket_name)
        except boto.exception.S3ResponseError as exc:
            resp.status_code = 404
            resp.raw = exc
            return resp

        key = bucket.get_key(key_string)
        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """Split an ``s3://bucket/key`` URL into its (bucket, key) pair."""
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Requests transport adapter that serves ``file://`` URLs.

    Stat metadata is mapped onto HTTP-style headers and the opened file
    object becomes the response's ``raw`` stream.
    """

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            # NOTE(review): resp.raw is set to the exception object here,
            # not a file-like object, and no headers are set — callers
            # must not try to stream a 404 response body.
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass
def url_to_path(url):
    """Convert a ``file:`` URL into a local filesystem path.

    Windows drive letters (``file:///C|/...`` or ``file:///C:/...``) are
    restored to ``C:/...`` form; every other path gets a leading slash.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    raw = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(raw):
        # 'C|/rest' or 'C:/rest' -> 'C:/rest'
        return raw[0] + ':' + raw[2:]
    return '/' + raw
# Matches a Windows drive designator at the start of a path ('c:' or the
# browser-style 'c|' form), case-insensitively.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''
    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr,}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Get the method and attempt to find the function to call.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))

        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''

        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

            # Decode the base64 encoded string.
            # NOTE(review): on Python 3, b64decode returns bytes, so the
            # split(':') below would raise TypeError — confirm this path
            # is only exercised on Python 2, or decode to str first.
            decoded = base64.b64decode(encoded)

            # The string is of the form 'username:password'. Split on the
            # colon.
            components = decoded.split(':')
            username = components[0]
            password = components[1]
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    """Build a callback for ftplib retrieval that appends into *variable*.

    *variable* must be file-like; every chunk ftplib hands to the
    returned callback is written to it verbatim.
    """
    def _sink(chunk):
        variable.write(chunk)

    return _sink
class AuthError(Exception):
    """Raised when the Authorization header is malformed or unsupported."""
def build_text_response(request, data, code):
    '''Build a response for textual data, decoded as ascii.'''
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown (no decoding).'''
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()

    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status lines look like '226 Transfer complete.'; the leading
    # integer becomes the HTTP-style status code.
    response.status_code = int(code.split()[0])

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared reqest, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # NOTE(review): ''.join(...) yields a str; writing a str to a BytesIO
    # fails on Python 3, so this path looks Python-2 only — confirm.
    buf.write(''.join(filedata))
    buf.seek(0)

    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
"""
Convert a :class:`.Url` into a url
The input can be any iterable that gives ['scheme', 'auth', 'host',
'port', 'path', 'query', 'fragment']. Unused items should be None.
This function should more or less round-trip with :func:`.parse_url`. The
returned url may not be exactly the same as the url inputted to
:func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
with a blank port).
Example: ::
>>> Url = parse_url('http://google.com/mail/')
>>> unparse_url(Url)
'http://google.com/mail/'
>>> unparse_url(['http', 'username:password', 'host.com', 80,
... '/path', 'query', 'fragment'])
'http://username:password@host.com:80/path?query#fragment'
"""
scheme, auth, host, port, path, query, fragment = U
url = ''
# We use "is not None" we want things to happen with empty strings (or 0 port)
if scheme is not None:
url = scheme + '://'
if auth is not None:
url += auth + '@'
if host is not None:
url += host
if port is not None:
url += ':' + str(port)
if path is not None:
url += path
if query is not None:
url += '?' + query
if fragment is not None:
url += '#' + fragment
return url
conda/fetch.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import bz2
import json
import shutil
import hashlib
import tempfile
from logging import getLogger
from os.path import basename, dirname, isdir, join
import sys
import getpass
import warnings
from functools import wraps
from conda import config
from conda.utils import memoized
from conda.connection import CondaSession, unparse_url, RETRIES
from conda.compat import itervalues, input, urllib_quote
from conda.lock import Locked
import requests
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
# When True, fetch_repodata raises on connection errors instead of silently
# falling back to the locally cached repodata.
fail_unknown_host = False
def create_cache_dir():
    """Ensure the repodata cache directory exists and return its path.

    The cache lives under the first configured package cache directory.
    """
    cache_dir = join(config.pkgs_dirs[0], 'cache')
    try:
        os.makedirs(cache_dir)
    except OSError:
        # Best effort: the directory most likely already exists.
        pass
    return cache_dir
def cache_fn_url(url):
    """Map a channel URL to its local repodata cache filename.

    The name is the first 8 hex digits of the URL's MD5 plus '.json',
    giving a short, stable, filesystem-safe identifier per channel.
    """
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy a truthy response header into *d* under *dict_key*.

    Missing or empty headers leave *d* untouched.
    """
    header_value = resp.headers.get(http_key)
    if not header_value:
        return
    d[dict_key] = header_value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator: debug-log *msg* plus the call arguments after the call
    returns, leaving the return value untouched."""

    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def func(*args, **kwargs):
            res = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return res
        return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (or load from cache) the repodata for a single channel *url*.

    Returns the repodata dict (always containing at least a 'packages'
    key) or None for channels that are allowed to be absent (missing
    /noarch/ directories).  Uses ETag / Last-Modified headers for
    conditional requests and persists the result in the local cache
    directory.

    Raises RuntimeError for invalid index files and unrecoverable HTTP
    errors; connection failures fall back to the cached data unless the
    module-level ``fail_unknown_host`` flag is set.
    """
    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # No cache yet, or a corrupt one: start from an empty index.
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            # 304 Not Modified keeps the cached copy; anything else
            # replaces it wholesale.
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (config.remove_binstar_tokens(url), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(config.DEFAULT_CHANNEL_ALIAS):
                msg = ('Could not find anaconda.org user %s' %
                       config.remove_binstar_tokens(url).split(
                           config.DEFAULT_CHANNEL_ALIAS)[1].split('/')[0])
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % config.remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None

        elif (e.response.status_code == 401 and config.rc.get('channel_alias',
                config.DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (config.hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry without the (apparently stale) binstar token.
            return fetch_repodata(config.remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, config.remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.SSLError as e:
        msg = "SSL Error: %s\n" % e
        # Bug fix: exceptions have no .message attribute on Python 3
        # (PEP 352 removed it); format the exception object itself.
        stderrlog.info("SSL verification error %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        msg = "Connection error: %s: %s\n" % (e, config.remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % config.remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = config.remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        # The cache is best-effort; failing to persist it is not fatal.
        pass

    return cache or None
def handle_proxy_407(url, session):
    """Ask the user for proxy credentials and embed them in the session.

    The proxy URL stored on ``session.proxies`` for the scheme of *url*
    is rewritten to carry the ``username:password`` pair, so subsequent
    requests through that proxy authenticate automatically.
    """
    # HTTPProxyAuth would be the obvious alternative, but it is broken for
    # https proxies (https://github.com/kennethreitz/requests/issues/2061),
    # so the credentials are spliced directly into the proxy URL instead.
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    user, pwd = get_proxy_username_and_pass(parsed.scheme)
    proxy_url = session.proxies[parsed.scheme]
    session.proxies[parsed.scheme] = add_username_and_pass_to_url(
        proxy_url, user, pwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with ``username:passwd`` inserted as its auth component.

    The password is percent-encoded so characters such as ``@`` or ``:``
    cannot corrupt the resulting URL.
    """
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    # Slot 1 of the parsed URL tuple is the "auth" component.
    parts[1] = '%s:%s' % (username, urllib_quote(passwd, ''))
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials for *scheme*.

    Memoized, so the user is asked at most once per scheme per process.
    """
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index):
    """Augment *index* with metadata for locally cached packages.

    Walks every package cache directory and, for each extracted package
    that is not already present in *index*, loads its ``info/index.json``
    and registers it under the ``<dist>.tar.bz2`` key.
    """
    cache_dirs = (d for d in config.pkgs_dirs if isdir(d))
    for pkgs_dir in cache_dirs:
        for dist_name in os.listdir(pkgs_dir):
            key = dist_name + '.tar.bz2'
            if key in index:
                continue  # repodata already covers this package
            meta_path = join(pkgs_dir, dist_name, 'info', 'index.json')
            try:
                with open(meta_path) as fh:
                    meta = json.load(fh)
            except IOError:
                # Not an extracted package directory; skip it.
                continue
            meta.setdefault('depends', [])
            log.debug("adding cached pkg to index: %s" % key)
            index[key] = meta
def add_pip_dependency(index):
    """Make every Python 2.x/3.x package record in *index* depend on pip."""
    for meta in itervalues(index):
        is_python = meta['name'] == 'python'
        if is_python and meta['version'].startswith(('2.', '3.')):
            deps = meta.setdefault('depends', [])
            deps.append('pip')
@memoized
def fetch_index(channel_urls, use_cache=False, unknown=False):
    """Fetch repodata for every channel and merge it into one index.

    Returns a dict mapping '<name>-<version>-<build>.tar.bz2' to the
    package's metadata record (with a 'channel' key added).  Channels are
    processed in reverse order, so channels listed first win on update.
    When *unknown* is true, locally cached packages missing from the
    repodata are added as well.  Memoized per argument combination.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    index = {}
    stdoutlog.info("Fetching package metadata: ")
    session = CondaSession()
    for url in reversed(channel_urls):
        if config.allowed_channels and url not in config.allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(config.allowed_channels)))

    try:
        import concurrent.futures
        from collections import OrderedDict

        repodatas = []
        with concurrent.futures.ThreadPoolExecutor(10) as executor:
            future_to_url = OrderedDict([(executor.submit(
                        fetch_repodata, url, use_cache=use_cache,
                        session=session), url)
                                         for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                        use_cache=use_cache, session=session)),
                        reversed(channel_urls))

    for url, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        for info in itervalues(new_index):
            # Remember which channel each record came from.
            info['channel'] = url
        index.update(new_index)

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index)
    add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`

    *info* is a repodata record: it must provide name/version/build,
    'channel' and 'md5', and may provide 'sig' (signature location).
    Exits the process if a provided signature does not verify.
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]
    session = session or CondaSession()

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from conda.signature import verify, SignatureError

        fn2 = fn + '.sig'
        # A 'sig' value of '.' means the signature lives next to the
        # package in its channel; otherwise 'sig' is a base URL.
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/') + '/') + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """Stream *url* to *dst_path*, with retries and optional MD5 check.

    The file is written to '<dst_path>.part' and only renamed into place
    once it downloaded completely and (when *md5* is given) its checksum
    matched.  When *urlstxt* is true the URL is appended to urls.txt in
    the destination directory.  Raises RuntimeError on any unrecoverable
    failure.  407 responses trigger a proxy-credential prompt and retry.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407: # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e): # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries: # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
class TmpDownload(object):
    """
    Context manager that fetches a URL into a temporary directory and
    removes that directory on exit.  A plain local path (anything without
    '://') is handed back unchanged and nothing is created or deleted.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # if we provide the file itself, no tmp dir is created
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from conda.console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        target = join(self.tmp_dir, basename(self.url))
        download(self.url, target)
        return target

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -19,7 +19,7 @@
import conda
from conda.compat import urlparse, StringIO
-from conda.config import get_proxy_servers
+from conda.config import get_proxy_servers, ssl_verify
import requests
@@ -82,6 +82,7 @@ def __init__(self, *args, **kwargs):
self.headers['User-Agent'] = "conda/%s %s" % (
conda.__version__, self.headers['User-Agent'])
+ self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -97,8 +97,7 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
try:
resp = session.get(url + 'repodata.json.bz2',
- headers=headers, proxies=session.proxies,
- verify=config.ssl_verify)
+ headers=headers, proxies=session.proxies)
resp.raise_for_status()
if resp.status_code != 304:
cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
@@ -323,8 +322,7 @@ def download(url, dst_path, session=None, md5=None, urlstxt=False,
retries = RETRIES
with Locked(dst_dir):
try:
- resp = session.get(url, stream=True, proxies=session.proxies,
- verify=config.ssl_verify)
+ resp = session.get(url, stream=True, proxies=session.proxies)
resp.raise_for_status()
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required |
`conda.install.update_prefix` is modifying cached pkgs on Windows
Since the installed files are hard links into the package cache, conda's prefix-replacement mechanism ends up overwriting the cached copies of its own package files.
File contents inside package:
``` python
x = r'/opt/anaconda1anaconda2anaconda3\Scripts',
```
File contents after installing in 'env1':
``` python
x = r'C:\Miniconda\envs\env1\Scripts',
```
File contents after installing in 'NOTENV1':
``` python
x = r'C:\Miniconda\envs\env1\Scripts',
```
Note that the second install fails, because the first one modified the cached files in `C:\Miniconda\pkgs`.
Reading @asmeurer's comments in #679, I agree that the correct behavior here would be to delete the file being modified and re-create it, so the hard link to the cache is broken before the new contents are written.
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import time
import os
import json
import errno
import shutil
import stat
import sys
import subprocess
import tarfile
import traceback
import logging
import shlex
from os.path import abspath, basename, dirname, isdir, isfile, islink, join
# Use the real inter-process lock when the full conda package is available;
# fall back to a no-op stand-in when this module runs as a standalone
# script (the Anaconda installer ships it without the rest of conda).
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op drop-in replacement for conda.lock.Locked.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
on_win = bool(sys.platform == 'win32')

if on_win:
    # ctypes bindings for the kernel32 link APIs; presumably used because
    # os.link/os.symlink are not usable on this platform/Python — TODO confirm
    import ctypes
    from ctypes import wintypes

    # on Windows we cannot update these packages in the root environment
    # because of the file lock problem
    win_ignore_root = set(['python'])

    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        # CreateSymbolicLinkW does not exist on older Windows versions;
        # win_soft_link() reports "not supported" when it is missing.
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')

class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None

# Attach the no-op handler so importing this module never triggers the
# "No handlers could be found" warning.
log.addHandler(NullHandler())
# Allowed values for the `linktype` argument of _link()/link().
LINK_HARD = 1  # hard link (the default)
LINK_SOFT = 2  # symbolic link
LINK_COPY = 3  # full copy (used for has_prefix/no_link files and symlinks)

# Human-readable names, recorded in conda-meta and shown by the CLI.
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` as a hard link, soft link, or copy,
    dispatching to the win32 helpers on Windows."""
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
        return
    if linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
        return
    if linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # `onerror` callback for shutil.rmtree (used by rm_rf): clear the
    # read-only bit that made the delete fail, then retry the failed
    # operation `func` on `path`.
    os.chmod(path, stat.S_IWRITE)
    func(path)
def rm_rf(path, max_retries=5):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    A missing path is a no-op; for files/links the unlink is attempted
    once and exceptions propagate.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        os.unlink(path)
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    # Windows fallbacks: first clear read-only bits via the
                    # onerror hook, then shell out to `rd /s /q`, which can
                    # succeed where rmtree fails.
                    try:
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                # back off linearly: sleep 0s, 1s, 2s, ... between attempts
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path)
def rm_empty_dir(path):
    """
    Best-effort removal of `path`: it is deleted only when it is an
    existing, empty directory.  Any failure (missing path, non-empty
    directory, not a directory) is silently ignored.
    """
    try:
        os.rmdir(path)
    except OSError:
        # path may not exist, may not be empty, or may not be a directory
        pass
def yield_lines(path):
    """
    Generate the meaningful lines of the text file at `path`, stripped of
    surrounding whitespace; blank lines and '#' comment lines are skipped.
    """
    for raw in open(path):
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            yield stripped
# Default placeholder prefix recorded in packaged files (see
# read_has_prefix()/update_prefix()); rewritten to the real environment
# prefix at link time.
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    Parse an info/has_prefix file and return a dict mapping each listed
    filename to a (placeholder, mode) tuple.  A line carrying only a
    filename defaults to (prefix_placeholder, 'text'); a missing file
    yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                fields = [part.strip('"\'') for part in
                          shlex.split(line, posix=False)]
                placeholder, mode, f = fields
                res[f] = (placeholder, mode)
            except ValueError:
                # old-style line: just a filename
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return res
class PaddingError(Exception):
    # Raised by binary_replace() when the replacement prefix is longer
    # than the placeholder it must overwrite (no room for null padding).
    pass
def binary_replace(data, a, b):
    """
    Replace every null-terminated occurrence of the placeholder `a` in
    `data` with `b`, padding with null bytes so the overall length is
    unchanged.  All arguments must be bytes objects.

    Raises PaddingError when `b` is longer than `a`.
    """
    import re

    def _pad(match):
        count = match.group().count(a)
        pad_len = (len(a) - len(b)) * count
        if pad_len < 0:
            raise PaddingError(a, b, pad_len)
        return match.group().replace(a, b) + b'\0' * pad_len

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """
    Rewrite the embedded `placeholder` prefix inside the file at `path`
    to `new_prefix`.

    mode 'text' does a plain substring replacement; mode 'binary' uses
    binary_replace() so the file length stays constant (null-padded).
    A file that does not contain the placeholder is left untouched.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # was: "Invalid mode:" % mode -- no conversion specifier, which
        # raised TypeError instead of exiting with a message
        sys.exit("Invalid mode: %r" % mode)
    if new_data == data:
        return
    st = os.lstat(path)
    # The file is typically hard-linked to the copy in the package cache;
    # writing through the link would corrupt the cached package and break
    # every later install of it.  Unlink first so the hard link is broken
    # and only this environment's copy gets the new contents.
    os.unlink(path)
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name part of a canonical dist string,
    e.g. 'numpy-1.6.2-py26_0' -> 'numpy'."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Write the conda metadata record for `dist` into
    <prefix>/conda-meta/<dist>.json, merging the package's
    info/index.json with `extra_info` (extra_info wins on conflicts).
    """
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    meta.update(extra_info)
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create (or, with remove=True, delete) menu shortcuts for the
    Menu/*.json entries in `files`.  Acts only on the root environment
    and only when menuinst is importable; per-entry failures are logged,
    never raised.
    """
    if abspath(prefix) != abspath(sys.prefix):
        # we currently only want to create menu items for packages
        # in default environment
        return
    shortcuts = [f for f in files
                 if f.startswith('Menu/') and f.endswith('.json')]
    if not shortcuts:
        return
    try:
        import menuinst
    except ImportError:
        return
    for shortcut in shortcuts:
        try:
            menuinst.install(join(prefix, shortcut), remove, prefix)
        except:
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-link/pre-unlink) script, and return True on
    success, False on failure.  A package without such a script counts as
    success.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no shell available to run the .bat file
            return False
    else:
        args = ['/bin/bash', path]
    # Run with a *copy* of the environment.  The previous code assigned
    # `env = os.environ` and then mutated it, permanently leaking PREFIX,
    # PKG_* and SOURCE_DIR into this process and every later subprocess.
    env = os.environ.copy()
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """
    Look up the URL `dist` was fetched from by scanning urls.txt in the
    package cache, preferring the most recent entry.  Returns None when
    the file is missing or holds no matching URL.
    """
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            urls = fi.read().split()
    except IOError:
        return None
    suffix = '/%s.tar.bz2' % dist
    for url in reversed(urls):
        if url.endswith(suffix):
            return url
    return None
def read_icondata(source_dir):
    """
    Return the package icon (info/icon.png) base64-encoded as a unicode
    string, or None when the package ships no icon.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            raw = fi.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """
    Return the set of file names that must not be linked, i.e. the union
    of the package's info/no_link and info/no_softlink lists (either file
    may be absent).
    """
    res = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            res |= set(yield_lines(join(info_dir, fn)))
        except IOError:
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir):
    """
    Symlink the root environment's conda, activate and deactivate scripts
    into <prefix>/bin, creating the bin directory when needed.  Existing
    entries (links or files) are left alone.
    """
    bin_dir = join(prefix, 'bin')
    if not os.path.lexists(bin_dir):
        os.makedirs(bin_dir)
    for name in ('conda', 'activate', 'deactivate'):
        target = join(prefix, 'bin', name)
        if not os.path.lexists(target):
            os.symlink(join(root_dir, 'bin', name), target)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether hard linking works between the package cache and
    `prefix` by linking the package's index.json to a temporary name.
    Returns True/False; the probe file (and `prefix`, when it was created
    empty here) is always cleaned up again.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    if not isdir(prefix):
        os.makedirs(prefix)
    try:
        _link(src, dst, LINK_HARD)
    except OSError:
        return False
    else:
        return True
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
    """
    Return the set of canonical dist names whose tarball is present in
    the package cache `pkgs_dir` (empty set when the cache is missing).
    """
    if not isdir(pkgs_dir):
        return set()
    entries = os.listdir(pkgs_dir)
    return set(fn[:-8] for fn in entries if fn.endswith('.tar.bz2'))
def is_fetched(pkgs_dir, dist):
    """Return True when the tarball for `dist` exists in the cache."""
    tarball = join(pkgs_dir, dist + '.tar.bz2')
    return isfile(tarball)
def rm_fetched(pkgs_dir, dist):
    """Delete the cached tarball for `dist`, holding the cache lock."""
    with Locked(pkgs_dir):
        rm_rf(join(pkgs_dir, dist + '.tar.bz2'))
# ------- package cache ----- extracted
def extracted(pkgs_dir):
    """
    Return the set of canonical dist names that are fully extracted in
    `pkgs_dir`, i.e. have both info/files and info/index.json present.
    """
    if not isdir(pkgs_dir):
        return set()
    res = set()
    for dn in os.listdir(pkgs_dir):
        info = join(pkgs_dir, dn, 'info')
        if isfile(join(info, 'files')) and isfile(join(info, 'index.json')):
            res.add(dn)
    return res
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory
    (<pkgs_dir>/<dist>.tar.bz2, extracted into <pkgs_dir>/<dist>/).
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True when `dist` is fully extracted in the cache (both
    info/files and info/index.json exist)."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    """Delete the extracted tree for `dist`, holding the cache lock."""
    with Locked(pkgs_dir):
        rm_rf(join(pkgs_dir, dist))
# ------- linkage of packages
def linked(prefix):
    """
    Return the set of canonical dist names linked into the environment
    `prefix` (one conda-meta/*.json record per package); empty set when
    the environment has no conda-meta directory.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    res = set()
    for fn in os.listdir(meta_dir):
        if fn.endswith('.json'):
            res.add(fn[:-5])
    return res
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', dist + '.json')
    try:
        fi = open(meta_path)
    except IOError:
        return None
    with fi:
        return json.load(fi)
def delete_trash(prefix):
    """
    Best-effort removal of the .trash directory inside every package
    cache (files parked there by move_to_trash()).  Failures are logged
    at debug level and ignored.

    NOTE: the `prefix` argument is currently unused.
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir should be the name of the directory in the trash; when None,
    a fresh directory is created with tempfile.mkdtemp().

    Each configured package cache is tried in turn; returns True on the
    first success and False when no cache could take the file (used on
    Windows, where in-use files cannot be deleted but can be moved).
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            # an existing trash dir is fine; any other failure means this
            # cache is unusable, so try the next one
            if e1.errno != errno.EEXIST:
                continue
        if tempdir is None:
            import tempfile
            trash_dir = tempfile.mkdtemp(dir=trash_dir)
        else:
            trash_dir = join(trash_dir, tempdir)
        try:
            try:
                # recreate f's directory structure inside the trash
                os.makedirs(join(trash_dir, dirname(f)))
            except OSError as e1:
                if e1.errno != errno.EEXIST:
                    continue
            shutil.move(join(prefix, f), join(trash_dir, f))
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (f, trash_dir, e))
        else:
            return True
    log.debug("Could not move %s to trash" % f)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or
    None if the package is not linked there.  (Currently a plain alias
    of is_linked(); see the FIXME above.)
    """
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Under both the prefix and package-cache locks this:
      1. runs the package's pre-link script (aborts the process on failure),
      2. links/copies every file from info/files into `prefix`
         (has_prefix/no_link files and symlinked sources are always copied),
      3. rewrites embedded placeholder prefixes via update_prefix(),
      4. creates menu shortcuts and runs the post-link script,
      5. writes the conda-meta/<dist>.json record.
    '''
    if on_win:
        # Try deleting the trash every time we link something.
        delete_trash(prefix)
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                # stale file from a previous install; remove it first
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        # locked file: park it in the trash instead
                        try:
                            move_to_trash(prefix, f)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                # these files must be private copies, never shared links
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # a package may provide its own file list via conda-meta/<dist>.files
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes menu shortcuts, deletes every file
    listed in the package's conda-meta record (trash fallback on Windows),
    deletes the record itself, and finally prunes any directories the
    removal left empty.
    '''
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    # locked file: park it in the trash instead
                    try:
                        log.debug("moving to trash")
                        move_to_trash(prefix, f)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass
        # remove the meta-file last
        os.unlink(meta_path)
        # collect every ancestor directory (inside prefix) of a removed
        # file, then delete the ones that are now empty, deepest first
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """
    Print <prefix>/.messages.txt (accumulated by link scripts) to stdout,
    then delete it.  A missing file is not an error; the file is removed
    in all cases.
    """
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            text = fi.read()
        sys.stdout.write(text)
    except IOError:
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """
    Minimal command-line front end over the API functions above; this is
    what runs when the module is executed standalone (e.g. from the
    installer).
    """
    from pprint import pprint
    from optparse import OptionParser
    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    logging.basicConfig()
    # --list/--extract/--link-all take no positional argument; the other
    # modes need exactly one TARBALL/NAME, normalized to a dist name
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')
    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)
    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        # probe with the first dist whether hard links work, then link
        # everything with that link type
        dists = sorted(extracted(pkgs_dir))
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)
    elif opts.unlink:
        unlink(prefix, dist)


if __name__ == '__main__':
    main()
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import time
import os
import json
import errno
import shutil
import stat
import sys
import subprocess
import tarfile
import traceback
import logging
import shlex
from os.path import abspath, basename, dirname, isdir, isfile, islink, join
try:
from conda.lock import Locked
except ImportError:
# Make sure this still works as a standalone script for the Anaconda
# installer.
class Locked(object):
def __init__(self, *args, **kwargs):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
on_win = bool(sys.platform == 'win32')
if on_win:
import ctypes
from ctypes import wintypes
# on Windows we cannot update these packages in the root environment
# because of the file lock problem
win_ignore_root = set(['python'])
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
def win_hard_link(src, dst):
"Equivalent to os.link, using the win32 CreateHardLink call."
if not CreateHardLink(dst, src, None):
raise OSError('win32 hard link failed')
def win_soft_link(src, dst):
"Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
if CreateSymbolicLink is None:
raise OSError('win32 soft link not supported')
if not CreateSymbolicLink(dst, src, isdir(src)):
raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
""" Copied from Python 2.7 to avoid getting
`No handlers could be found for logger "patch"`
http://bugs.python.org/issue16539
"""
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
if linktype == LINK_HARD:
if on_win:
win_hard_link(src, dst)
else:
os.link(src, dst)
elif linktype == LINK_SOFT:
if on_win:
win_soft_link(src, dst)
else:
os.symlink(src, dst)
elif linktype == LINK_COPY:
# copy relative symlinks as symlinks
if not on_win and islink(src) and not os.readlink(src).startswith('/'):
os.symlink(os.readlink(src), dst)
else:
shutil.copy2(src, dst)
else:
raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def rm_rf(path, max_retries=5):
"""
Completely delete path
max_retries is the number of times to retry on failure. The default is
5. This only applies to deleting a directory.
"""
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
os.unlink(path)
elif isdir(path):
for i in range(max_retries):
try:
shutil.rmtree(path)
return
except OSError as e:
msg = "Unable to delete %s\n%s\n" % (path, e)
if on_win:
try:
shutil.rmtree(path, onerror=_remove_readonly)
return
except OSError as e1:
msg += "Retry with onerror failed (%s)\n" % e1
p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode != 0:
msg += '%s\n%s\n' % (stdout, stderr)
else:
if not isdir(path):
return
log.debug(msg + "Retrying after %s seconds..." % i)
time.sleep(i)
# Final time. pass exceptions to caller.
shutil.rmtree(path)
def rm_empty_dir(path):
"""
Remove the directory `path` if it is a directory and empty.
If the directory does not exist or is not empty, do nothing.
"""
try:
os.rmdir(path)
except OSError: # directory might not exist or not be empty
pass
def yield_lines(path):
for line in open(path):
line = line.strip()
if not line or line.startswith('#'):
continue
yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
"""
reads `has_prefix` file and return dict mapping filenames to
tuples(placeholder, mode)
"""
res = {}
try:
for line in yield_lines(path):
try:
placeholder, mode, f = [x.strip('"\'') for x in
shlex.split(line, posix=False)]
res[f] = (placeholder, mode)
except ValueError:
res[line] = (prefix_placeholder, 'text')
except IOError:
pass
return res
class PaddingError(Exception):
pass
def binary_replace(data, a, b):
    """
    Replace every null-terminated occurrence of the placeholder `a` in
    `data` with `b`, padding with null bytes so the overall length is
    unchanged.  All arguments must be bytes objects.

    Raises PaddingError when `b` is longer than `a`.
    """
    import re

    def _pad(match):
        count = match.group().count(a)
        pad_len = (len(a) - len(b)) * count
        if pad_len < 0:
            raise PaddingError(a, b, pad_len)
        return match.group().replace(a, b) + b'\0' * pad_len

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """
    Rewrite the embedded `placeholder` prefix inside the file at `path`
    to `new_prefix`.  mode 'text' is a plain substring replacement; mode
    'binary' uses binary_replace() so the file length stays constant.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # NOTE(review): format string has no conversion specifier, so this
        # raises TypeError instead of exiting with the message; should be
        # "Invalid mode: %r" % mode
        sys.exit("Invalid mode:" % mode)
    if new_data == data:
        return
    st = os.lstat(path)
    # Breaking the hard link here is what keeps the package-cache copy intact.
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name part of a canonical dist string,
    e.g. 'numpy-1.6.2-py26_0' -> 'numpy'."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
"""
Create the conda metadata, in a given prefix, for a given package.
"""
# read info/index.json first
with open(join(info_dir, 'index.json')) as fi:
meta = json.load(fi)
# add extra info
meta.update(extra_info)
# write into <env>/conda-meta/<dist>.json
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
os.makedirs(meta_dir)
with open(join(meta_dir, dist + '.json'), 'w') as fo:
json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
if abspath(prefix) != abspath(sys.prefix):
# we currently only want to create menu items for packages
# in default environment
return
menu_files = [f for f in files
if f.startswith('Menu/') and f.endswith('.json')]
if not menu_files:
return
try:
import menuinst
except ImportError:
return
for f in menu_files:
try:
menuinst.install(join(prefix, f), remove, prefix)
except:
stdoutlog.error("menuinst Exception:")
stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
"""
call the post-link (or pre-unlink) script, and return True on success,
False on failure
"""
path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
name_dist(dist),
action,
'bat' if on_win else 'sh'))
if not isfile(path):
return True
if on_win:
try:
args = [os.environ['COMSPEC'], '/c', path]
except KeyError:
return False
else:
args = ['/bin/bash', path]
env = os.environ
env['PREFIX'] = str(env_prefix or prefix)
env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
str(dist).rsplit('-', 2)
if action == 'pre-link':
env['SOURCE_DIR'] = str(prefix)
try:
subprocess.check_call(args, env=env)
except subprocess.CalledProcessError:
return False
return True
def read_url(pkgs_dir, dist):
    """Return the URL `dist` was fetched from (per urls.txt), or None."""
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            urls = fi.read().split()
    except IOError:
        return None
    # Scan the newest entries first; the most recent fetch wins.
    for url in reversed(urls):
        if url.endswith('/%s.tar.bz2' % dist):
            return url
    return None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) as base64 text, or None."""
    import base64
    icon_path = join(source_dir, 'info', 'icon.png')
    try:
        with open(icon_path, 'rb') as fi:
            raw = fi.read()
    except IOError:
        # No icon shipped with this package.
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Return the set of paths listed in info/no_link and info/no_softlink."""
    res = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            res.update(yield_lines(join(info_dir, fn)))
        except IOError:
            # Either file may legitimately be absent.
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir):
    """Symlink the root environment's conda/activate/deactivate entry
    points into `prefix`/bin, creating that directory if needed."""
    bin_dir = join(prefix, 'bin')
    if not os.path.lexists(bin_dir):
        os.makedirs(bin_dir)
    # Create each link only if nothing (file, dir or link) exists there yet.
    for name in ('conda', 'activate', 'deactivate'):
        target = join(prefix, 'bin', name)
        if not os.path.lexists(target):
            os.symlink(join(root_dir, 'bin', name), target)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Return True if a file can be hard-linked from the package cache into
    `prefix`, False otherwise (hard links typically fail across filesystems).

    Probes with a single small file (info/index.json) and always cleans up
    the probe link, removing `prefix` again if it stayed empty.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    if not isdir(prefix):
        os.makedirs(prefix)
    try:
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched

def fetched(pkgs_dir):
    """Return the set of canonical names of all fetched tarballs."""
    if not isdir(pkgs_dir):
        return set()
    suffix = '.tar.bz2'
    return {fn[:-len(suffix)] for fn in os.listdir(pkgs_dir)
            if fn.endswith(suffix)}
def is_fetched(pkgs_dir, dist):
    """Return True if the tarball for `dist` exists in the package cache."""
    tarball = join(pkgs_dir, dist + '.tar.bz2')
    return isfile(tarball)
def rm_fetched(pkgs_dir, dist):
    """Remove the cached tarball for `dist`, under the package-cache lock."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted

def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()

    def complete(dn):
        # A directory counts only once both metadata files are present.
        info = join(pkgs_dir, dn, 'info')
        return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))

    return set(dn for dn in os.listdir(pkgs_dir) if complete(dn))
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed packages is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        # The tarball <dist>.tar.bz2 is unpacked into a sibling directory
        # named after the dist.
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files. However, we want root to be
            # the owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True when `dist` is fully extracted in the package cache
    (both info/files and info/index.json are present)."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    """Remove the extracted directory for `dist`, under the cache lock."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages

def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    # Every linked package has a <dist>.json record in conda-meta.
    return {fn[:-5] for fn in os.listdir(meta_dir) if fn.endswith('.json')}
# FIXME Functions that begin with `is_` should return True/False

def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', dist + '.json')
    try:
        fi = open(meta_path)
    except IOError:
        return None
    with fi:
        return json.load(fi)
def delete_trash(prefix):
    """Best-effort removal of the .trash directories in all package caches.

    Note: `prefix` is unused; the trash directories live under the package
    caches, not under the environment prefix.
    """
    from conda import config

    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1)
        except OSError as e:
            # Files may still be locked (notably on Windows); try again later.
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir should be the name of the directory in the trash

    Returns True once the file has been moved into some cache's .trash
    directory, False when every cache was tried without success.
    """
    from conda import config

    # Try each package cache in turn; the first one that accepts the file wins.
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')

        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            # An existing trash dir is fine; any other error: next cache.
            if e1.errno != errno.EEXIST:
                continue

        if tempdir is None:
            # No name given: use a fresh unique subdirectory of the trash.
            import tempfile
            trash_dir = tempfile.mkdtemp(dir=trash_dir)
        else:
            trash_dir = join(trash_dir, tempdir)

        try:
            try:
                # Preserve f's relative directory structure inside the trash.
                os.makedirs(join(trash_dir, dirname(f)))
            except OSError as e1:
                if e1.errno != errno.EEXIST:
                    continue
            shutil.move(join(prefix, f), join(trash_dir, f))
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (f, trash_dir, e))
        else:
            return True

    log.debug("Could not move %s to trash" % f)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Return the conda-meta record for `dist` in `prefix`, or None if the
    package is not linked there (thin alias of is_linked)."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Steps: run pre-link script, link/copy every file from the cache into
    the prefix, rewrite placeholder prefixes, create menu items, run the
    post-link script, and finally write the conda-meta record.
    '''
    if on_win:
        # Try deleting the trash every time we link something.
        delete_trash(prefix)

    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_to_trash(prefix, f)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # Files with an embedded placeholder prefix, files listed in
            # no_link, and symlinks must be copied rather than linked.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        # Rewrite the build-time placeholder prefix inside affected files.
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # A <dist>.files override, when present, replaces the package's
            # own file list and is consumed (deleted) here.
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    '''
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)

        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            # Remember each file's directory so empty dirs can be pruned below.
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_to_trash(prefix, f)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass

        # remove the meta-file last
        os.unlink(meta_path)

        # Expand each removed file's directory to all of its ancestors
        # inside the prefix, so the whole now-empty chain gets pruned.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        # Deepest paths first, so children are removed before their parents.
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the .messages.txt accumulated in `prefix` (if any), then
    always remove it."""
    path = join(prefix, '.messages.txt')
    try:
        try:
            fi = open(path)
        except IOError:
            # No messages were produced by this transaction.
            return
        with fi:
            sys.stdout.write(fi.read())
    finally:
        # The message file is consumed exactly once.
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """Command-line entry point of the low-level install tool.

    Supports listing linked packages, and extracting / linking / unlinking
    a single tarball or dist name; used standalone by the installer.
    """
    from pprint import pprint
    from optparse import OptionParser

    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")

    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")

    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")

    p.add_option('--link',
                 action="store_true",
                 help="link a package")

    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")

    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")

    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()

    logging.basicConfig()

    # --list/--extract/--link-all take no positional argument; the other
    # modes require exactly one TARBALL/NAME.
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')

    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)

    if opts.list:
        pprint(sorted(linked(prefix)))

    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # Probe once whether hard-linking works; otherwise fall back to copy.
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)

    elif opts.extract:
        extract(pkgs_dir, dist)

    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)

    elif opts.unlink:
        unlink(prefix, dist)
# Allow running this module directly as a script (used by the installer).
if __name__ == '__main__':
    main()
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -272,6 +272,7 @@ def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
if new_data == data:
return
st = os.lstat(path)
+ os.remove(path) # Remove file before rewriting to avoid destroying hard-linked cache.
with open(path, 'wb') as fo:
fo.write(new_data)
os.chmod(path, stat.S_IMODE(st.st_mode)) |
recursion problem or infinite loop in dependency solver
I am executing this command:
``` bash
/opt/wakari/miniconda/bin/conda update --dry-run -c https://conda.anaconda.org/wakari/channel/release:0.8.0 -p /opt/wakari/wakari-server --all
```
And it appears to loop "forever" (well, for a few minutes, at least), stating again and again:
``` bash
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
```
(you get the point), and then when I finally `CTRL-C` to abort, the stack-trace suggests a recursion problem:
``` python
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
^CTraceback (most recent call last):
File "/opt/wakari/miniconda/bin/conda", line 5, in <module>
sys.exit(main())
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 194, in main
args_func(args, p)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 201, in args_func
args.func(args, p)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/main_update.py", line 38, in execute
install.install(args, parser, 'update')
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 358, in install
return install(args, parser, command=command)
... REPEATED MANY TIMES ...
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 358, in install
return install(args, parser, command=command)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 358, in install
return install(args, parser, command=command)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 337, in install
minimal_hint=args.alt_hint)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/plan.py", line 402, in install_actions
config.track_features, minimal_hint=minimal_hint):
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 726, in solve
for pkg in self.get_pkgs(ms, max_only=max_only):
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/utils.py", line 142, in __call__
res = cache[key] = self.func(*args, **kw)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 261, in get_pkgs
pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 168, in __init__
self.norm_version = normalized_version(self.version)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 28, in normalized_version
return verlib.NormalizedVersion(version)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/verlib.py", line 93, in __init__
self._parse(s, error_on_huge_major_num)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/verlib.py", line 110, in _parse
block = self._parse_numdots(groups['version'], s, False, 2)
File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/verlib.py", line 159, in _parse_numdots
if len(n) > 1 and n[0] == '0':
KeyboardInterrupt
```
| conda/resolve.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from collections import defaultdict
from functools import partial
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints, evaluate_eq, minimal_unsatisfiable_subset, MaximumIterationsError)
from conda.console import setup_handlers
from conda import config
from conda.toposort import toposort
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
    """Parse `version` into a comparable verlib.NormalizedVersion.

    Falls back to the raw string when the version cannot be rationalized,
    so callers must tolerate mixed-type comparisons (TypeError).
    """
    version = version.replace('rc', '.dev99999')
    try:
        return verlib.NormalizedVersion(version)
    except verlib.IrrationalVersionError:
        # Try verlib's best-effort rationalization before giving up.
        suggested = verlib.suggest_normalized_version(version)
        if suggested:
            return verlib.NormalizedVersion(suggested)
        return version
class NoPackagesFound(RuntimeError):
    """Raised when no package (or one of its dependencies) matches a spec.

    `pkgs` carries the spec strings / filenames that could not be found,
    so callers can exclude just those candidates instead of bailing out.
    """
    def __init__(self, msg, pkgs):
        RuntimeError.__init__(self, msg)
        self.pkgs = pkgs
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')


def ver_eval(version, constraint):
    """
    return the Boolean result of a comparison between two versions, where the
    second argument includes the comparison operator. For example,
    ver_eval('1.2', '>=1.1') will return True.
    """
    m = const_pat.match(constraint)
    if m is None:
        raise RuntimeError("Did not recognize version specification: %r" %
                           constraint)
    op, b = m.groups()
    na = normalized_version(version)
    nb = normalized_version(b)
    comparisons = {
        '==': lambda x, y: x == y,
        '>=': lambda x, y: x >= y,
        '<=': lambda x, y: x <= y,
        '>': lambda x, y: x > y,
        '<': lambda x, y: x < y,
        '!=': lambda x, y: x != y,
    }
    try:
        compare = comparisons[op]
    except KeyError:
        raise RuntimeError("Did not recognize version comparison operator: %r" %
                           constraint)
    # Prefer comparing normalized versions; fall back to the raw strings
    # when the two sides are of mutually incomparable types.
    try:
        return compare(na, nb)
    except TypeError:
        return compare(version, b)
class VersionSpec(object):
    """One alternative of a version spec: either a comma-separated list of
    comparison constraints (e.g. '>=1.7,<1.8') or a glob-style pattern
    (e.g. '1.7*')."""
    def __init__(self, spec):
        assert '|' not in spec
        if spec.startswith(('=', '<', '>', '!')):
            # Comparison constraints, each evaluated via ver_eval.
            self.regex = False
            self.constraints = spec.split(',')
        else:
            # Translate the glob pattern into an anchored regular expression.
            self.regex = True
            escaped = spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(r'(%s)$' % escaped)

    def match(self, version):
        if not self.regex:
            return all(ver_eval(version, c) for c in self.constraints)
        return bool(self.pat.match(version))
class MatchSpec(object):
    """A package requirement: 'name', 'name version-spec', or
    'name version build' (strictness 1, 2 and 3 respectively)."""
    def __init__(self, spec):
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3, repr(spec)
        self.name = parts[0]
        if self.strictness == 2:
            # '|' separates alternative version specs (logical OR).
            self.vspecs = [VersionSpec(s) for s in parts[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])

    def match(self, fn):
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        if self.strictness == 3:
            return bool((version, build) == self.ver_build)
        return True

    def to_filename(self):
        # Only a fully-pinned spec corresponds to exactly one filename.
        if self.strictness != 3:
            return None
        return '%s-%s-%s.tar.bz2' % ((self.name,) + self.ver_build)

    def __eq__(self, other):
        return self.spec == other.spec

    def __hash__(self):
        return hash(self.spec)

    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)

    def __str__(self):
        return self.spec
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """
    def __init__(self, fn, info):
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        # norm_version may be a NormalizedVersion, or the raw version
        # string when it cannot be rationalized; comparisons below fall
        # back to plain strings on TypeError.
        self.norm_version = normalized_version(self.version)
        self.info = info

    def _asdict(self):
        result = self.info.copy()
        result['fn'] = self.fn
        result['norm_version'] = str(self.norm_version)
        return result

    # http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp

    # def __cmp__(self, other):
    #     if self.name != other.name:
    #         raise ValueError('cannot compare packages with different '
    #                          'names: %r %r' % (self.fn, other.fn))
    #     try:
    #         return cmp((self.norm_version, self.build_number),
    #                   (other.norm_version, other.build_number))
    #     except TypeError:
    #         return cmp((self.version, self.build_number),
    #                   (other.version, other.build_number))

    def __lt__(self, other):
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        # NOTE(review): `other.build` appears on the left tuple and
        # `self.build` on the right, i.e. build strings break ties in
        # *reverse* order — confirm whether this is intentional.
        try:
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))

    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))

    def __gt__(self, other):
        # Derived from __lt__/__eq__; raises like __lt__ on name mismatch.
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self < other or self == other

    def __ge__(self, other):
        return self > other or self == other

    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
    """Dependency resolver over a channel index
    (a dict mapping package filename -> package metadata)."""

    def __init__(self, index):
        self.index = index
        # Map package name -> list of filenames of all builds of that name.
        self.groups = defaultdict(list)  # map name to list of filenames
        for fn, info in iteritems(index):
            self.groups[info['name']].append(fn)
        # Cache for ms_depends(); reset to {} (e.g. in tests) to invalidate.
        self.msd_cache = {}
def find_matches(self, ms):
for fn in sorted(self.groups[ms.name]):
if ms.match(fn):
yield fn
    def ms_depends(self, fn):
        """Return the list of MatchSpec dependencies of package file `fn`.

        Raises NoPackagesFound when the package's metadata lacks a
        'depends' entry, so broken builds can be excluded by callers.
        """
        # the reason we don't use @memoize here is to allow resetting the
        # cache using self.msd_cache = {}, which is used during testing
        try:
            res = self.msd_cache[fn]
        except KeyError:
            if not 'depends' in self.index[fn]:
                raise NoPackagesFound('Bad metadata for %s' % fn, [fn])
            depends = self.index[fn]['depends']
            res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
        return res
    @memoize
    def features(self, fn):
        """Features enabled by package `fn` (the 'features' index field)."""
        return set(self.index[fn].get('features', '').split())

    @memoize
    def track_features(self, fn):
        """Features that `fn` turns on globally ('track_features' field)."""
        return set(self.index[fn].get('track_features', '').split())
    @memoize
    def get_pkgs(self, ms, max_only=False):
        """Return all Package objects matching MatchSpec `ms`.

        With max_only=True, only builds tied with the maximum
        (norm_version, build_number) are kept.  Raises NoPackagesFound
        when nothing matches.  Memoized; results must not be mutated.
        """
        pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
        if not pkgs:
            raise NoPackagesFound("No packages found in current %s channels matching: %s" % (config.subdir, ms), [ms.spec])
        if max_only:
            maxpkg = max(pkgs)
            ret = []
            for pkg in pkgs:
                try:
                    if (pkg.name, pkg.norm_version, pkg.build_number) == \
                       (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                        ret.append(pkg)
                except TypeError:
                    # They are not equal (mixed-type norm_version values
                    # do not compare).
                    pass
            return ret
        return pkgs
    def get_max_dists(self, ms):
        """Yield the filenames of the highest-version builds matching `ms`."""
        pkgs = self.get_pkgs(ms, max_only=True)
        # NOTE(review): get_pkgs already raises when nothing matches, so this
        # guard appears unreachable; kept as a safety net.
        if not pkgs:
            raise NoPackagesFound("No packages found in current %s channels matching: %s" % (config.subdir, ms), [ms.spec])
        for pkg in pkgs:
            yield pkg.fn
    def all_deps(self, root_fn, max_only=False):
        """Return {filename: Package} for the transitive dependencies of
        `root_fn` (the root itself is not included).

        Candidates whose own dependencies cannot be found are dropped from
        the result; NoPackagesFound propagates only when *no* candidate of
        some dependency spec is usable.
        """
        res = {}

        def add_dependents(fn1, max_only=False):
            for ms in self.ms_depends(fn1):
                found = False
                notfound = []
                for pkg2 in self.get_pkgs(ms, max_only=max_only):
                    if pkg2.fn in res:
                        found = True
                        continue
                    res[pkg2.fn] = pkg2
                    try:
                        # Fully-pinned specs (strictness 3) are not recursed
                        # into here.
                        if ms.strictness < 3:
                            add_dependents(pkg2.fn, max_only=max_only)
                    except NoPackagesFound as e:
                        for pkg in e.pkgs:
                            if pkg not in notfound:
                                notfound.append(pkg)
                        # Roll back this candidate: its deps are missing.
                        if pkg2.fn in res:
                            del res[pkg2.fn]
                    else:
                        found = True

                if not found:
                    raise NoPackagesFound("Could not find some dependencies "
                        "for %s: %s" % (ms, ', '.join(notfound)), notfound)

        add_dependents(root_fn, max_only=max_only)
        return res
    def gen_clauses(self, v, dists, specs, features):
        """Yield CNF clauses (tuples of SAT literals, using the fn->literal
        map `v`) encoding the install problem over candidate set `dists`."""
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)

        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield (-v1, -v2)

        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield tuple(clause)

                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield tuple(clause)

                # Don't install any package that has a feature that wasn't requested.
                for fn in self.find_matches(ms):
                    if fn in dists and self.features(fn) - features:
                        yield (-v[fn],)

        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [v[fn] for fn in self.find_matches(ms)
                          if fn in dists and feat in self.features(fn)]
                if len(clause) > 0:
                    yield tuple(clause)

            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield (-v[fn],)

            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms)
                      if fn in dists]
            assert len(clause) >= 1, ms
            yield tuple(clause)
    def generate_version_eq(self, v, dists, include0=False):
        """Build the linear objective (coefficient, literal) pairs that
        penalize older versions: within each name group, newer builds get
        coefficient 0, each strictly older (version, build_number) level
        gets the next integer.  Returns (eq, max_rhs) where max_rhs is the
        objective's maximum possible value."""
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)

        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # Newest first, so rank i grows as versions get older.
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                        dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # Mixed-type norm_version values: treat as a new level.
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i
        return eq, max_rhs
def get_dists(self, specs, max_only=False):
dists = {}
for spec in specs:
found = False
notfound = []
for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
if pkg.fn in dists:
found = True
continue
try:
dists.update(self.all_deps(pkg.fn, max_only=max_only))
except NoPackagesFound as e:
# Ignore any package that has nonexisting dependencies.
for pkg in e.pkgs:
if pkg not in notfound:
notfound.append(pkg)
else:
dists[pkg.fn] = pkg
found = True
if not found:
raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), notfound)
return dists
    def graph_sort(self, must_have):
        """Topologically order the dists in `must_have` (a dict mapping
        package name -> dist string) so dependencies precede dependents;
        dists whose name never appears in the sort are appended at the end."""
        def lookup(value):
            index_data = self.index.get('%s.tar.bz2' % value, {})
            # Dependency names only (the part before the first space).
            return {item.split(' ', 1)[0] for item in index_data.get('depends', [])}

        digraph = {}

        for key, value in must_have.items():
            depends = lookup(value)
            digraph[key] = depends

        sorted_keys = toposort(digraph)

        must_have = must_have.copy()
        # Take all of the items in the sorted keys
        # Don't fail if the key does not exist
        result = [must_have.pop(key) for key in sorted_keys if key in must_have]
        # Take any key that were not sorted
        result.extend(must_have.values())

        return result
    def solve2(self, specs, features, guess=True, alg='BDD',
               returnall=False, minimal_hint=False, unsat_only=False):
        """Solve `specs` via a pseudo-boolean SAT formulation.

        Tries a fast max-versions-only pass first, then falls back to the
        full candidate set with version-preference optimization.  When
        unsatisfiable: with guess=True, exits the process printing a hint;
        otherwise raises RuntimeError.  Returns the list of filenames to
        install (or a list of such lists when returnall=True).
        """
        log.debug("Solving for %s" % str(specs))

        # First try doing it the "old way", i.e., just look at the most recent
        # version of each package from the specs. This doesn't handle the more
        # complicated cases that the pseudo-boolean solver does, but it's also
        # much faster when it does work.

        try:
            dists = self.get_dists(specs, max_only=True)
        except NoPackagesFound:
            # Handle packages that are not included because some dependencies
            # couldn't be found.
            pass
        else:
            v = {}  # map fn to variable number
            w = {}  # map variable number to fn
            i = -1  # in case the loop doesn't run
            for i, fn in enumerate(sorted(dists)):
                v[fn] = i + 1
                w[i + 1] = fn
            m = i + 1

            dotlog.debug("Solving using max dists only")
            clauses = set(self.gen_clauses(v, dists, specs, features))
            try:
                solutions = min_sat(clauses, alg='iterate',
                    raise_on_max_n=True)
            except MaximumIterationsError:
                # Too many minimal solutions: defer to the full solve below.
                pass
            else:
                # Only accept the fast path when the solution is unique.
                if len(solutions) == 1:
                    ret = [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
                    if returnall:
                        return [ret]
                    return ret

        dists = self.get_dists(specs)

        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1

        clauses = set(self.gen_clauses(v, dists, specs, features))
        if not clauses:
            if returnall:
                return [[]]
            return []
        eq, max_rhs = self.generate_version_eq(v, dists)

        # Second common case, check if it's unsatisfiable
        dotlog.debug("Checking for unsatisfiability")
        solution = sat(clauses)

        if not solution:
            if guess:
                if minimal_hint:
                    stderrlog.info('\nError: Unsatisfiable package '
                        'specifications.\nGenerating minimal hint: \n')
                    sys.exit(self.minimal_unsatisfiable_subset(clauses, v, w))
                else:
                    stderrlog.info('\nError: Unsatisfiable package '
                        'specifications.\nGenerating hint: \n')
                    sys.exit(self.guess_bad_solve(specs, features))
            raise RuntimeError("Unsatisfiable package specifications")

        if unsat_only:
            # Caller only wanted a satisfiability check.
            return True

        log.debug("Using alg %s" % alg)

        def version_constraints(lo, hi):
            return set(generate_constraints(eq, m, [lo, hi], alg=alg))

        # Find the smallest objective value (newest versions) by bisection.
        log.debug("Bisecting the version constraint")
        evaluate_func = partial(evaluate_eq, eq)
        constraints = bisect_constraints(0, max_rhs, clauses,
            version_constraints, evaluate_func=evaluate_func)

        # Only relevant for build_BDD
        if constraints and false in constraints:
            # XXX: This should *never* happen. build_BDD only returns false
            # when the linear constraint is unsatisfiable, but any linear
            # constraint can equal 0, by setting all the variables to 0.
            solution = []
        else:
            if constraints and true in constraints:
                constraints = set([])

            dotlog.debug("Finding the minimal solution")
            try:
                solutions = min_sat(clauses | constraints, N=m + 1, alg='iterate',
                    raise_on_max_n=True)
            except MaximumIterationsError:
                solutions = min_sat(clauses | constraints, N=m + 1, alg='sorter')
            assert solutions, (specs, features)

            if len(solutions) > 1:
                stdoutlog.info('\nWarning: %s possible package resolutions (only showing differing packages):\n' % len(solutions))
                pretty_solutions = [{w[lit] for lit in sol if 0 < lit <= m} for
                                    sol in solutions]
                common = set.intersection(*pretty_solutions)
                for sol in pretty_solutions:
                    stdoutlog.info('\t%s,\n' % sorted(sol - common))

            if returnall:
                return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
            return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
@staticmethod
def clause_pkg_name(i, w):
if i > 0:
ret = w[i]
else:
ret = 'not ' + w[-i]
return ret.rsplit('.tar.bz2', 1)[0]
    def minimal_unsatisfiable_subset(self, clauses, v, w):
        """Return a human-readable report of a minimal unsatisfiable subset
        of `clauses`.  Note: `v` is unused here; `w` maps literals back to
        filenames.  The call below resolves to the *module-level*
        minimal_unsatisfiable_subset imported from conda.logic (the method
        name shadows it only on instances)."""
        clauses = minimal_unsatisfiable_subset(clauses, log=True)

        pretty_clauses = []
        for clause in clauses:
            if clause[0] < 0 and len(clause) > 1:
                # (-a, b, c, ...) reads as an implication: a => b or c ...
                pretty_clauses.append('%s => %s' %
                    (self.clause_pkg_name(-clause[0], w), ' or '.join([self.clause_pkg_name(j, w) for j in clause[1:]])))
            else:
                pretty_clauses.append(' or '.join([self.clause_pkg_name(j, w) for j in clause]))
        return "The following set of clauses is unsatisfiable:\n\n%s" % '\n'.join(pretty_clauses)
    def guess_bad_solve(self, specs, features):
        """Produce a hint string identifying a minimal conflicting subset of
        `specs` by repeatedly re-running the solver in unsat-only mode."""
        # TODO: Check features as well
        from conda.console import setup_verbose_handlers
        setup_verbose_handlers()

        # Don't show the dots from solve2 in normal mode but do show the
        # dotlog messages with --debug
        dotlog.setLevel(logging.WARN)

        def sat(specs):
            # Satisfiability oracle for the subset search below.
            try:
                self.solve2(specs, features, guess=False, unsat_only=True)
            except RuntimeError:
                return False
            return True

        hint = minimal_unsatisfiable_subset(specs, sat=sat, log=True)
        if not hint:
            return ''
        if len(hint) == 1:
            # TODO: Generate a hint from the dependencies.
            ret = (("\nHint: '{0}' has unsatisfiable dependencies (see 'conda "
                "info {0}')").format(hint[0].split()[0]))
        else:
            ret = """
Hint: the following packages conflict with each other:
  - %s

Use 'conda info %s' etc. to see the dependencies for each package.""" % ('\n - '.join(hint), hint[0].split()[0])

        if features:
            ret += """
Note that the following features are enabled:
  - %s
""" % ('\n - '.join(features))
        return ret
def explicit(self, specs):
    """Resolve *specs* without invoking the solver, when possible.

    Returns a sorted list of filenames when either (A) a single fully
    explicit spec (name version build) is given and all of its
    dependencies are explicit too, or (B) several specs are given and
    every one of them is fully explicit (their dependencies are NOT
    included).  Returns None whenever the specs are not explicit
    enough for this shortcut.
    """
    if len(specs) == 1:
        ms = MatchSpec(specs[0])
        fn = ms.to_filename()
        if fn is None or fn not in self.index:
            return None
        res = [dep.to_filename() for dep in self.ms_depends(fn)]
        res.append(fn)
    else:
        res = [MatchSpec(spec).to_filename() for spec in specs
               if spec != 'conda']
    if None in res:
        return None
    res.sort()
    log.debug('explicit(%r) finished' % specs)
    return res
@memoize
def sum_matches(self, fn1, fn2):
    """Count how many of fn1's dependency specs are satisfied by fn2."""
    total = 0
    for ms in self.ms_depends(fn1):
        total += ms.match(fn2)
    return total
def find_substitute(self, installed, features, fn, max_only=False):
    """Find a substitute package for *fn* that does NOT carry *features*.

    The substitute keeps the same package name and version; among the
    feature-free candidates, the one whose dependencies match the
    *installed* packages best is preferred.  Returns None when no
    candidate qualifies.
    """
    name, version, unused_build = fn.rsplit('-', 2)
    best_fn = None
    best_score = None
    for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
        fn1 = pkg.fn
        if self.features(fn1).intersection(features):
            continue
        score = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
        # '>=' keeps the later candidate on ties, matching the original
        # dict-overwrite behavior.
        if best_score is None or score >= best_score:
            best_fn, best_score = fn1, score
    return best_fn
def installed_features(self, installed):
    """Collect the union of tracked features over all *installed* packages.

    Packages unknown to the index (KeyError from track_features) simply
    contribute nothing.
    """
    features = set()
    for fn in installed:
        try:
            features |= self.track_features(fn)
        except KeyError:
            pass
    return features
def update_with_features(self, fn, features):
    """Overlay feature-specific dependencies for *fn* into msd_cache.

    Picks the 'with_features_depends' entry whose feature set is fully
    covered by *features* and has the most features, then merges its
    specs over the regular dependency list (by package name).  No-op
    when the index entry has no such table or nothing applies.
    """
    with_features = self.index[fn].get('with_features_depends')
    if with_features is None:
        return
    best = ''
    for fstr in with_features:
        fs = set(fstr.split())
        if fs <= features and len(fs) > len(set(best.split())):
            best = fstr
    if not best:
        return
    merged = {ms.name: ms for ms in self.ms_depends(fn)}
    for spec in with_features[best]:
        ms = MatchSpec(spec)
        merged[ms.name] = ms
    self.msd_cache[fn] = merged.values()
def solve(self, specs, installed=None, features=None, max_only=False,
          minimal_hint=False):
    """Resolve *specs* into a list of package filenames to install.

    Feature sets are accumulated from the *installed* packages and from
    every candidate package before solving; note that this mutates the
    *features* set passed in by the caller.
    """
    if installed is None:
        installed = []
    if features is None:
        features = self.installed_features(installed)
    # First pass: collect the features tracked by every candidate.
    for spec in specs:
        ms = MatchSpec(spec)
        for pkg in self.get_pkgs(ms, max_only=max_only):
            fn = pkg.fn
            features.update(self.track_features(fn))
    log.debug('specs=%r features=%r' % (specs, features))
    # Second pass: swap in feature-specific dependency lists.
    for spec in specs:
        for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
            fn = pkg.fn
            self.update_with_features(fn, features)

    stdoutlog.info("Solving package specifications: ")
    try:
        # Fast path (fully explicit specs) before invoking the solver.
        return self.explicit(specs) or self.solve2(specs, features,
            minimal_hint=minimal_hint)
    except RuntimeError:
        stdoutlog.info('\n')
        raise
if __name__ == '__main__':
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec

    # Ad-hoc driver: resolve the requested specs against the
    # repository's test index.
    parser = OptionParser(usage="usage: %prog [options] SPEC(s)")
    parser.add_option("--mkl", action="store_true")
    opts, args = parser.parse_args()

    with open('../tests/index.json') as fi:
        resolver = Resolve(json.load(fi))

    enabled = set(['mkl']) if opts.mkl else set()
    pprint(resolver.solve([arg2spec(arg) for arg in args], [], enabled))
<|code_end|>
| conda/resolve.py
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from collections import defaultdict
from functools import partial
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints, evaluate_eq, minimal_unsatisfiable_subset, MaximumIterationsError)
from conda.console import setup_handlers
from conda import config
from conda.toposort import toposort
# Module-level loggers: 'dotupdate' carries solver progress dots,
# 'stdoutlog'/'stderrlog' are conda's console channels.  Handlers are
# attached once at import time via setup_handlers().
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
    """Best-effort parse of *version* into a verlib.NormalizedVersion.

    The substring 'rc' is first rewritten to a '.dev99999' marker.  If
    the result is still irrational, verlib's suggested normalization is
    tried; as a last resort the rewritten string itself is returned (so
    callers may receive either a NormalizedVersion or a plain str).
    """
    candidate = version.replace('rc', '.dev99999')
    try:
        return verlib.NormalizedVersion(candidate)
    except verlib.IrrationalVersionError:
        suggested = verlib.suggest_normalized_version(candidate)
        if suggested:
            return verlib.NormalizedVersion(suggested)
    return candidate
class NoPackagesFound(RuntimeError):
    """Raised when a package, or a dependency of one, cannot be found.

    ``pkgs`` records the offending spec strings so callers can exclude
    just those packages from the solve instead of failing outright.
    """

    def __init__(self, msg, pkgs):
        super(NoPackagesFound, self).__init__(msg)
        self.pkgs = pkgs
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')


def ver_eval(version, constraint):
    """
    return the Boolean result of a comparison between two versions, where the
    second argument includes the comparison operator. For example,
    ver_eval('1.2', '>=1.1') will return True.
    """
    m = const_pat.match(constraint)
    if m is None:
        raise RuntimeError("Did not recognize version specification: %r" %
                           constraint)
    op, b = m.groups()
    # Dispatch table instead of an if/elif chain.  Each comparison is
    # first attempted on normalized versions and falls back to plain
    # string comparison when the two are not mutually comparable.
    comparisons = {
        '==': lambda x, y: x == y,
        '>=': lambda x, y: x >= y,
        '<=': lambda x, y: x <= y,
        '>': lambda x, y: x > y,
        '<': lambda x, y: x < y,
        '!=': lambda x, y: x != y,
    }
    if op not in comparisons:
        raise RuntimeError("Did not recognize version comparison operator: %r" %
                           constraint)
    compare = comparisons[op]
    try:
        return compare(normalized_version(version), normalized_version(b))
    except TypeError:
        return compare(version, b)
class VersionSpec(object):
    """One alternative inside a version pattern.

    Specs beginning with a comparison character (=, <, >, !) are parsed
    as comma-separated constraints evaluated by ver_eval(); anything
    else is treated as a glob pattern where '*' matches any run of
    characters and the whole version must be consumed.
    """

    def __init__(self, spec):
        assert '|' not in spec
        self.regex = not spec.startswith(('=', '<', '>', '!'))
        if self.regex:
            pattern = r'(%s)$' % spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(pattern)
        else:
            self.constraints = spec.split(',')

    def match(self, version):
        """True when *version* satisfies this alternative."""
        if self.regex:
            return bool(self.pat.match(version))
        return all(ver_eval(version, c) for c in self.constraints)
class MatchSpec(object):
    """A package requirement of one to three space-separated fields:
    name, optional version pattern(s), optional build string.

    The number of fields is the spec's ``strictness`` (1-3); a
    strictness-3 spec pins an exact distribution.
    """

    def __init__(self, spec):
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3, repr(spec)
        self.name = parts[0]
        if self.strictness == 2:
            # '|'-separated alternatives each become a VersionSpec.
            self.vspecs = [VersionSpec(alt) for alt in parts[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])

    def match(self, fn):
        """True when the distribution filename *fn* satisfies this spec."""
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        if self.strictness == 3:
            return bool((version, build) == self.ver_build)
        return True

    def to_filename(self):
        """The exact filename for a strictness-3 spec, else None."""
        if self.strictness != 3:
            return None
        return self.name + '-%s-%s.tar.bz2' % self.ver_build

    def __eq__(self, other):
        return self.spec == other.spec

    def __hash__(self):
        return hash(self.spec)

    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)

    def __str__(self):
        return self.spec
class Package(object):
    """Sortable wrapper around one repodata index entry.

    Ordering compares (normalized version, build number) and falls back
    to the raw version string when the normalized versions are not
    mutually comparable.
    """

    def __init__(self, fn, info):
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        self.norm_version = normalized_version(self.version)
        self.info = info

    def _asdict(self):
        """Dict form of the metadata, with fn and norm_version added."""
        data = self.info.copy()
        data['fn'] = self.fn
        data['norm_version'] = str(self.norm_version)
        return data

    def __lt__(self, other):
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        try:
            # NB: the build strings are deliberately crossed between the
            # two tuples, so the build string acts as a reversed
            # tie-breaker when version and build number are equal.
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))

    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))

    def __gt__(self, other):
        return not (self < other or self == other)

    def __le__(self, other):
        return self < other or self == other

    def __ge__(self, other):
        return self > other or self == other

    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
    """Dependency resolver over a conda repodata *index*.

    The index maps filenames (``<name>-<version>-<build>.tar.bz2``) to
    metadata dicts.  Resolution translates package/dependency relations
    into boolean clauses and hands them to the SAT machinery in
    conda.logic.
    """

    def __init__(self, index):
        self.index = index
        self.groups = defaultdict(list)  # map name to list of filenames
        for fn, info in iteritems(index):
            self.groups[info['name']].append(fn)
        # fn -> [MatchSpec, ...]; reset to {} in tests (see ms_depends).
        self.msd_cache = {}

    def find_matches(self, ms):
        """Yield filenames (sorted) in the index matching MatchSpec *ms*."""
        for fn in sorted(self.groups[ms.name]):
            if ms.match(fn):
                yield fn

    def ms_depends(self, fn):
        """Return the dependencies of *fn* as a list of MatchSpecs.

        Raises NoPackagesFound when the index entry has no 'depends'
        key, so callers can exclude just that package from the solve.
        """
        # the reason we don't use @memoize here is to allow resetting the
        # cache using self.msd_cache = {}, which is used during testing
        try:
            res = self.msd_cache[fn]
        except KeyError:
            if not 'depends' in self.index[fn]:
                raise NoPackagesFound('Bad metadata for %s' % fn, [fn])
            depends = self.index[fn]['depends']
            res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
        return res

    @memoize
    def features(self, fn):
        """Set of features required by package *fn*."""
        return set(self.index[fn].get('features', '').split())

    @memoize
    def track_features(self, fn):
        """Set of features that installing *fn* turns on."""
        return set(self.index[fn].get('track_features', '').split())

    @memoize
    def get_pkgs(self, ms, max_only=False):
        """Return Package objects matching *ms*; with *max_only*, only
        those tied with the newest one.  Raises NoPackagesFound when
        nothing in the index matches."""
        pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
        if not pkgs:
            raise NoPackagesFound("No packages found in current %s channels matching: %s" % (config.subdir, ms), [ms.spec])
        if max_only:
            maxpkg = max(pkgs)
            ret = []
            for pkg in pkgs:
                try:
                    if (pkg.name, pkg.norm_version, pkg.build_number) == \
                       (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                        ret.append(pkg)
                except TypeError:
                    # They are not equal
                    pass
            return ret
        return pkgs

    def get_max_dists(self, ms):
        """Yield filenames of the newest packages matching *ms*."""
        pkgs = self.get_pkgs(ms, max_only=True)
        if not pkgs:
            raise NoPackagesFound("No packages found in current %s channels matching: %s" % (config.subdir, ms), [ms.spec])
        for pkg in pkgs:
            yield pkg.fn

    def all_deps(self, root_fn, max_only=False):
        """Return {fn: Package} for the transitive dependency closure
        of *root_fn*.

        Candidates whose own dependencies are missing are dropped from
        the result rather than failing the whole walk; only when every
        candidate for a spec fails is NoPackagesFound raised (carrying
        the accumulated list of missing specs).
        """
        res = {}

        def add_dependents(fn1, max_only=False):
            for ms in self.ms_depends(fn1):
                found = False
                notfound = []
                for pkg2 in self.get_pkgs(ms, max_only=max_only):
                    if pkg2.fn in res:
                        found = True
                        continue
                    res[pkg2.fn] = pkg2
                    try:
                        if ms.strictness < 3:
                            add_dependents(pkg2.fn, max_only=max_only)
                    except NoPackagesFound as e:
                        # This candidate is unusable: record why and
                        # back it out of the result.
                        for pkg in e.pkgs:
                            if pkg not in notfound:
                                notfound.append(pkg)
                        if pkg2.fn in res:
                            del res[pkg2.fn]
                    else:
                        found = True
                if not found:
                    raise NoPackagesFound("Could not find some dependencies "
                        "for %s: %s" % (ms, ', '.join(notfound)), [ms.spec] + notfound)

        add_dependents(root_fn, max_only=max_only)
        return res

    def gen_clauses(self, v, dists, specs, features):
        """Yield CNF clauses (tuples of literals from the table *v*)
        encoding the install problem over the candidate set *dists*."""
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)
        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield (-v1, -v2)
        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield tuple(clause)
                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield tuple(clause)
                # Don't install any package that has a feature that wasn't requested.
                for fn in self.find_matches(ms):
                    if fn in dists and self.features(fn) - features:
                        yield (-v[fn],)
        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [v[fn] for fn in self.find_matches(ms)
                          if fn in dists and feat in self.features(fn)]
                if len(clause) > 0:
                    yield tuple(clause)
            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield (-v[fn],)
            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms)
                      if fn in dists]
            assert len(clause) >= 1, ms
            yield tuple(clause)

    def generate_version_eq(self, v, dists, include0=False):
        """Build the linear objective as [(coefficient, literal)] pairs.

        Within each package name, newer distributions get coefficient 0
        and strictly older version/build-number steps get increasing
        penalties.  Returns (eq, max_rhs) where max_rhs is the largest
        possible objective value.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)
        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                        dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # Not mutually comparable: treat as a version step.
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i
        return eq, max_rhs

    def get_dists(self, specs, max_only=False):
        """Return {fn: Package} of all candidates (plus dependency
        closures) for *specs*, skipping candidates whose dependencies
        cannot be found; raise NoPackagesFound only when a spec has no
        usable candidate at all."""
        dists = {}
        for spec in specs:
            found = False
            notfound = []
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                if pkg.fn in dists:
                    found = True
                    continue
                try:
                    dists.update(self.all_deps(pkg.fn, max_only=max_only))
                except NoPackagesFound as e:
                    # Ignore any package that has nonexisting dependencies.
                    for pkg in e.pkgs:
                        if pkg not in notfound:
                            notfound.append(pkg)
                else:
                    dists[pkg.fn] = pkg
                    found = True
            if not found:
                raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), [spec] + notfound)
        return dists

    def graph_sort(self, must_have):
        """Order the values of *must_have* so dependencies come before
        their dependents (topological sort on index 'depends' data)."""

        def lookup(value):
            index_data = self.index.get('%s.tar.bz2' % value, {})
            return {item.split(' ', 1)[0] for item in index_data.get('depends', [])}

        digraph = {}
        for key, value in must_have.items():
            depends = lookup(value)
            digraph[key] = depends
        sorted_keys = toposort(digraph)
        must_have = must_have.copy()
        # Take all of the items in the sorted keys
        # Don't fail if the key does not exist
        result = [must_have.pop(key) for key in sorted_keys if key in must_have]
        # Take any key that were not sorted
        result.extend(must_have.values())
        return result

    def solve2(self, specs, features, guess=True, alg='BDD',
               returnall=False, minimal_hint=False, unsat_only=False):
        """SAT-based resolution of *specs* with *features* enabled.

        Tries a cheap maximum-version-only pass first, then the full
        candidate set with a bisected linear version objective.  Exits
        (via sys.exit) with a hint when *guess* and the problem is
        unsatisfiable; returns True early when *unsat_only*.
        """
        log.debug("Solving for %s" % str(specs))
        # First try doing it the "old way", i.e., just look at the most recent
        # version of each package from the specs. This doesn't handle the more
        # complicated cases that the pseudo-boolean solver does, but it's also
        # much faster when it does work.
        try:
            dists = self.get_dists(specs, max_only=True)
        except NoPackagesFound:
            # Handle packages that are not included because some dependencies
            # couldn't be found.
            pass
        else:
            v = {}  # map fn to variable number
            w = {}  # map variable number to fn
            i = -1  # in case the loop doesn't run
            for i, fn in enumerate(sorted(dists)):
                v[fn] = i + 1
                w[i + 1] = fn
            m = i + 1
            dotlog.debug("Solving using max dists only")
            clauses = set(self.gen_clauses(v, dists, specs, features))
            try:
                solutions = min_sat(clauses, alg='iterate',
                    raise_on_max_n=True)
            except MaximumIterationsError:
                pass
            else:
                if len(solutions) == 1:
                    ret = [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
                    if returnall:
                        return [ret]
                    return ret

        dists = self.get_dists(specs)
        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1
        clauses = set(self.gen_clauses(v, dists, specs, features))
        if not clauses:
            if returnall:
                return [[]]
            return []
        eq, max_rhs = self.generate_version_eq(v, dists)

        # Second common case, check if it's unsatisfiable
        dotlog.debug("Checking for unsatisfiability")
        solution = sat(clauses)
        if not solution:
            if guess:
                if minimal_hint:
                    stderrlog.info('\nError: Unsatisfiable package '
                        'specifications.\nGenerating minimal hint: \n')
                    sys.exit(self.minimal_unsatisfiable_subset(clauses, v,
                        w))
                else:
                    stderrlog.info('\nError: Unsatisfiable package '
                        'specifications.\nGenerating hint: \n')
                    sys.exit(self.guess_bad_solve(specs, features))
            raise RuntimeError("Unsatisfiable package specifications")
        if unsat_only:
            return True

        log.debug("Using alg %s" % alg)

        def version_constraints(lo, hi):
            # Constraint set for: lo <= objective <= hi.
            return set(generate_constraints(eq, m, [lo, hi], alg=alg))

        log.debug("Bisecting the version constraint")
        evaluate_func = partial(evaluate_eq, eq)
        constraints = bisect_constraints(0, max_rhs, clauses,
            version_constraints, evaluate_func=evaluate_func)
        # Only relevant for build_BDD
        if constraints and false in constraints:
            # XXX: This should *never* happen. build_BDD only returns false
            # when the linear constraint is unsatisfiable, but any linear
            # constraint can equal 0, by setting all the variables to 0.
            # NOTE(review): `solutions` is not defined on this path; the
            # XXX above says it should be unreachable -- confirm.
            solution = []
        else:
            if constraints and true in constraints:
                constraints = set([])
            dotlog.debug("Finding the minimal solution")
            try:
                solutions = min_sat(clauses | constraints, N=m + 1, alg='iterate',
                    raise_on_max_n=True)
            except MaximumIterationsError:
                solutions = min_sat(clauses | constraints, N=m + 1, alg='sorter')
            assert solutions, (specs, features)
            if len(solutions) > 1:
                stdoutlog.info('\nWarning: %s possible package resolutions (only showing differing packages):\n' % len(solutions))
                pretty_solutions = [{w[lit] for lit in sol if 0 < lit <= m} for
                    sol in solutions]
                common = set.intersection(*pretty_solutions)
                for sol in pretty_solutions:
                    stdoutlog.info('\t%s,\n' % sorted(sol - common))

        if returnall:
            return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
        return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]

    @staticmethod
    def clause_pkg_name(i, w):
        """Render SAT literal *i* as a package name ('not ...' when
        negative), stripping the .tar.bz2 suffix."""
        if i > 0:
            ret = w[i]
        else:
            ret = 'not ' + w[-i]
        return ret.rsplit('.tar.bz2', 1)[0]

    def minimal_unsatisfiable_subset(self, clauses, v, w):
        """Format a minimal unsatisfiable core of *clauses* for display
        (uses the module-level helper of the same name)."""
        clauses = minimal_unsatisfiable_subset(clauses, log=True)
        pretty_clauses = []
        for clause in clauses:
            if clause[0] < 0 and len(clause) > 1:
                pretty_clauses.append('%s => %s' %
                    (self.clause_pkg_name(-clause[0], w), ' or '.join([self.clause_pkg_name(j, w) for j in clause[1:]])))
            else:
                pretty_clauses.append(' or '.join([self.clause_pkg_name(j, w) for j in clause]))
        return "The following set of clauses is unsatisfiable:\n\n%s" % '\n'.join(pretty_clauses)

    def guess_bad_solve(self, specs, features):
        """Produce a human-readable hint about why *specs* cannot be
        solved, by shrinking them to a minimal unsatisfiable subset."""
        # TODO: Check features as well
        from conda.console import setup_verbose_handlers
        setup_verbose_handlers()
        # Don't show the dots from solve2 in normal mode but do show the
        # dotlog messages with --debug
        dotlog.setLevel(logging.WARN)

        def sat(specs):
            # A candidate subset is "good" iff solve2 does not raise.
            try:
                self.solve2(specs, features, guess=False, unsat_only=True)
            except RuntimeError:
                return False
            return True

        hint = minimal_unsatisfiable_subset(specs, sat=sat, log=True)
        if not hint:
            return ''
        if len(hint) == 1:
            # TODO: Generate a hint from the dependencies.
            ret = (("\nHint: '{0}' has unsatisfiable dependencies (see 'conda "
                "info {0}')").format(hint[0].split()[0]))
        else:
            ret = """
Hint: the following packages conflict with each other:
- %s
Use 'conda info %s' etc. to see the dependencies for each package.""" % ('\n - '.join(hint), hint[0].split()[0])
        if features:
            ret += """
Note that the following features are enabled:
- %s
""" % ('\n - '.join(features))
        return ret

    def explicit(self, specs):
        """
        Given the specifications, return:
          A. if one explicit specification (strictness=3) is given, and
             all dependencies of this package are explicit as well ->
             return the filenames of those dependencies (as well as the
             explicit specification)
          B. if not one explicit specifications are given ->
             return the filenames of those (not their dependencies)
          C. None in all other cases
        """
        if len(specs) == 1:
            ms = MatchSpec(specs[0])
            fn = ms.to_filename()
            if fn is None:
                return None
            if fn not in self.index:
                return None
            res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
            res.append(fn)
        else:
            res = [MatchSpec(spec).to_filename() for spec in specs
                   if spec != 'conda']
        if None in res:
            return None
        res.sort()
        log.debug('explicit(%r) finished' % specs)
        return res

    @memoize
    def sum_matches(self, fn1, fn2):
        """Count how many of fn1's dependency specs fn2 satisfies."""
        return sum(ms.match(fn2) for ms in self.ms_depends(fn1))

    def find_substitute(self, installed, features, fn, max_only=False):
        """
        Find a substitute package for `fn` (given `installed` packages)
        which does *NOT* have `features`. If found, the substitute will
        have the same package name and version and its dependencies will
        match the installed packages as closely as possible.
        If no substitute is found, None is returned.
        """
        name, version, unused_build = fn.rsplit('-', 2)
        candidates = {}
        for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
            fn1 = pkg.fn
            if self.features(fn1).intersection(features):
                continue
            key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
            candidates[key] = fn1
        if candidates:
            maxkey = max(candidates)
            return candidates[maxkey]
        else:
            return None

    def installed_features(self, installed):
        """
        Return the set of all features of all `installed` packages,
        """
        res = set()
        for fn in installed:
            try:
                res.update(self.track_features(fn))
            except KeyError:
                # Unknown package: contributes no features.
                pass
        return res

    def update_with_features(self, fn, features):
        """Overlay feature-specific ('with_features_depends') dependency
        specs for *fn* into msd_cache, picking the largest feature-set
        key fully covered by *features*."""
        with_features = self.index[fn].get('with_features_depends')
        if with_features is None:
            return
        key = ''
        for fstr in with_features:
            fs = set(fstr.split())
            if fs <= features and len(fs) > len(set(key.split())):
                key = fstr
        if not key:
            return
        d = {ms.name: ms for ms in self.ms_depends(fn)}
        for spec in with_features[key]:
            ms = MatchSpec(spec)
            d[ms.name] = ms
        self.msd_cache[fn] = d.values()

    def solve(self, specs, installed=None, features=None, max_only=False,
              minimal_hint=False):
        """Resolve *specs* into a list of filenames to install; note
        that this mutates the caller's *features* set."""
        if installed is None:
            installed = []
        if features is None:
            features = self.installed_features(installed)
        # First pass: collect the features tracked by every candidate.
        for spec in specs:
            ms = MatchSpec(spec)
            for pkg in self.get_pkgs(ms, max_only=max_only):
                fn = pkg.fn
                features.update(self.track_features(fn))
        log.debug('specs=%r features=%r' % (specs, features))
        # Second pass: swap in feature-specific dependency lists.
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                fn = pkg.fn
                self.update_with_features(fn, features)

        stdoutlog.info("Solving package specifications: ")
        try:
            # Fast path (fully explicit specs) before the SAT solver.
            return self.explicit(specs) or self.solve2(specs, features,
                minimal_hint=minimal_hint)
        except RuntimeError:
            stdoutlog.info('\n')
            raise
if __name__ == '__main__':
    import json
    from optparse import OptionParser
    from pprint import pprint
    from conda.cli.common import arg2spec

    # Small manual test driver against the repository's test index.
    with open('../tests/index.json') as fi:
        res = Resolve(json.load(fi))

    cli = OptionParser(usage="usage: %prog [options] SPEC(s)")
    cli.add_option("--mkl", action="store_true")
    options, arguments = cli.parse_args()

    feats = set(['mkl']) if options.mkl else set()
    spec_list = [arg2spec(a) for a in arguments]
    pprint(res.solve(spec_list, [], feats))
| conda/resolve.py
--- a/conda/resolve.py
+++ b/conda/resolve.py
@@ -309,7 +309,7 @@ def add_dependents(fn1, max_only=False):
if not found:
raise NoPackagesFound("Could not find some dependencies "
- "for %s: %s" % (ms, ', '.join(notfound)), notfound)
+ "for %s: %s" % (ms, ', '.join(notfound)), [ms.spec] + notfound)
add_dependents(root_fn, max_only=max_only)
return res
@@ -426,7 +426,7 @@ def get_dists(self, specs, max_only=False):
dists[pkg.fn] = pkg
found = True
if not found:
- raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), notfound)
+ raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), [spec] + notfound)
return dists
|
create_default_packages rules out --clone
Hi all,
In case of the .condarc file defining the 'create_default_packages' options, the conda --clone command gives a confusing error messages:
alain@alain-K53E:~$ conda create --name flowersqq --clone snowflakes
Fetching package metadata: ....
Error: did not expect any arguments for --clone
alain@alain-K53E:~$
Here are my settings:
alain@alain-K53E:~$ conda info
Current conda install:
```
platform : linux-32
conda version : 3.16.0
conda-build version : 1.16.0
python version : 2.7.10.final.0
requests version : 2.7.0
root environment : /home/alain/miniconda (writable)
default environment : /home/alain/miniconda
envs directories : /home/alain/test/conda-envs
package cache : /home/alain/test/conda-envs/.pkgs
channel URLs : https://repo.continuum.io/pkgs/free/linux-32/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-32/
https://repo.continuum.io/pkgs/pro/noarch/
config file : /home/alain/.condarc_env
is foreign system : False
```
alain@alain-K53E:~$
alain@alain-K53E:~$ cat /home/alain/.condarc_env
channels:
- defaults
# Directories in which environments are located.
envs_dirs:
- ~/test/conda-envs
create_default_packages:
- python
- pip
binstar_upload: False
binstar_personal: True
alain@alain-K53E:~$
And here is the workaround for me (luckily I was able to browse the source code: https://github.com/algorete/apkg/blob/master/conda/cli/install.py, which gave me a hint, but I do not really understand it):
alain@alain-K53E:~$ conda create --name flowersqq --clone snowflakes --no-default-packages
Fetching package metadata: ....
src_prefix: '/home/alain/test/conda-envs/snowflakes'
dst_prefix: '/home/alain/test/conda-envs/flowersqq'
Packages: 14
Files: 0
Linking packages ...
[ COMPLETE ]|#################################################################################################| 100%
alain@alain-K53E:~$
Kind Regards
Alain
P.S.: I could understand that changing the .condarc file to include default packages _after_ having created an env which did not define create_default_packages could confuse '--clone', but the problem seems to always be present.
| conda/cli/install.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
from difflib import get_close_matches
import logging
import errno
import conda.config as config
import conda.plan as plan
import conda.instructions as inst
import conda.misc as misc
from conda.api import get_index
from conda.cli import common
from conda.cli.find_commands import find_executable
from conda.resolve import NoPackagesFound, Resolve, MatchSpec
import conda.install as ci
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Install all conda packages bundled inside the tarball *tar_path*.

    Extracts the tar into a temporary directory, installs every
    ``*.tar.bz2`` found inside it into *prefix* via
    ``misc.install_local_packages``, and returns the dependency specs
    that call reports.  Exits the process when *tar_path* does not
    exist.
    """
    if not exists(tar_path):
        sys.exit("File does not exist: %s" % tar_path)
    tmp_dir = tempfile.mkdtemp()
    try:
        # Context manager ensures the tar handle is closed even when
        # extraction fails.
        with tarfile.open(tar_path, 'r') as t:
            t.extractall(path=tmp_dir)
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            for fn in files:
                if fn.endswith('.tar.bz2'):
                    paths.append(join(root, fn))
        depends = misc.install_local_packages(prefix, paths, verbose=verbose)
    finally:
        # Fix: previously the temporary directory leaked whenever
        # extraction or installation raised an exception.
        shutil.rmtree(tmp_dir)
    return depends
def check_prefix(prefix, json=False):
    """Validate a to-be-created environment *prefix*.

    Rejects names starting with '.', the reserved root environment
    name, and prefixes that already exist; on the first problem found
    (later checks take precedence) it reports via
    common.error_and_exit.
    """
    name = basename(prefix)
    problem = None
    # Later checks deliberately override earlier messages, matching the
    # original precedence.
    for bad, message in (
            (name.startswith('.'),
             "environment name cannot start with '.': %s" % name),
            (name == config.root_env_name,
             "'%s' is a reserved environment name" % name),
            (exists(prefix),
             "prefix already exists: %s" % prefix)):
        if bad:
            problem = message
    if problem:
        common.error_and_exit(problem, json=json, error_type="ValueError")
def clone(src_arg, dst_prefix, json=False, quiet=False, index=None):
    """Clone the environment identified by *src_arg* into *dst_prefix*.

    *src_arg* is treated as a directory path when it contains a path
    separator, otherwise as an environment name; either way a missing
    source is reported via common.error_and_exit.
    """
    if os.sep in src_arg:
        # Argument looks like a path to an environment directory.
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        # Argument is an environment name.
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = misc.clone_env(
            src_prefix, dst_prefix, verbose=not json, quiet=quiet, index=index)

    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix)
def print_activate(arg):
    """Print shell instructions for activating/deactivating env *arg*."""
    if sys.platform == 'win32':
        activate_line = "# > activate %s" % arg
    else:
        activate_line = "# $ source activate %s" % arg
    for line in ("#",
                 "# To activate this environment, use:",
                 activate_line,
                 "#",
                 "# To deactivate this environment, use:",
                 "# $ source deactivate",
                 "#"):
        print(line)
def get_revision(arg, json=False):
    """Parse *arg* as an integer revision number.

    On non-numeric input, reports an error via common.error_and_exit
    (which terminates the process).
    """
    try:
        revision = int(arg)
    except ValueError:
        common.error_and_exit("expected revision number, not: '%s'" % arg,
                              json=json,
                              error_type="ValueError")
    else:
        return revision
def install(args, parser, command='install'):
"""
conda install, conda update, and conda create
"""
newenv = bool(command == 'create')
if newenv:
common.ensure_name_or_prefix(args, command)
prefix = common.get_prefix(args, search=not newenv)
if newenv:
check_prefix(prefix, json=args.json)
if config.force_32bit and plan.is_root_prefix(prefix):
common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")
if command == 'update':
if args.all:
if args.packages:
common.error_and_exit("""--all cannot be used with packages""",
json=args.json,
error_type="ValueError")
else:
if len(args.packages) == 0:
common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
json=args.json,
error_type="ValueError")
if command == 'update':
linked = ci.linked(prefix)
for name in args.packages:
common.arg2spec(name, json=args.json)
if '=' in name:
common.error_and_exit("Invalid package name: '%s'" % (name),
json=args.json,
error_type="ValueError")
if name not in set(ci.name_dist(d) for d in linked):
common.error_and_exit("package '%s' is not installed in %s" %
(name, prefix),
json=args.json,
error_type="ValueError")
if newenv and not args.no_default_packages:
default_packages = config.create_default_packages[:]
# Override defaults if they are specified at the command line
for default_pkg in config.create_default_packages:
if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
default_packages.remove(default_pkg)
args.packages.extend(default_packages)
common.ensure_override_channels_requires_channel(args)
channel_urls = args.channel or ()
specs = []
if args.file:
specs.extend(common.specs_from_url(args.file, json=args.json))
elif getattr(args, 'all', False):
linked = ci.linked(prefix)
if not linked:
common.error_and_exit("There are no packages installed in the "
"prefix %s" % prefix)
for pkg in linked:
name, ver, build = pkg.rsplit('-', 2)
if name in getattr(args, '_skip', ['anaconda']):
continue
if name == 'python' and ver.startswith('2'):
# Oh Python 2...
specs.append('%s >=%s,<3' % (name, ver))
else:
specs.append('%s' % name)
specs.extend(common.specs_from_args(args.packages, json=args.json))
if command == 'install' and args.revision:
get_revision(args.revision, json=args.json)
elif not (newenv and args.clone):
common.check_specs(prefix, specs, json=args.json,
create=(command == 'create'))
# handle tar file containing conda packages
num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
if num_cp:
if num_cp == len(args.packages):
depends = misc.install_local_packages(prefix, args.packages,
verbose=not args.quiet)
specs = list(set(depends))
args.unknown = True
else:
common.error_and_exit(
"cannot mix specifications with conda package filenames",
json=args.json,
error_type="ValueError")
if len(args.packages) == 1:
tar_path = args.packages[0]
if tar_path.endswith('.tar'):
depends = install_tar(prefix, tar_path, verbose=not args.quiet)
specs = list(set(depends))
args.unknown = True
if args.use_local:
from conda.fetch import fetch_index
from conda.utils import url_path
try:
from conda_build.config import croot
except ImportError:
common.error_and_exit(
"you need to have 'conda-build >= 1.7.1' installed"
" to use the --use-local option",
json=args.json,
error_type="RuntimeError")
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
fetch_index.cache = {}
if exists(croot):
channel_urls = [url_path(croot)] + list(channel_urls)
index = common.get_index_trap(channel_urls=channel_urls,
prepend=not args.override_channels,
use_cache=args.use_index_cache,
unknown=args.unknown,
json=args.json,
offline=args.offline)
if newenv and args.clone:
if args.packages:
common.error_and_exit('did not expect any arguments for --clone',
json=args.json,
error_type="ValueError")
clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
misc.append_env(prefix)
misc.touch_nonadmin(prefix)
if not args.json:
print_activate(args.name if args.name else prefix)
return
# Don't update packages that are already up-to-date
if command == 'update' and not (args.all or args.force):
r = Resolve(index)
orig_packages = args.packages[:]
for name in orig_packages:
installed_metadata = [ci.is_linked(prefix, dist)
for dist in linked]
vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
if dist.rsplit('-', 2)[0] == name]
build_inst = [m['build_number'] for m in installed_metadata if
m['name'] == name]
try:
assert len(vers_inst) == 1, name
assert len(build_inst) == 1, name
except AssertionError as e:
if args.json:
common.exception_and_exit(e, json=True)
else:
raise
pkgs = sorted(r.get_pkgs(MatchSpec(name)))
if not pkgs:
# Shouldn't happen?
continue
latest = pkgs[-1]
if (latest.version == vers_inst[0] and
latest.build_number == build_inst[0]):
args.packages.remove(name)
if not args.packages:
from conda.cli.main_list import print_packages
if not args.json:
regex = '^(%s)$' % '|'.join(orig_packages)
print('# All requested packages already installed.')
print_packages(prefix, regex)
else:
common.stdout_json_success(
message='All requested packages already installed.')
return
if args.force:
args.no_deps = True
spec_names = set(s.split()[0] for s in specs)
if args.no_deps:
only_names = spec_names
else:
only_names = None
if not isdir(prefix) and not newenv:
if args.mkdir:
try:
os.makedirs(prefix)
except OSError:
common.error_and_exit("Error: could not create directory: %s" % prefix,
json=args.json,
error_type="OSError")
else:
common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
json=args.json,
error_type="NoEnvironmentFound")
try:
if command == 'install' and args.revision:
actions = plan.revert_actions(prefix, get_revision(args.revision))
else:
with common.json_progress_bars(json=args.json and not args.quiet):
actions = plan.install_actions(prefix, index, specs,
force=args.force,
only_names=only_names,
pinned=args.pinned,
minimal_hint=args.alt_hint)
if args.copy:
new_link = []
for pkg in actions["LINK"]:
dist, pkgs_dir, lt = inst.split_linkarg(pkg)
lt = ci.LINK_COPY
new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
actions["LINK"] = new_link
except NoPackagesFound as e:
error_message = e.args[0]
if command == 'update' and args.all:
# Packages not found here just means they were installed but
# cannot be found any more. Just skip them.
if not args.json:
print("Warning: %s, skipping" % error_message)
else:
# Not sure what to do here
pass
args._skip = getattr(args, '_skip', ['anaconda'])
args._skip.extend([i.split()[0] for i in e.pkgs])
return install(args, parser, command=command)
else:
packages = {index[fn]['name'] for fn in index}
for pkg in e.pkgs:
close = get_close_matches(pkg, packages, cutoff=0.7)
if close:
error_message += ("\n\nDid you mean one of these?"
"\n\n %s" % (', '.join(close)))
error_message += '\n\nYou can search for this package on anaconda.org with'
error_message += '\n\n anaconda search -t conda %s' % pkg
if len(e.pkgs) > 1:
# Note this currently only happens with dependencies not found
error_message += '\n\n (and similarly for the other packages)'
if not find_executable('anaconda', include_others=False):
error_message += '\n\nYou may need to install the anaconda-client command line client with'
error_message += '\n\n conda install anaconda-client'
common.error_and_exit(error_message, json=args.json)
except SystemExit as e:
# Unsatisfiable package specifications/no such revision/import error
error_type = 'UnsatisfiableSpecifications'
if e.args and 'could not import' in e.args[0]:
error_type = 'ImportError'
common.exception_and_exit(e, json=args.json, newline=True,
error_text=False,
error_type=error_type)
if plan.nothing_to_do(actions):
from conda.cli.main_list import print_packages
if not args.json:
regex = '^(%s)$' % '|'.join(spec_names)
print('\n# All requested packages already installed.')
print_packages(prefix, regex)
else:
common.stdout_json_success(
message='All requested packages already installed.')
return
if not args.json:
print()
print("Package plan for installation in environment %s:" % prefix)
plan.display_actions(actions, index)
if command in {'install', 'update'}:
common.check_write(command, prefix)
if not args.json:
common.confirm_yn(args)
elif args.dry_run:
common.stdout_json_success(actions=actions, dry_run=True)
sys.exit(0)
with common.json_progress_bars(json=args.json and not args.quiet):
try:
plan.execute_actions(actions, index, verbose=not args.quiet)
if not (command == 'update' and args.all):
try:
with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
f.write('# %s specs: %s\n' % (command, specs))
except IOError as e:
if e.errno == errno.EACCES:
log.debug("Can't write the history file")
else:
raise
except RuntimeError as e:
if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
error_type = "AlreadyLocked"
else:
error_type = "RuntimeError"
common.exception_and_exit(e, error_type=error_type, json=args.json)
except SystemExit as e:
common.exception_and_exit(e, json=args.json)
if newenv:
misc.append_env(prefix)
misc.touch_nonadmin(prefix)
if not args.json:
print_activate(args.name if args.name else prefix)
if args.json:
common.stdout_json_success(actions=actions)
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """Dry-run an install plan for *packages* in a throwaway prefix.

    Builds the repo index, computes install actions for the given specs,
    displays them, and returns the actions dict.  The scratch prefix is
    always removed afterwards.
    """
    # Create the scratch prefix *before* the try block: if mkdtemp itself
    # fails, `prefix` would be unbound and the finally clause would raise
    # a NameError that masks the original exception.
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
<|code_end|>
| conda/cli/install.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
from difflib import get_close_matches
import logging
import errno
import conda.config as config
import conda.plan as plan
import conda.instructions as inst
import conda.misc as misc
from conda.api import get_index
from conda.cli import common
from conda.cli.find_commands import find_executable
from conda.resolve import NoPackagesFound, Resolve, MatchSpec
import conda.install as ci
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Install every conda package (.tar.bz2) bundled inside a plain .tar.

    Extracts *tar_path* into a temporary directory, installs each contained
    .tar.bz2 into *prefix*, and returns the dependency specs reported by
    install_local_packages.  Exits the process if *tar_path* does not exist.
    """
    if not exists(tar_path):
        sys.exit("File does not exist: %s" % tar_path)
    tmp_dir = tempfile.mkdtemp()
    try:
        # Context manager guarantees the archive handle is closed even if
        # extraction fails (the original leaked the handle on error).
        # NOTE(review): extractall on an untrusted archive is vulnerable to
        # path traversal ("../" members) -- consider validating member names.
        with tarfile.open(tar_path, 'r') as t:
            t.extractall(path=tmp_dir)
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            for fn in files:
                if fn.endswith('.tar.bz2'):
                    paths.append(join(root, fn))
        # Imported lazily, after the cheap existence check above.
        from conda.misc import install_local_packages
        depends = install_local_packages(prefix, paths, verbose=verbose)
    finally:
        # Remove the scratch directory on success *and* failure; the
        # original only removed it on the success path.
        shutil.rmtree(tmp_dir)
    return depends
def check_prefix(prefix, json=False):
    """Exit with an error unless *prefix* is a legal location for a new env."""
    from conda.config import root_env_name
    env_name = basename(prefix)
    # Later checks deliberately take precedence over earlier ones, matching
    # the original sequence of overwriting assignments.
    problems = [
        (env_name.startswith('.'),
         "environment name cannot start with '.': %s" % env_name),
        (env_name == root_env_name,
         "'%s' is a reserved environment name" % env_name),
        (exists(prefix),
         "prefix already exists: %s" % prefix),
    ]
    message = None
    for failed, text in problems:
        if failed:
            message = text
    if message:
        common.error_and_exit(message, json=json, error_type="ValueError")
def clone(src_arg, dst_prefix, json=False, quiet=False, index=None):
    """Clone the environment given by *src_arg* (path or name) into *dst_prefix*."""
    from conda.misc import clone_env
    if os.sep in src_arg:
        # The argument looks like a filesystem path.
        candidate = abspath(src_arg)
        problem = (None if isdir(candidate)
                   else 'no such directory: %s' % src_arg)
    else:
        # Otherwise resolve it as an environment name.
        candidate = common.find_prefix_name(src_arg)
        problem = (None if candidate is not None
                   else 'could not find environment: %s' % src_arg)
    if problem is not None:
        common.error_and_exit(problem, json=json,
                              error_type="NoEnvironmentFound")
    src_prefix = candidate

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet, index=index)

    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """Print shell instructions for activating/deactivating environment *arg*."""
    if sys.platform == 'win32':
        activate_line = "# > activate %s" % arg
    else:
        activate_line = "# $ source activate %s" % arg
    banner = [
        "#",
        "# To activate this environment, use:",
        activate_line,
        "#",
        "# To deactivate this environment, use:",
        "# $ source deactivate",
        "#",
    ]
    for line in banner:
        print(line)
def get_revision(arg, json=False):
    """Return *arg* coerced to an int revision; exit with an error otherwise."""
    try:
        revision = int(arg)
    except ValueError:
        common.error_and_exit("expected revision number, not: '%s'" % arg,
                              json=json,
                              error_type="ValueError")
    else:
        return revision
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared implementation for all three commands; *command* selects which
    validation, defaulting, and planning behaviour applies.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)

    # Argument validation specific to `conda update`.
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        # Kept as a list so the --clone check below can subtract it.
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Assemble requested specs from --file, --all, and positional args.
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))

    # handle tar file containing conda packages
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            from conda.misc import install_local_packages
            depends = install_local_packages(prefix, args.packages,
                                             verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # A single plain .tar argument is treated as a bundle of conda packages.
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        # Only the create-default packages may accompany --clone.
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]
            try:
                # Each requested package should be linked exactly once.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    # Compute the action plan; NoPackagesFound gets special handling below.
    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
                if args.copy:
                    new_link = []
                    for pkg in actions["LINK"]:
                        dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                        lt = ci.LINK_COPY
                        new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                    actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]
        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            args._skip.extend([i.split()[0] for i in e.pkgs])
            # Retry the whole command without the missing packages.
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}
            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'
            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n conda install anaconda-client'
            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        # Best effort: a read-only prefix must not abort.
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """Dry-run an install plan for *packages* in a throwaway prefix.

    Builds the repo index, computes install actions for the given specs,
    displays them, and returns the actions dict.  The scratch prefix is
    always removed afterwards.
    """
    # mkdtemp is hoisted out of the try block: if it fails, `prefix` would
    # be unbound and the finally clause would raise a NameError that masks
    # the original exception.
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
| conda/cli/install.py
--- a/conda/cli/install.py
+++ b/conda/cli/install.py
@@ -169,6 +169,8 @@ def install(args, parser, command='install'):
if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
default_packages.remove(default_pkg)
args.packages.extend(default_packages)
+ else:
+ default_packages = []
common.ensure_override_channels_requires_channel(args)
channel_urls = args.channel or ()
@@ -244,7 +246,7 @@ def install(args, parser, command='install'):
offline=args.offline)
if newenv and args.clone:
- if args.packages:
+ if set(args.packages) - set(default_packages):
common.error_and_exit('did not expect any arguments for --clone',
json=args.json,
error_type="ValueError") |
conda install --no-deps still installing deps when installing tarball
When running the following command:
`conda install --no-deps ./matplotlib-1.4.0-np18py27_0.tar.bz2`
You would expect that no dependencies are installed, but conda installs the dependencies anyway.
| conda/cli/install.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
from difflib import get_close_matches
import logging
import errno
import conda.config as config
import conda.plan as plan
import conda.instructions as inst
import conda.misc as misc
from conda.api import get_index
from conda.cli import common
from conda.cli.find_commands import find_executable
from conda.resolve import NoPackagesFound, Resolve, MatchSpec
import conda.install as ci
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Install every conda package (.tar.bz2) bundled inside a plain .tar.

    Extracts *tar_path* into a temporary directory, installs each contained
    .tar.bz2 into *prefix*, and returns the dependency specs reported by
    misc.install_local_packages.  Exits if *tar_path* does not exist.
    """
    if not exists(tar_path):
        sys.exit("File does not exist: %s" % tar_path)
    tmp_dir = tempfile.mkdtemp()
    try:
        # `with` closes the archive even when extraction raises; the
        # original leaked the open TarFile on error.
        # NOTE(review): extractall on an untrusted archive is vulnerable to
        # path traversal ("../" members) -- consider validating member names.
        with tarfile.open(tar_path, 'r') as t:
            t.extractall(path=tmp_dir)
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            for fn in files:
                if fn.endswith('.tar.bz2'):
                    paths.append(join(root, fn))
        depends = misc.install_local_packages(prefix, paths, verbose=verbose)
    finally:
        # Remove the scratch directory on success *and* failure; the
        # original only removed it on the success path.
        shutil.rmtree(tmp_dir)
    return depends
def check_prefix(prefix, json=False):
    """Abort via error_and_exit when *prefix* cannot host a new environment."""
    env_name = basename(prefix)
    message = None
    if env_name.startswith('.'):
        message = "environment name cannot start with '.': %s" % env_name
    if env_name == config.root_env_name:
        message = "'%s' is a reserved environment name" % env_name
    # An existing prefix overrides any naming complaint, matching the
    # original precedence (last failing check wins).
    if exists(prefix):
        message = "prefix already exists: %s" % prefix
    if message is not None:
        common.error_and_exit(message, json=json, error_type="ValueError")
def clone(src_arg, dst_prefix, json=False, quiet=False, index=None):
    """Clone the environment *src_arg* (a path or an env name) into *dst_prefix*."""
    if os.sep in src_arg:
        # Argument looks like a filesystem path.
        src_prefix = abspath(src_arg)
        error = ('no such directory: %s' % src_arg
                 if not isdir(src_prefix) else None)
    else:
        # Argument is an environment name; resolve it to a prefix.
        src_prefix = common.find_prefix_name(src_arg)
        error = ('could not find environment: %s' % src_arg
                 if src_prefix is None else None)
    if error:
        common.error_and_exit(error, json=json,
                              error_type="NoEnvironmentFound")

    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = misc.clone_env(src_prefix, dst_prefix,
                                                  verbose=not json,
                                                  quiet=quiet, index=index)

    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """Print instructions for (de)activating the environment *arg*."""
    if sys.platform == 'win32':
        how = "# > activate %s" % arg
    else:
        how = "# $ source activate %s" % arg
    print("\n".join([
        "#",
        "# To activate this environment, use:",
        how,
        "#",
        "# To deactivate this environment, use:",
        "# $ source deactivate",
        "#",
    ]))
def get_revision(arg, json=False):
    """Coerce *arg* to an integer revision number, exiting on failure."""
    try:
        return int(arg)
    except ValueError:
        msg = "expected revision number, not: '%s'" % arg
        common.error_and_exit(msg, json=json, error_type="ValueError")
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared implementation for all three commands; *command* selects which
    validation, defaulting, and planning behaviour applies.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    # Refuse to force a 32-bit install in the root environment.
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")

    # Argument validation specific to `conda update`.
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")

    if command == 'update':
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")

    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        # Kept as a list so the --clone check below can subtract it.
        default_packages = []

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()

    # Assemble requested specs from --file, --all, and positional args.
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))

    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                          create=(command == 'create'))

    # handle tar file containing conda packages
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")

    # A single plain .tar argument is treated as a bundle of conda packages.
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            specs = list(set(depends))
            args.unknown = True

    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)

    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)

    if newenv and args.clone:
        # Only the create-default packages may accompany --clone.
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return

    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]
            try:
                # Each requested package should be linked exactly once.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")

    # Compute the action plan; NoPackagesFound gets special handling below.
    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
                if args.copy:
                    new_link = []
                    for pkg in actions["LINK"]:
                        dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                        lt = ci.LINK_COPY
                        new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                    actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]
        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            args._skip.extend([i.split()[0] for i in e.pkgs])
            # Retry the whole command without the missing packages.
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}
            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
                if len(e.pkgs) > 1:
                    # Note this currently only happens with dependencies not found
                    error_message += '\n\n (and similarly for the other packages)'
            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n conda install anaconda-client'
            # Remind the user of pinned specs, which can make specs unsatisfiable.
            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs
            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)

    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return

    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if command in {'install', 'update'}:
        common.check_write(command, prefix)

    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)

    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        # Best effort: a read-only prefix must not abort.
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)

    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)

    if args.json:
        common.stdout_json_success(actions=actions)
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """
    Dry-run resolution of *packages* into a throwaway prefix.

    Builds an install plan for the given package specs, prints it, and
    returns the actions dict.  The temporary prefix is always removed.
    """
    # Create the temp dir *before* entering the try block: if mkdtemp()
    # itself fails, the original code hit a NameError on `prefix` in the
    # finally clause, masking the real exception.
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
<|code_end|>
| conda/cli/install.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
import shutil
import tarfile
import tempfile
from os.path import isdir, join, basename, exists, abspath
from difflib import get_close_matches
import logging
import errno
import conda.config as config
import conda.plan as plan
import conda.instructions as inst
import conda.misc as misc
from conda.api import get_index
from conda.cli import common
from conda.cli.find_commands import find_executable
from conda.resolve import NoPackagesFound, Resolve, MatchSpec
import conda.install as ci
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """
    Install all conda packages (.tar.bz2 files) found inside the plain
    tarball *tar_path* into *prefix*.

    Returns the list of dependency specs reported by
    misc.install_local_packages().  Exits when *tar_path* does not exist.
    """
    if not exists(tar_path):
        sys.exit("File does not exist: %s" % tar_path)
    tmp_dir = tempfile.mkdtemp()
    try:
        # Close the tarfile and remove the scratch dir even when extraction
        # or installation fails (the original leaked both on error).
        t = tarfile.open(tar_path, 'r')
        try:
            t.extractall(path=tmp_dir)
        finally:
            t.close()
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            for fn in files:
                if fn.endswith('.tar.bz2'):
                    paths.append(join(root, fn))
        return misc.install_local_packages(prefix, paths, verbose=verbose)
    finally:
        shutil.rmtree(tmp_dir)
def check_prefix(prefix, json=False):
    """
    Exit with an error when *prefix* is not a legal location for a new
    environment; otherwise return silently.
    """
    name = basename(prefix)
    # Ordered checks; the message of the *last* failing check wins,
    # matching the original precedence (existing prefix beats name rules).
    checks = [
        (name.startswith('.'),
         "environment name cannot start with '.': %s" % name),
        (name == config.root_env_name,
         "'%s' is a reserved environment name" % name),
        (exists(prefix),
         "prefix already exists: %s" % prefix),
    ]
    error = None
    for failed, message in checks:
        if failed:
            error = message
    if error:
        common.error_and_exit(error, json=json, error_type="ValueError")
def clone(src_arg, dst_prefix, json=False, quiet=False, index=None):
    """
    Clone an existing environment into *dst_prefix*.

    *src_arg* is treated as a filesystem path when it contains os.sep,
    otherwise as an environment name resolved via common.find_prefix_name().
    Exits with an error when the source cannot be found.
    """
    if os.sep in src_arg:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            common.error_and_exit('no such directory: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    else:
        src_prefix = common.find_prefix_name(src_arg)
        if src_prefix is None:
            common.error_and_exit('could not find environment: %s' % src_arg,
                                  json=json,
                                  error_type="NoEnvironmentFound")
    if not json:
        print("src_prefix: %r" % src_prefix)
        print("dst_prefix: %r" % dst_prefix)
    # clone_env performs the actual copy/link work and reports what it did.
    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = misc.clone_env(src_prefix, dst_prefix,
                                                  verbose=not json,
                                                  quiet=quiet, index=index)
    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """Print shell-appropriate instructions for activating/deactivating *arg*."""
    if sys.platform == 'win32':
        activate_line = "# > activate %s" % arg
    else:
        activate_line = "# $ source activate %s" % arg
    banner = [
        "#",
        "# To activate this environment, use:",
        activate_line,
        "#",
        "# To deactivate this environment, use:",
        "# $ source deactivate",
        "#",
    ]
    for line in banner:
        print(line)
def get_revision(arg, json=False):
    """Return *arg* parsed as an integer revision number, exiting on failure."""
    try:
        revision = int(arg)
    except ValueError:
        common.error_and_exit("expected revision number, not: '%s'" % arg,
                              json=json,
                              error_type="ValueError")
    else:
        return revision
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Single entry point for the three commands; `command` selects the
    behavior.  Validates arguments, builds the package specs, resolves an
    action plan, confirms it with the user, and executes it.  May recurse
    into itself (update --all) after skipping packages that can no longer
    be found in the index.
    """
    newenv = bool(command == 'create')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = common.get_prefix(args, search=not newenv)
    if newenv:
        check_prefix(prefix, json=args.json)
    if config.force_32bit and plan.is_root_prefix(prefix):
        common.error_and_exit("cannot use CONDA_FORCE_32BIT=1 in root env")
    # --- argument validation specific to `conda update` -------------------
    if command == 'update':
        if args.all:
            if args.packages:
                common.error_and_exit("""--all cannot be used with packages""",
                                      json=args.json,
                                      error_type="ValueError")
        else:
            if len(args.packages) == 0:
                common.error_and_exit("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix,
                                      json=args.json,
                                      error_type="ValueError")
    if command == 'update':
        # Every named package must already be installed and must not carry
        # a version constraint.
        linked = ci.linked(prefix)
        for name in args.packages:
            common.arg2spec(name, json=args.json)
            if '=' in name:
                common.error_and_exit("Invalid package name: '%s'" % (name),
                                      json=args.json,
                                      error_type="ValueError")
            if name not in set(ci.name_dist(d) for d in linked):
                common.error_and_exit("package '%s' is not installed in %s" %
                                      (name, prefix),
                                      json=args.json,
                                      error_type="ValueError")
    if newenv and not args.no_default_packages:
        default_packages = config.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in config.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []
    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    # --- build the list of specs to satisfy -------------------------------
    specs = []
    if args.file:
        specs.extend(common.specs_from_url(args.file, json=args.json))
    elif getattr(args, 'all', False):
        linked = ci.linked(prefix)
        if not linked:
            common.error_and_exit("There are no packages installed in the "
                                  "prefix %s" % prefix)
        for pkg in linked:
            name, ver, build = pkg.rsplit('-', 2)
            if name in getattr(args, '_skip', ['anaconda']):
                continue
            if name == 'python' and ver.startswith('2'):
                # Oh Python 2...
                specs.append('%s >=%s,<3' % (name, ver))
            else:
                specs.append('%s' % name)
    specs.extend(common.specs_from_args(args.packages, json=args.json))
    if command == 'install' and args.revision:
        get_revision(args.revision, json=args.json)
    elif not (newenv and args.clone):
        common.check_specs(prefix, specs, json=args.json,
                           create=(command == 'create'))
    # --- direct installation of local .tar.bz2 package files --------------
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            depends = misc.install_local_packages(prefix, args.packages,
                                                  verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
        else:
            common.error_and_exit(
                "cannot mix specifications with conda package filenames",
                json=args.json,
                error_type="ValueError")
    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            depends = install_tar(prefix, tar_path, verbose=not args.quiet)
            if args.no_deps:
                depends = []
            specs = list(set(depends))
            args.unknown = True
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit(
                "you need to have 'conda-build >= 1.7.1' installed"
                " to use the --use-local option",
                json=args.json,
                error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  unknown=args.unknown,
                                  json=args.json,
                                  offline=args.offline)
    # --- `conda create --clone` short-circuits the solver -----------------
    if newenv and args.clone:
        if set(args.packages) - set(default_packages):
            common.error_and_exit('did not expect any arguments for --clone',
                                  json=args.json,
                                  error_type="ValueError")
        clone(args.clone, prefix, json=args.json, quiet=args.quiet, index=index)
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
        return
    # Don't update packages that are already up-to-date
    if command == 'update' and not (args.all or args.force):
        r = Resolve(index)
        orig_packages = args.packages[:]
        for name in orig_packages:
            installed_metadata = [ci.is_linked(prefix, dist)
                                  for dist in linked]
            vers_inst = [dist.rsplit('-', 2)[1] for dist in linked
                         if dist.rsplit('-', 2)[0] == name]
            build_inst = [m['build_number'] for m in installed_metadata if
                          m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
            except AssertionError as e:
                if args.json:
                    common.exception_and_exit(e, json=True)
                else:
                    raise
            pkgs = sorted(r.get_pkgs(MatchSpec(name)))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if (latest.version == vers_inst[0] and
                    latest.build_number == build_inst[0]):
                args.packages.remove(name)
        if not args.packages:
            from conda.cli.main_list import print_packages
            if not args.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return
    if args.force:
        args.no_deps = True
    spec_names = set(s.split()[0] for s in specs)
    if args.no_deps:
        only_names = spec_names
    else:
        only_names = None
    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                common.error_and_exit("Error: could not create directory: %s" % prefix,
                                      json=args.json,
                                      error_type="OSError")
        else:
            common.error_and_exit("""\
environment does not exist: %s
#
# Use 'conda create' to create an environment before installing packages
# into it.
#""" % prefix,
                                  json=args.json,
                                  error_type="NoEnvironmentFound")
    # --- compute the action plan ------------------------------------------
    try:
        if command == 'install' and args.revision:
            actions = plan.revert_actions(prefix, get_revision(args.revision))
        else:
            with common.json_progress_bars(json=args.json and not args.quiet):
                actions = plan.install_actions(prefix, index, specs,
                                               force=args.force,
                                               only_names=only_names,
                                               pinned=args.pinned,
                                               minimal_hint=args.alt_hint)
            if args.copy:
                new_link = []
                for pkg in actions["LINK"]:
                    dist, pkgs_dir, lt = inst.split_linkarg(pkg)
                    lt = ci.LINK_COPY
                    new_link.append("%s %s %d" % (dist, pkgs_dir, lt))
                actions["LINK"] = new_link
    except NoPackagesFound as e:
        error_message = e.args[0]
        if command == 'update' and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not args.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            # Recurse with the missing packages added to the skip list so the
            # remaining packages can still be updated instead of bailing out.
            args._skip = getattr(args, '_skip', ['anaconda'])
            args._skip.extend([i.split()[0] for i in e.pkgs])
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}
            for pkg in e.pkgs:
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if close:
                    error_message += ("\n\nDid you mean one of these?"
                                      "\n\n %s" % (', '.join(close)))
                error_message += '\n\nYou can search for this package on anaconda.org with'
                error_message += '\n\n anaconda search -t conda %s' % pkg
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message += '\n\n (and similarly for the other packages)'
            if not find_executable('anaconda', include_others=False):
                error_message += '\n\nYou may need to install the anaconda-client command line client with'
                error_message += '\n\n conda install anaconda-client'
            pinned_specs = plan.get_pinned_specs(prefix)
            if pinned_specs:
                error_message += "\n\nNote that you have pinned specs in %s:" % join(prefix, 'conda-meta', 'pinned')
                error_message += "\n\n %r" % pinned_specs
            common.error_and_exit(error_message, json=args.json)
    except SystemExit as e:
        # Unsatisfiable package specifications/no such revision/import error
        error_type = 'UnsatisfiableSpecifications'
        if e.args and 'could not import' in e.args[0]:
            error_type = 'ImportError'
        common.exception_and_exit(e, json=args.json, newline=True,
                                  error_text=False,
                                  error_type=error_type)
    if plan.nothing_to_do(actions):
        from conda.cli.main_list import print_packages
        if not args.json:
            regex = '^(%s)$' % '|'.join(spec_names)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    # --- show the plan, confirm, execute ----------------------------------
    if not args.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
    plan.display_actions(actions, index)
    if command in {'install', 'update'}:
        common.check_write(command, prefix)
    if not args.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        sys.exit(0)
    with common.json_progress_bars(json=args.json and not args.quiet):
        try:
            plan.execute_actions(actions, index, verbose=not args.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                error_type = "AlreadyLocked"
            else:
                error_type = "RuntimeError"
            common.exception_and_exit(e, error_type=error_type, json=args.json)
        except SystemExit as e:
            common.exception_and_exit(e, json=args.json)
    if newenv:
        misc.append_env(prefix)
        misc.touch_nonadmin(prefix)
        if not args.json:
            print_activate(args.name if args.name else prefix)
    if args.json:
        common.stdout_json_success(actions=actions)
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    """
    Dry-run resolution of *packages* into a throwaway prefix.

    Builds an install plan for the given package specs, prints it, and
    returns the actions dict.  The temporary prefix is always removed.
    """
    # Create the temp dir *before* entering the try block: if mkdtemp()
    # itself fails, the original code hit a NameError on `prefix` in the
    # finally clause, masking the real exception.
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = common.specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform)
        actions = plan.install_actions(prefix, index, specs, pinned=False,
                                       minimal_hint=minimal_hint)
        plan.display_actions(actions, index)
        return actions
    finally:
        ci.rm_rf(prefix)
| conda/cli/install.py
--- a/conda/cli/install.py
+++ b/conda/cli/install.py
@@ -196,12 +196,14 @@ def install(args, parser, command='install'):
common.check_specs(prefix, specs, json=args.json,
create=(command == 'create'))
- # handle tar file containing conda packages
+
num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
if num_cp:
if num_cp == len(args.packages):
depends = misc.install_local_packages(prefix, args.packages,
verbose=not args.quiet)
+ if args.no_deps:
+ depends = []
specs = list(set(depends))
args.unknown = True
else:
@@ -209,10 +211,14 @@ def install(args, parser, command='install'):
"cannot mix specifications with conda package filenames",
json=args.json,
error_type="ValueError")
+
+ # handle tar file containing conda packages
if len(args.packages) == 1:
tar_path = args.packages[0]
if tar_path.endswith('.tar'):
depends = install_tar(prefix, tar_path, verbose=not args.quiet)
+ if args.no_deps:
+ depends = []
specs = list(set(depends))
args.unknown = True
|
`conda.install.rm_rf` can't delete old environments in Windows + Python 2.7
This issue hit me when trying to use `--force` option in `conda env create` (I added this in #102), and it revealed a possible problem with how conda handles links in Windows + Python 2.7.
# Symptom
Trying to delete an environment (not active) from within Python fails because the original file being linked is locked by Windows.
# Context
- Windows
- Python 2.7 (`os.islink` does not work here, which might affect the `rm_rf` function from `conda.install`)
# Reproducing
This command line is enough to reproduce the problem:
``` bat
$ conda create -n test pyyaml && python -c "import yaml;from conda.install import rm_rf;rm_rf('C:\\Miniconda\\envs\\test')"
Fetching package metadata: ......
#
# To activate this environment, use:
# > activate test
#
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "C:\Miniconda\lib\site-packages\conda\install.py", line 204, in rm_rf
shutil.rmtree(path)
File "C:\Miniconda\lib\shutil.py", line 247, in rmtree
rmtree(fullname, ignore_errors, onerror)
File "C:\Miniconda\lib\shutil.py", line 247, in rmtree
rmtree(fullname, ignore_errors, onerror)
File "C:\Miniconda\lib\shutil.py", line 252, in rmtree
onerror(os.remove, fullname, sys.exc_info())
File "C:\Miniconda\lib\shutil.py", line 250, in rmtree
os.remove(fullname)
WindowsError: [Error 5] Acesso negado: 'C:\\Miniconda\\envs\\test\\Lib\\site-packages\\yaml.dll'
```
Note that when I import `pyyaml`, I'm locking file `C:\\Miniconda\\Lib\\site-packages\\yaml.dll` (root environment), but that also prevents me from deleting `C:\\Miniconda\\envs\\test\\Lib\\site-packages\\yaml.dll` (another hard link).
# Solution?
Maybe we should add some improved support for detecting and unlinking hardlinks in Windows with Python 2.7, and handle those cases individually instead of trying to `shutil.rmtree` everything.
Possibly from [jaraco.windows](https://bitbucket.org/jaraco/jaraco.windows/src/default/jaraco/windows/filesystem/__init__.py#cl-76) or [ntfs/fs.py](https://github.com/sid0/ntfs/blob/master/ntfsutils/fs.py#L88)
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import time
import os
import json
import errno
import shutil
import stat
import sys
import subprocess
import tarfile
import traceback
import logging
import shlex
from os.path import abspath, basename, dirname, isdir, isfile, islink, join
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in exposing the same context-manager interface as
        # conda.lock.Locked, but performing no actual locking.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
on_win = bool(sys.platform == 'win32')
if on_win:
    import ctypes
    from ctypes import wintypes
    # on Windows we cannot update these packages in the root environment
    # because of the file lock problem
    win_ignore_root = set(['python'])
    # win32 API bindings for creating links via ctypes.
    # NOTE(review): presumably used because os.link/os.symlink are not
    # available on Windows under Python 2 -- confirm.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is unavailable; soft links are disabled and
        # win_soft_link() below will raise OSError.
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
# Module-level loggers: `log` for debug/diagnostic messages, `stdoutlog`
# for messages intended to reach the console directly.
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
# Attach the no-op handler so logging calls never warn about a missing handler.
log.addHandler(NullHandler())
# Valid values for the `linktype` argument of _link() and link().
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names for the link types, used in display/logging.
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """
    Create *dst* from *src* using the requested link type
    (LINK_HARD, LINK_SOFT or LINK_COPY), with Windows-specific fallbacks.
    """
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
    elif linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror callback: clear the read-only attribute and
    # retry the failed operation (typically os.remove on Windows).
    os.chmod(path, stat.S_IWRITE)
    func(path)
def rm_rf(path, max_retries=5):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        os.unlink(path)
    elif isdir(path):
        # Retry loop with linear back-off: on Windows files inside the tree
        # may be transiently locked (open handles, hard links, AV scanners).
        for i in range(max_retries):
            try:
                shutil.rmtree(path)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    # First fallback: clear read-only attributes and retry.
                    try:
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    # Second fallback: let cmd.exe's `rd /s /q` attempt it.
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path)
def rm_empty_dir(path):
    """
    Delete *path* if it is an empty directory; do nothing when it does not
    exist or still has contents.
    """
    try:
        os.rmdir(path)
    except OSError:
        # Either the directory does not exist or it is not empty --
        # both cases are intentionally ignored.
        pass
def yield_lines(path):
    """
    Yield the stripped, meaningful lines of the text file *path*.

    Lines that are empty after stripping, or that start with '#', are
    skipped.  Raises IOError when the file cannot be opened.
    """
    # Use a context manager so the file handle is closed deterministically
    # when the generator is exhausted or closed (the original leaked it).
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Placeholder prefix baked into relocatable packages at build time; it is
# rewritten to the real install prefix by update_prefix() below.
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    New-style lines carry three shlex-split fields (placeholder, mode,
    filename); old-style lines carry only a filename and default to
    (prefix_placeholder, 'text').  A missing file yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
            except ValueError:
                # old-style line: just a filename
                res[line] = (prefix_placeholder, 'text')
            else:
                res[f] = (placeholder, mode)
    except IOError:
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the replacement is longer than the
    placeholder, so NUL padding cannot preserve the data length."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`: within each NUL-terminated
    chunk, every occurrence of `a` is replaced with `b` and the chunk is
    padded with NUL bytes so the total length of `data` is unchanged.

    All input arguments are expected to be bytes objects.  Raises
    PaddingError when `b` is longer than `a`.
    """
    import re

    def _pad_replace(match):
        occurrences = match.group().count(a)
        padding = (len(a) - len(b)) * occurrences
        if padding < 0:
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_replace, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """
    Rewrite every occurrence of *placeholder* inside the file *path* with
    *new_prefix*.

    mode 'text' does a plain byte replacement; mode 'binary' uses
    binary_replace() to keep the file length constant.  Any other mode
    exits with an error.  The file's permission bits are preserved.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # Bug fix: the original used "Invalid mode:" % mode, a format string
        # with no conversion specifier, which raised TypeError instead of
        # exiting with the intended message.
        sys.exit("Invalid mode: %s" % mode)
    if new_data == data:
        return
    st = os.lstat(path)
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the name component of a canonical dist string 'name-version-build'."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package:
    writes <prefix>/conda-meta/<dist>.json, built from the package's
    info/index.json merged with *extra_info* (which takes precedence).
    """
    # base metadata comes from the package's own info/index.json
    with open(join(info_dir, 'index.json')) as fh:
        meta = json.load(fh)
    # layer caller-supplied entries on top
    meta.update(extra_info)
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    target = join(meta_dir, dist + '.json')
    with open(target, 'w') as fh:
        json.dump(meta, fh, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create (or, with remove=True, delete) menu shortcuts for the given
    package files.  Acts only in the default (root) environment and only
    when menuinst is importable; otherwise it is a silent no-op.
    """
    if abspath(prefix) != abspath(sys.prefix):
        # we currently only want to create menu items for packages
        # in default environment
        return
    shortcuts = [fn for fn in files
                 if fn.startswith('Menu/') and fn.endswith('.json')]
    if not shortcuts:
        return
    try:
        import menuinst
    except ImportError:
        return
    for fn in shortcuts:
        try:
            menuinst.install(join(prefix, fn), remove, prefix)
        except:
            # a failed shortcut must never abort the install
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    A missing script counts as success.  On Windows the script is a .bat
    run via %COMSPEC%; elsewhere a .sh run via /bin/bash.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no COMSPEC: cannot run batch files
            return False
    else:
        args = ['/bin/bash', path]
    # NOTE(review): `env` aliases os.environ (no copy), so the PREFIX/PKG_*
    # assignments below also mutate this process's environment -- confirm
    # whether that is intentional.
    env = os.environ
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """
    Return the URL that *dist* was fetched from, according to
    <pkgs_dir>/urls.txt, or None when it cannot be determined.
    """
    try:
        # context manager so the file handle is always closed
        # (the original leaked the open file object)
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            urls = fi.read().split()
    except IOError:
        return None
    # the most recently recorded matching URL wins
    for url in urls[::-1]:
        if url.endswith('/%s.tar.bz2' % dist):
            return url
    return None
def read_icondata(source_dir):
    """
    Return the base64-encoded contents of info/icon.png under *source_dir*,
    or None when the package ships no icon.
    """
    import base64
    try:
        # context manager so the file handle is always closed
        # (the original leaked the open file object)
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    """
    Return the set of filenames that must not be hard- or soft-linked,
    as listed in the package's info/no_link and info/no_softlink files.
    """
    names = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, listing)))
        except IOError:
            # a missing listing file simply contributes nothing
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir):
    """
    Symlink the root environment's conda, activate and deactivate scripts
    into <prefix>/bin, creating the bin directory if needed.  Existing
    entries (including dangling links) are left untouched.
    """
    bin_dir = join(prefix, 'bin')
    if not os.path.lexists(bin_dir):
        os.makedirs(bin_dir)
    for name in ('conda', 'activate', 'deactivate'):
        target = join(root_dir, 'bin', name)
        linkpath = join(bin_dir, name)
        if not os.path.lexists(linkpath):
            os.symlink(target, linkpath)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether a hard link can be created from the package cache into
    *prefix* (e.g. both on the same filesystem); return True on success,
    False on OSError.  The probe link is always cleaned up again.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    if not isdir(prefix):
        os.makedirs(prefix)
    try:
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        # remove the probe link, and the prefix dir if it ended up empty
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
    """
    Return the set of canonical names of all fetched packages, i.e. the
    .tar.bz2 files present in *pkgs_dir* (without their extension).
    """
    if not isdir(pkgs_dir):
        return set()
    tarballs = [fn for fn in os.listdir(pkgs_dir) if fn.endswith('.tar.bz2')]
    return set(fn[:-8] for fn in tarballs)
def is_fetched(pkgs_dir, dist):
    """Return whether the tarball for *dist* is present in *pkgs_dir*."""
    tarball = join(pkgs_dir, dist + '.tar.bz2')
    return isfile(tarball)
def rm_fetched(pkgs_dir, dist):
    """Delete the downloaded tarball for *dist* from the package cache,
    holding the cache lock while doing so."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()
    result = set()
    for dn in os.listdir(pkgs_dir):
        info = join(pkgs_dir, dn, 'info')
        # a package counts as extracted when both marker files are present
        if isfile(join(info, 'files')) and isfile(join(info, 'index.json')):
            result.add(dn)
    return result
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        try:
            t.extractall(path=path)
        finally:
            # always release the file handle, even when extraction fails
            # (the original leaked it on error)
            t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return whether *dist* has been fully extracted into *pkgs_dir*."""
    info = join(pkgs_dir, dist, 'info')
    return (isfile(join(info, 'files')) and
            isfile(join(info, 'index.json')))
def rm_extracted(pkgs_dir, dist):
    """Delete the extracted directory of *dist* from the package cache,
    holding the cache lock while doing so."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    names = set()
    for fn in os.listdir(meta_dir):
        if fn.endswith('.json'):
            names.add(fn[:-5])
    return names
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', dist + '.json')
    try:
        with open(meta_path) as fh:
            return json.load(fh)
    except IOError:
        return None
def delete_trash(prefix):
    """
    Best-effort removal of the .trash directories in all package dirs.

    *prefix* is currently unused.  Failures are logged and swallowed
    because trashed files may still be locked (Windows).
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # single attempt only: this runs opportunistically on every link()
            rm_rf(trash_dir, max_retries=1)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash
    tempdir should be the name of the directory in the trash

    Returns True on success, False when the file could not be moved into
    the trash of any package dir.  Used (from link()) on Windows, where a
    locked file cannot be deleted but can usually still be moved.
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create a trash dir here; try the next package dir
                continue
        if tempdir is None:
            import tempfile
            # unique subdirectory, so files with the same name never collide
            trash_dir = tempfile.mkdtemp(dir=trash_dir)
        else:
            trash_dir = join(trash_dir, tempdir)
        try:
            try:
                os.makedirs(join(trash_dir, dirname(f)))
            except OSError as e1:
                if e1.errno != errno.EEXIST:
                    continue
            shutil.move(join(prefix, f), join(trash_dir, f))
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (f, trash_dir, e))
        else:
            return True
    log.debug("Could not move %s to trash" % f)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Return the install metadata dict for `dist` in `prefix`, or None."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    '''
    if on_win:
        # Try deleting the trash every time we link something.
        delete_trash(prefix)
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_to_trash(prefix, f)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-containing, no-link, and symlinked files must be copied,
            # never hard/soft linked
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            return
        # rewrite the build-time placeholder prefix inside each file to the
        # actual install prefix
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    '''
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError: # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_to_trash(prefix, f)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass
        # remove the meta-file last
        os.unlink(meta_path)
        # collect every directory between each removed file and the prefix
        # root, then prune whichever are now empty (deepest first)
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    # Print the contents of `<prefix>/.messages.txt` (written by package
    # post-link scripts) to stdout, then always remove the file.
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # no messages were written -- nothing to show
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """Minimal command-line interface around the API functions above
    (list/extract/link/unlink); also used by the standalone installer."""
    from pprint import pprint
    from optparse import OptionParser
    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    logging.basicConfig()
    # --list/--extract/--link-all take no positional argument; the other
    # modes require exactly one TARBALL/NAME argument
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')
    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)
    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # probe hard-linkability once with the first package, then reuse
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)
    elif opts.unlink:
        unlink(prefix, dist)

if __name__ == '__main__':
    main()
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, relpath
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in: provides the context-manager interface without
        # doing any real locking.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
# True when running on Windows; selects the win32 link implementations below.
on_win = bool(sys.platform == 'win32')

if on_win:
    import ctypes
    from ctypes import wintypes

    # on Windows we cannot update these packages in the root environment
    # because of the file lock problem
    win_ignore_root = set(['python'])

    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # presumably the API is missing on this Windows version; soft links
        # will then raise "not supported" below -- TODO confirm
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        # no lock needed since this handler never emits anything
        self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    # Create `dst` from `src` using the requested mechanism:
    # LINK_HARD / LINK_SOFT / LINK_COPY (see constants above).
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror callback: clear the read-only bit on `path`
    # and retry the failed operation `func`; `excinfo` is ignored.
    os.chmod(path, stat.S_IWRITE)
    func(path)
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        os.unlink(path)
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    try:
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1

                    # last resort on Windows: shell out to `rd`
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                else:
                    if not isdir(path):
                        return
                    if trash:
                        try:
                            move_path_to_trash(path)
                            if not isdir(path):
                                return
                        except OSError as e2:
                            # BUGFIX: the statement that formerly followed this
                            # re-raise (accumulating e2 into msg) was unreachable
                            # dead code and has been removed; trash failures
                            # still propagate to the caller as before.
                            raise
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError: # directory might not exist or not be empty
        pass
def yield_lines(path):
    """Iterate over the meaningful lines of the file at `path`.

    Each line is yielded stripped of surrounding whitespace; blank lines
    and comment lines (starting with '#') are skipped.
    """
    for raw in open(path):
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            yield stripped
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # new-style line: "<placeholder> <mode> <filename>"
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # old-style line: just a filename; assume the default
                # placeholder and text mode
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # a package without a has_prefix file has no prefix replacements
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the new prefix is longer than the
    placeholder, leaving no room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    import re

    def _pad_replace(match):
        chunk = match.group()
        count = chunk.count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            # the replacement is longer than the placeholder; can't pad
            raise PaddingError(a, b, pad)
        return chunk.replace(a, b) + b'\0' * pad

    # match the placeholder plus the rest of the null-terminated string
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_replace, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    # Rewrite the build-time placeholder prefix embedded in the file at
    # `path` to `new_prefix`, in either text or padded-binary mode.
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        sys.exit("Invalid mode:" % mode)
    if new_data == data:
        return
    st = os.lstat(path)
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    # preserve the original permission bits
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name part of a canonical dist string,
    e.g. 'numpy-1.6.2-py26_0' -> 'numpy'."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info
    meta.update(extra_info)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    # Create (or, with remove=True, delete) OS menu shortcuts for every
    # Menu/*.json file shipped by the package.  Only applies to the default
    # environment, and is a no-op when menuinst is not installed.
    if abspath(prefix) != abspath(sys.prefix):
        # we currently only want to create menu items for packages
        # in default environment
        return
    menu_files = [f for f in files
                  if f.startswith('Menu/') and f.endswith('.json')]
    if not menu_files:
        return
    try:
        import menuinst
    except ImportError:
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # menu creation must never break an install; log and continue
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # scripts are named .<pkgname>-<action>.sh (or .bat on Windows) and live
    # in bin/ (Scripts/ on Windows)
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # no script shipped for this action counts as success
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        args = ['/bin/bash', path]
    env = os.environ
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    # Return the URL the tarball for `dist` was downloaded from, by scanning
    # the package cache's urls.txt log (most recent entry wins); None if it
    # was never recorded.
    try:
        data = open(join(pkgs_dir, 'urls.txt')).read()
        urls = data.split()
        for url in urls[::-1]:
            if url.endswith('/%s.tar.bz2' % dist):
                return url
    except IOError:
        pass
    return None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) base64-encoded as a text
    string, or None when the package ships no icon."""
    import base64
    icon_path = join(source_dir, 'info', 'icon.png')
    try:
        with open(icon_path, 'rb') as fh:
            raw = fh.read()
        return base64.b64encode(raw).decode('utf-8')
    except IOError:
        pass
    return None
def read_no_link(info_dir):
    # Return the set of file paths that must never be hard/soft linked,
    # combining the package's optional no_link and no_softlink lists.
    res = set()
    for fn in 'no_link', 'no_softlink':
        try:
            res.update(set(yield_lines(join(info_dir, fn))))
        except IOError:
            # the package does not ship this list -- nothing to add
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir):
root_conda = join(root_dir, 'bin', 'conda')
root_activate = join(root_dir, 'bin', 'activate')
root_deactivate = join(root_dir, 'bin', 'deactivate')
prefix_conda = join(prefix, 'bin', 'conda')
prefix_activate = join(prefix, 'bin', 'activate')
prefix_deactivate = join(prefix, 'bin', 'deactivate')
if not os.path.lexists(join(prefix, 'bin')):
os.makedirs(join(prefix, 'bin'))
if not os.path.lexists(prefix_conda):
os.symlink(root_conda, prefix_conda)
if not os.path.lexists(prefix_activate):
os.symlink(root_activate, prefix_activate)
if not os.path.lexists(prefix_deactivate):
os.symlink(root_deactivate, prefix_deactivate)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
src = join(pkgs_dir, dist, 'info', 'index.json')
dst = join(prefix, '.tmp-%s' % dist)
assert isfile(src), src
assert not isfile(dst), dst
if not isdir(prefix):
os.makedirs(prefix)
try:
_link(src, dst, LINK_HARD)
return True
except OSError:
return False
finally:
rm_rf(dst)
rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
if not isdir(pkgs_dir):
return set()
return set(fn[:-8] for fn in os.listdir(pkgs_dir)
if fn.endswith('.tar.bz2'))
def is_fetched(pkgs_dir, dist):
    """Return True if the tarball for `dist` is present in the cache."""
    tarball = '%s.tar.bz2' % dist
    return isfile(join(pkgs_dir, tarball))
def rm_fetched(pkgs_dir, dist):
    # Delete the downloaded tarball for `dist` from the package cache,
    # holding the cache lock while doing so.
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
"""
return the (set of canonical names) of all extracted packages
"""
if not isdir(pkgs_dir):
return set()
return set(dn for dn in os.listdir(pkgs_dir)
if (isfile(join(pkgs_dir, dn, 'info', 'files')) and
isfile(join(pkgs_dir, dn, 'info', 'index.json'))))
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed packages is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files.  However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    # True only when both metadata files that linking relies on exist.
    return (isfile(join(pkgs_dir, dist, 'info', 'files')) and
            isfile(join(pkgs_dir, dist, 'info', 'index.json')))
def rm_extracted(pkgs_dir, dist):
    # Remove the extracted directory for `dist`, under the cache lock.
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    # strip the '.json' suffix to recover the canonical dist names
    return set(fn[:-5] for fn in os.listdir(meta_dir) if fn.endswith('.json'))
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', dist + '.json')
    try:
        with open(meta_path) as fi:
            return json.load(fi)
    except IOError:
        # a missing meta file simply means the package is not linked here
        return None
def delete_trash(prefix=None):
    # Best-effort removal of every configured package cache's .trash
    # directory.  `prefix` is unused and kept only for backward
    # compatibility with older callers.  trash=False avoids recursing
    # into move_path_to_trash while emptying the trash itself.
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda import config
    # Try each package cache in turn; the first one whose trash dir we can
    # create and move into wins.
    for pkg_dir in config.pkgs_dirs:
        import tempfile
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            # only an already-existing trash dir is acceptable
            if e1.errno != errno.EEXIST:
                continue
        # a fresh temp dir per call avoids name collisions in the trash;
        # the path's layout relative to the root env is preserved inside it
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            return True
    log.debug("Could not move %s to trash" % path)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Return the install metadata dict for `dist` in `prefix`, or None."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-containing, no-link, and symlinked files must be
            # copied, never hard/soft linked
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            return
        # rewrite the build-time placeholder prefix inside each file to the
        # actual install prefix
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    '''
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
            name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError: # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass
        # remove the meta-file last
        os.unlink(meta_path)
        # collect every directory between each removed file and the prefix
        # root, then prune whichever are now empty (deepest first)
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    # Print the contents of `<prefix>/.messages.txt` (written by package
    # post-link scripts) to stdout, then always remove the file.
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # no messages were written -- nothing to show
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """Minimal command-line interface around the API functions above
    (list/extract/link/unlink); also used by the standalone installer."""
    from pprint import pprint
    from optparse import OptionParser
    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    logging.basicConfig()
    # --list/--extract/--link-all take no positional argument; the other
    # modes require exactly one TARBALL/NAME argument
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')
    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)
    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # probe hard-linkability once with the first package, then reuse
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)
    elif opts.unlink:
        unlink(prefix, dist)

if __name__ == '__main__':
    main()
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -27,19 +27,19 @@
from __future__ import print_function, division, absolute_import
-import time
-import os
-import json
import errno
+import json
+import logging
+import os
+import shlex
import shutil
import stat
-import sys
import subprocess
+import sys
import tarfile
+import time
import traceback
-import logging
-import shlex
-from os.path import abspath, basename, dirname, isdir, isfile, islink, join
+from os.path import abspath, basename, dirname, isdir, isfile, islink, join, relpath
try:
from conda.lock import Locked
@@ -143,13 +143,14 @@ def _remove_readonly(func, path, excinfo):
func(path)
-def rm_rf(path, max_retries=5):
+def rm_rf(path, max_retries=5, trash=True):
"""
Completely delete path
max_retries is the number of times to retry on failure. The default is
5. This only applies to deleting a directory.
+ If removing path fails and trash is True, files will be moved to the trash directory.
"""
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
@@ -180,6 +181,15 @@ def rm_rf(path, max_retries=5):
if not isdir(path):
return
+ if trash:
+ try:
+ move_path_to_trash(path)
+ if not isdir(path):
+ return
+ except OSError as e2:
+ raise
+ msg += "Retry with onerror failed (%s)\n" % e2
+
log.debug(msg + "Retrying after %s seconds..." % i)
time.sleep(i)
# Final time. pass exceptions to caller.
@@ -497,14 +507,14 @@ def is_linked(prefix, dist):
except IOError:
return None
-def delete_trash(prefix):
+def delete_trash(prefix=None):
from conda import config
for pkg_dir in config.pkgs_dirs:
trash_dir = join(pkg_dir, '.trash')
try:
log.debug("Trying to delete the trash dir %s" % trash_dir)
- rm_rf(trash_dir, max_retries=1)
+ rm_rf(trash_dir, max_retries=1, trash=False)
except OSError as e:
log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
@@ -512,11 +522,23 @@ def move_to_trash(prefix, f, tempdir=None):
"""
Move a file f from prefix to the trash
- tempdir should be the name of the directory in the trash
+ tempdir is a deprecated parameter, and will be ignored.
+
+ This function is deprecated in favor of `move_path_to_trash`.
+ """
+ return move_path_to_trash(join(prefix, f))
+
+def move_path_to_trash(path):
+ """
+ Move a path to the trash
"""
+ # Try deleting the trash every time we use it.
+ delete_trash()
+
from conda import config
for pkg_dir in config.pkgs_dirs:
+ import tempfile
trash_dir = join(pkg_dir, '.trash')
try:
@@ -525,26 +547,23 @@ def move_to_trash(prefix, f, tempdir=None):
if e1.errno != errno.EEXIST:
continue
- if tempdir is None:
- import tempfile
- trash_dir = tempfile.mkdtemp(dir=trash_dir)
- else:
- trash_dir = join(trash_dir, tempdir)
+ trash_dir = tempfile.mkdtemp(dir=trash_dir)
+ trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))
try:
- try:
- os.makedirs(join(trash_dir, dirname(f)))
- except OSError as e1:
- if e1.errno != errno.EEXIST:
- continue
- shutil.move(join(prefix, f), join(trash_dir, f))
+ os.makedirs(trash_dir)
+ except OSError as e2:
+ if e2.errno != errno.EEXIST:
+ continue
+ try:
+ shutil.move(path, trash_dir)
except OSError as e:
- log.debug("Could not move %s to %s (%s)" % (f, trash_dir, e))
+ log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
else:
return True
- log.debug("Could not move %s to trash" % f)
+ log.debug("Could not move %s to trash" % path)
return False
# FIXME This should contain the implementation that loads meta, not is_linked()
@@ -556,11 +575,6 @@ def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
Set up a package in a specified (environment) prefix. We assume that
the package has been extracted (using extract() above).
'''
- if on_win:
- # Try deleting the trash every time we link something.
- delete_trash(prefix)
-
-
index = index or {}
log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
(pkgs_dir, prefix, dist, linktype))
@@ -595,7 +609,7 @@ def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
log.error('failed to unlink: %r' % dst)
if on_win:
try:
- move_to_trash(prefix, f)
+ move_path_to_trash(dst)
except ImportError:
# This shouldn't be an issue in the installer anyway
pass
@@ -685,7 +699,7 @@ def unlink(prefix, dist):
if on_win and os.path.exists(join(prefix, f)):
try:
log.debug("moving to trash")
- move_to_trash(prefix, f)
+ move_path_to_trash(dst)
except ImportError:
# This shouldn't be an issue in the installer anyway
pass |
conda remove shouldn't fetch the package metadata
`conda remove` needs the package metadata only to print the channel location; it should obtain that locally, or at least fall back to the index cache.
| conda/cli/common.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import re
import os
import sys
import argparse
import contextlib
from os.path import abspath, basename, expanduser, isdir, join
import textwrap
import conda.config as config
from conda import console
from conda.utils import memoize
class Completer(object):
    """
    Subclass this class to get tab completion from argcomplete

    There are two ways to use this. One is to subclass and define `_get_items(self)`
    to return a list of all possible completions, and put that as the choices
    in the add_argument. If you do that, you will probably also want to set
    metavar to something, so that the argparse help doesn't show all possible
    choices.

    Another option is to define `_get_items(self)` in the same way, but also
    define `__init__(self, prefix, parsed_args, **kwargs)` (I'm not sure what
    goes in kwargs). The prefix will be the parsed arguments so far, and
    `parsed_args` will be an argparse args object. Then use

    p.add_argument('argname', ...).completer = TheSubclass

    Use this second option if the set of completions depends on the command
    line flags (e.g., the list of completed packages to install changes if -c
    flags are used).
    """
    @memoize
    def get_items(self):
        # Memoized so the potentially expensive _get_items() runs at most
        # once per completion request.
        return self._get_items()

    def __contains__(self, item):
        # This generally isn't all possibilities, and even if it is, we want
        # to give better error messages than argparse
        return True

    def __iter__(self):
        return iter(self.get_items())
class Environments(Completer):
    """Completer yielding the names of environments in the configured envs dirs."""
    def _get_items(self):
        res = []
        for dir in config.envs_dirs:
            try:
                res.extend(os.listdir(dir))
            except OSError:
                # envs dir may not exist or be unreadable; just skip it
                pass
        return res
class Packages(Completer):
    """Completer yielding package names from the channel index."""
    def __init__(self, prefix, parsed_args, **kwargs):
        self.prefix = prefix
        self.parsed_args = parsed_args

    def _get_items(self):
        # TODO: Include .tar.bz2 files for local installs.
        from conda.api import get_index
        args = self.parsed_args
        # use_cache=True keeps completion fast by avoiding a network fetch
        call_dict = dict(channel_urls=args.channel or (), use_cache=True,
                         prepend=not args.override_channels, unknown=args.unknown,
                         offline=args.offline)
        if hasattr(args, 'platform'):  # in search
            call_dict['platform'] = args.platform
        index = get_index(**call_dict)
        # index keys look like "name-version-build"; keep just the name
        return [i.rsplit('-', 2)[0] for i in index]
class InstalledPackages(Completer):
    """Completer yielding names of packages linked into the target environment."""
    def __init__(self, prefix, parsed_args, **kwargs):
        self.prefix = prefix
        self.parsed_args = parsed_args

    @memoize
    def _get_items(self):
        import conda.install
        packages = conda.install.linked(get_prefix(self.parsed_args))
        # dist strings look like "name-version-build"; keep just the name
        return [i.rsplit('-', 2)[0] for i in packages]
def add_parser_help(p):
    """
    So we can use consistent capitalization and periods in the help. You must
    use the add_help=False argument to ArgumentParser or add_parser to use
    this. Add this first to be consistent with the default argparse output.
    """
    p.add_argument(
        '-h', '--help',
        action=argparse._HelpAction,
        help="Show this help message and exit.",
    )
def add_parser_prefix(p):
    """Add the mutually exclusive -n/--name and -p/--prefix environment selectors."""
    npgroup = p.add_mutually_exclusive_group()
    npgroup.add_argument(
        '-n', "--name",
        action="store",
        help="Name of environment (in %s)." %
             os.pathsep.join(config.envs_dirs),
        metavar="ENVIRONMENT",
        choices=Environments(),
    )
    npgroup.add_argument(
        '-p', "--prefix",
        action="store",
        help="Full path to environment prefix (default: %s)." %
             config.default_prefix,
        metavar='PATH',
    )
def add_parser_yes(p):
    """Attach the -y/--yes and --dry-run confirmation-control flags to *p*."""
    for flags, help_text in (
            (("-y", "--yes"), "Do not ask for confirmation."),
            (("--dry-run",), "Only display what would have been done."),
    ):
        p.add_argument(*flags, action="store_true", help=help_text)
def add_parser_json(p):
    """Add the --json machine-readable-output flag."""
    p.add_argument(
        "--json",
        action="store_true",
        help="Report all output as json. Suitable for using conda programmatically."
    )
def add_parser_quiet(p):
    """Add the -q/--quiet flag (suppresses progress bars)."""
    p.add_argument(
        '-q', "--quiet",
        action="store_true",
        help="Do not display progress bar.",
    )
def add_parser_channels(p):
    """Add the repeatable -c/--channel option and --override-channels."""
    p.add_argument('-c', '--channel',
        action="append",
        help="""Additional channel to search for packages. These are URLs searched in the order
they are given (including file:// for local directories). Then, the defaults
or channels from .condarc are searched (unless --override-channels is given). You can use
'defaults' to get the default packages for conda, and 'system' to get the system
packages, which also takes .condarc into account. You can also use any name and the
.condarc channel_alias value will be prepended. The default channel_alias
is http://conda.anaconda.org/."""  # we can't put , here; invalid syntax
    )
    p.add_argument(
        "--override-channels",
        action="store_true",
        help="""Do not search default or .condarc channels. Requires --channel.""",
    )
def add_parser_known(p):
    """Add --unknown: include local-cache metadata from unknown channels."""
    p.add_argument(
        "--unknown",
        action="store_true",
        default=False,
        dest='unknown',
        help="Use index metadata from the local package cache, "
             "which are from unknown channels (installing local packages "
             "directly implies this option).",
    )
def add_parser_use_index_cache(p):
    """Add --use-index-cache (read the channel index from the local cache)."""
    p.add_argument(
        "--use-index-cache",
        action="store_true",
        default=False,
        help="Use cache of channel index files.",
    )
def add_parser_copy(p):
    """Add --copy (always copy files instead of hard-/soft-linking)."""
    p.add_argument(
        '--copy',
        action="store_true",
        help="Install all packages using copies instead of hard- or soft-linking."
    )
def add_parser_pscheck(p):
    """Add --force-pscheck, a deprecated no-op (help is hidden off Windows)."""
    p.add_argument(
        "--force-pscheck",
        action="store_true",
        help=("No-op. Included for backwards compatibility (deprecated)."
              if config.platform == 'win' else argparse.SUPPRESS)
    )
def add_parser_install(p):
    """
    Add the full option set shared by `conda install`, `conda create` and
    `conda update` to parser *p*.
    """
    add_parser_yes(p)
    p.add_argument(
        '-f', "--force",
        action="store_true",
        help="Force install (even when package already installed), "
             "implies --no-deps.",
    )
    add_parser_pscheck(p)
    # Add the file kwarg. We don't use {action="store", nargs='*'} as we don't
    # want to gobble up all arguments after --file.
    p.add_argument(
        "--file",
        default=[],
        action='append',
        help="Read package versions from the given file. Repeated file "
             "specifications can be passed (e.g. --file=file1 --file=file2).",
    )
    add_parser_known(p)
    p.add_argument(
        "--no-deps",
        action="store_true",
        help="Do not install dependencies.",
    )
    p.add_argument(
        '-m', "--mkdir",
        action="store_true",
        help="Create the environment directory if necessary.",
    )
    add_parser_use_index_cache(p)
    add_parser_use_local(p)
    add_parser_offline(p)
    add_parser_no_pin(p)
    add_parser_channels(p)
    add_parser_prefix(p)
    add_parser_quiet(p)
    add_parser_copy(p)
    p.add_argument(
        "--alt-hint",
        action="store_true",
        default=False,
        help="Use an alternate algorithm to generate an unsatisfiability hint.")
    # --update-deps / --no-update-deps share one dest; whichever appears last
    # on the command line wins.
    p.add_argument(
        "--update-dependencies", "--update-deps",
        action="store_true",
        dest="update_deps",
        default=config.update_dependencies,
        help="Update dependencies (default: %(default)s).",
    )
    p.add_argument(
        "--no-update-dependencies", "--no-update-deps",
        action="store_false",
        dest="update_deps",
        default=not config.update_dependencies,
        help="Don't update dependencies (default: %(default)s).",
    )
    add_parser_show_channel_urls(p)

    if 'update' in p.prog:
        # I don't know if p.prog is the correct thing to use here but it's the
        # only thing that seemed to contain the command name
        p.add_argument(
            'packages',
            metavar='package_spec',
            action="store",
            nargs='*',
            help="Packages to update in the conda environment.",
        ).completer = InstalledPackages
    else:  # create or install
        # Same as above except the completer is not only installed packages
        p.add_argument(
            'packages',
            metavar='package_spec',
            action="store",
            nargs='*',
            help="Packages to install into the conda environment.",
        ).completer = Packages
def add_parser_use_local(p):
    """Add --use-local (search locally built conda-build packages)."""
    p.add_argument(
        "--use-local",
        action="store_true",
        default=False,
        help="Use locally built packages.",
    )
def add_parser_offline(p):
    """Add --offline (never connect to the network)."""
    p.add_argument(
        "--offline",
        action="store_true",
        default=False,
        help="Offline mode, don't connect to the Internet.",
    )
def add_parser_no_pin(p):
    """Add --no-pin; note the dest is 'pinned' and the flag turns it False."""
    p.add_argument(
        "--no-pin",
        action="store_false",
        default=True,
        dest='pinned',
        help="Ignore pinned file.",
    )
def add_parser_show_channel_urls(p):
    """Add the paired --show-channel-urls / --no-show-channel-urls flags (one dest)."""
    p.add_argument(
        "--show-channel-urls",
        action="store_true",
        dest="show_channel_urls",
        default=config.show_channel_urls,
        help="Show channel urls (default: %(default)s).",
    )
    p.add_argument(
        "--no-show-channel-urls",
        action="store_false",
        dest="show_channel_urls",
        default=not config.show_channel_urls,
        help="Don't show channel urls (default: %(default)s).",
    )
def ensure_override_channels_requires_channel(args, dashc=True, json=False):
    """Abort when --override-channels is given without any channel source."""
    if args.override_channels and not (args.channel or args.use_local):
        # The message mentions the short option only where it exists (dashc).
        flag = '-c/--channel' if dashc else '--channel'
        error_and_exit('--override-channels requires %s or --use-local' % flag,
                       json=json, error_type="ValueError")
def confirm(args, message="Proceed", choices=('yes', 'no'), default='yes'):
    """
    Prompt on stdin and return the selected choice (the full word).

    Accepts either a full choice or its first letter; a bare Enter selects
    *default*.  Exits immediately when --dry-run was given.
    """
    assert default in choices, default
    if args.dry_run:
        print("Dry run: exiting")
        sys.exit(0)

    options = []
    for option in choices:
        if option == default:
            options.append('[%s]' % option[0])
        else:
            options.append(option[0])
    message = "%s (%s)? " % (message, '/'.join(options))
    # Map each full choice and its first letter back to the full choice.
    choices = {alt: choice for choice in choices for alt in [choice,
                                                             choice[0]]}
    choices[''] = default
    while True:
        # raw_input has a bug and prints to stderr, not desirable
        sys.stdout.write(message)
        sys.stdout.flush()
        user_choice = sys.stdin.readline().strip().lower()
        if user_choice not in choices:
            print("Invalid choice: %s" % user_choice)
        else:
            sys.stdout.write("\n")
            sys.stdout.flush()
            return choices[user_choice]
def confirm_yn(args, message="Proceed", default='yes', exit_no=True):
    """
    Ask a yes/no question; return True on 'yes'.

    Honors --dry-run (exits 0) and --yes / config.always_yes (auto-True).
    On 'no': exits with status 1 when *exit_no*, else returns False.
    """
    if args.dry_run:
        print("Dry run: exiting")
        sys.exit(0)
    if args.yes or config.always_yes:
        return True
    try:
        choice = confirm(args, message=message, choices=('yes', 'no'),
                         default=default)
    except KeyboardInterrupt:
        # no need to exit by showing the traceback
        sys.exit("\nOperation aborted. Exiting.")
    if choice == 'yes':
        return True
    if exit_no:
        sys.exit(1)
    return False
# --------------------------------------------------------------------
def ensure_name_or_prefix(args, command):
    """Exit with an error unless -n/--name or -p/--prefix was supplied."""
    if not (args.name or args.prefix):
        error_and_exit('either -n NAME or -p PREFIX option required,\n'
                       ' try "conda %s -h" for more details' % command,
                       json=getattr(args, 'json', False),
                       error_type="ValueError")
def find_prefix_name(name):
    """Resolve an environment *name* to its prefix path, or None if not found."""
    if name == config.root_env_name:
        return config.root_dir
    # First envs dir containing a directory of this name wins.
    for candidate in (join(envs_dir, name) for envs_dir in config.envs_dirs):
        if isdir(candidate):
            return candidate
    return None
def get_prefix(args, search=True):
    """
    Resolve the environment prefix selected by *args*.

    Precedence: -n/--name (looked up in envs_dirs when *search*), then
    -p/--prefix, then the configured default prefix.
    """
    if args.name:
        if '/' in args.name:
            error_and_exit("'/' not allowed in environment name: %s" %
                           args.name,
                           json=getattr(args, 'json', False),
                           error_type="ValueError")
        if args.name == config.root_env_name:
            return config.root_dir
        if search:
            prefix = find_prefix_name(args.name)
            if prefix:
                return prefix
        # Not found: target the first envs dir (e.g. for `conda create`).
        return join(config.envs_dirs[0], args.name)
    if args.prefix:
        return abspath(expanduser(args.prefix))
    return config.default_prefix
def inroot_notwritable(prefix):
    """
    return True if the prefix is under root and root is not writeable
    """
    under_root = abspath(prefix).startswith(config.root_dir)
    return under_root and not config.root_writable
def name_prefix(prefix):
    """Return the display name for *prefix* (the root env name for the root dir)."""
    is_root = abspath(prefix) == config.root_dir
    return config.root_env_name if is_root else basename(prefix)
def check_write(command, prefix, json=False):
    """Abort (via help.root_read_only) if *prefix* is under an unwritable root."""
    if inroot_notwritable(prefix):
        from conda.cli.help import root_read_only
        root_read_only(command, prefix, json=json)
# -------------------------------------------------------------------------
def arg2spec(arg, json=False):
    """
    Convert a command-line package argument into a conda spec string.

    Exits with an error for unparsable specs and for disallowed names.
    """
    spec = spec_from_line(arg)
    if spec is None:
        error_and_exit('Invalid package specification: %s' % arg,
                       json=json,
                       error_type="ValueError")
    parts = spec.split()
    name = parts[0]
    if name in config.disallow:
        error_and_exit("specification '%s' is disallowed" % name,
                       json=json,
                       error_type="ValueError")
    if len(parts) == 2:
        ver = parts[1]
        if not ver.startswith(('=', '>', '<', '!')):
            # Bare version: turn it into a prefix match.  "X.0" additionally
            # matches plain "X" via the "X|X.0*" alternation.
            if ver.endswith('.0'):
                return '%s %s|%s*' % (name, ver[:-2], ver)
            else:
                return '%s %s*' % (name, ver)
    return spec
def specs_from_args(args, json=False):
    """Convert each command-line argument into a conda spec string."""
    specs = []
    for arg in args:
        specs.append(arg2spec(arg, json=json))
    return specs
spec_pat = re.compile(r'''
(?P<name>[^=<>!\s]+) # package name
\s* # ignore spaces
(
(?P<cc>=[^=<>!]+(=[^=<>!]+)?) # conda constraint
|
(?P<pc>[=<>!]{1,2}.+) # new (pip-style) constraint(s)
)?
$ # end-of-line
''', re.VERBOSE)
def strip_comment(line):
    """Drop a trailing '#' comment (and trailing whitespace) from *line*."""
    code, _sep, _comment = line.partition('#')
    return code.rstrip()
def spec_from_line(line):
    """Parse one requirement line into a conda spec string, or None if invalid."""
    m = spec_pat.match(strip_comment(line))
    if m is None:
        return None
    name = m.group('name').lower()
    cc = m.group('cc')
    pc = m.group('pc')
    if cc:
        # conda-style "=ver(=build)" becomes "name ver build"
        return name + cc.replace('=', ' ')
    if pc:
        # pip-style constraint: strip internal spaces
        return name + ' ' + pc.replace(' ', '')
    return name
def specs_from_url(url, json=False):
    """Download *url* and parse it as a requirements file into spec strings."""
    from conda.fetch import TmpDownload
    with TmpDownload(url, verbose=False) as path:
        specs = []
        try:
            for line in open(path):
                line = line.strip()
                # skip blanks and comment-only lines
                if not line or line.startswith('#'):
                    continue
                spec = spec_from_line(line)
                if spec is None:
                    error_and_exit("could not parse '%s' in: %s" %
                                   (line, url), json=json,
                                   error_type="ValueError")
                specs.append(spec)
        except IOError:
            error_and_exit('cannot open file: %s' % path,
                           json=json,
                           error_type="IOError")
        return specs
def names_in_specs(names, specs):
    """Return True if the package name of any spec appears in *names*."""
    for spec in specs:
        if spec.split()[0] in names:
            return True
    return False
def check_specs(prefix, specs, json=False, create=False):
    """
    Exit with an error when *specs* is empty.

    NOTE(review): *prefix* is not used here — presumably kept for interface
    compatibility with callers; confirm before removing.
    """
    if len(specs) == 0:
        msg = ('too few arguments, must supply command line '
               'package specs or --file')
        if create:
            # Extra guidance only makes sense for `conda create`.
            msg += textwrap.dedent("""
You can specify one or more default packages to install when creating
an environment. Doing so allows you to call conda create without
explicitly providing any package names.
To set the provided packages, call conda config like this:
conda config --add create_default_packages PACKAGE_NAME
""")
        error_and_exit(msg,
                       json=json,
                       error_type="ValueError")
def disp_features(features):
    """Render a feature collection as "[f1 f2]", or '' when empty."""
    return '[%s]' % ' '.join(features) if features else ''
def stdout_json(d):
    """Pretty-print *d* as sorted JSON on stdout, followed by a newline."""
    import json
    json.dump(d, sys.stdout, indent=2, sort_keys=True)
    sys.stdout.write('\n')
def error_and_exit(message, json=False, newline=False, error_text=True,
                   error_type=None):
    """
    Print *message* (as a JSON error object when *json*) and exit(1).

    *newline* prints a blank line first; *error_text* prefixes "Error: "
    (both apply to plain-text output only).
    """
    if json:
        stdout_json(dict(error=message, error_type=error_type))
        sys.exit(1)
    else:
        if newline:
            print()
        if error_text:
            sys.exit("Error: " + message)
        else:
            sys.exit(message)
def exception_and_exit(exc, **kwargs):
    """Report *exc* via error_and_exit, defaulting error_type to its class name."""
    kwargs.setdefault('error_type', exc.__class__.__name__)
    message = '; '.join(map(str, exc.args))
    error_and_exit(message, **kwargs)
def get_index_trap(*args, **kwargs):
    """
    Retrieves the package index, but traps exceptions and reports them as
    JSON if necessary.
    """
    from conda.api import get_index
    # Pop our private 'json' flag so it is not forwarded to get_index().
    # (Replaces the membership test + del with the single-step pop idiom.)
    json = kwargs.pop('json', False)
    try:
        return get_index(*args, **kwargs)
    except BaseException as e:
        # BaseException so even SystemExit/KeyboardInterrupt are rendered as
        # JSON when requested; otherwise the exception propagates unchanged.
        if json:
            exception_and_exit(e, json=json)
        else:
            raise
@contextlib.contextmanager
def json_progress_bars(json=False):
    """Run the body inside console.json_progress_bars() only when *json* is set."""
    if json:
        with console.json_progress_bars():
            yield
    else:
        yield
def stdout_json_success(success=True, **kwargs):
    """Emit a JSON success payload (plus any extra keys) to stdout."""
    payload = dict(success=success, **kwargs)
    stdout_json(payload)
# Package names that may never be removed from the root environment
# (checked via names_in_specs by the remove command).
root_no_rm = 'python', 'pycosat', 'pyyaml', 'conda', 'openssl', 'requests'
def handle_envs_list(acc, output=True):
    """
    Append every non-root environment prefix to *acc*; when *output*, also
    print the `conda env list` style table (default env marked with '*').
    """
    from conda import misc

    if output:
        print("# conda environments:")
        print("#")

    def disp_env(prefix):
        fmt = '%-20s %s %s'
        default = '*' if prefix == config.default_prefix else ' '
        name = (config.root_env_name if prefix == config.root_dir else
                basename(prefix))
        if output:
            print(fmt % (name, default, prefix))

    for prefix in misc.list_prefixes():
        disp_env(prefix)
        if prefix != config.root_dir:
            acc.append(prefix)

    if output:
        print()
<|code_end|>
conda/cli/main_remove.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from os.path import join, exists
from argparse import RawDescriptionHelpFormatter
import errno
import logging
from conda.cli import common
from conda.console import json_progress_bars
help = "%s a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution. Note: conda uninstall is an alias for conda remove.
"""
example = """
Examples:
conda %s -n myenv scipy
"""
uninstall_help = "Alias for conda remove. See conda remove --help."
log = logging.getLogger(__name__)
def configure_parser(sub_parsers, name='remove'):
    """
    Register the `remove` subcommand (or its `uninstall` alias) on
    *sub_parsers* and wire up all of its options.
    """
    if name == 'remove':
        p = sub_parsers.add_parser(
            name,
            formatter_class=RawDescriptionHelpFormatter,
            description=descr % name.capitalize(),
            help=help % name.capitalize(),
            epilog=example % name,
            add_help=False,
        )
    else:
        # 'uninstall' just points users at `conda remove --help`.
        p = sub_parsers.add_parser(
            name,
            formatter_class=RawDescriptionHelpFormatter,
            description=uninstall_help,
            help=uninstall_help,
            epilog=example % name,
            add_help=False,
        )
    common.add_parser_help(p)
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "--all",
        action="store_true",
        help="%s all packages, i.e., the entire environment." % name.capitalize(),
    )
    p.add_argument(
        "--features",
        action="store_true",
        help="%s features (instead of packages)." % name.capitalize(),
    )
    common.add_parser_no_pin(p)
    common.add_parser_channels(p)
    common.add_parser_prefix(p)
    common.add_parser_quiet(p)
    common.add_parser_use_index_cache(p)
    common.add_parser_use_local(p)
    common.add_parser_offline(p)
    common.add_parser_pscheck(p)
    p.add_argument(
        'package_names',
        metavar='package_name',
        action="store",
        nargs='*',
        help="Package names to %s from the environment." % name,
    ).completer = common.InstalledPackages
    p.set_defaults(func=execute)
def execute(args, parser):
    """
    Implement `conda remove` / `conda uninstall`.

    Validates the arguments, fetches the package index, builds an unlink
    plan (whole env, features, or individual specs) and executes it.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked
    from conda import config

    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")

    prefix = common.get_prefix(args)
    if args.all and prefix == config.default_prefix:
        # FIX: was a bare error_and_exit; pass json/error_type so --json
        # output stays machine-readable like every other exit in this command.
        common.error_and_exit("cannot remove current environment. deactivate and run conda remove again",
                              json=args.json,
                              error_type="CantRemoveCurrentEnvironment")
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
    # FIX: both branches of the use_local if/else previously issued this
    # identical call; fetch the index once here instead.
    index = common.get_index_trap(channel_urls=channel_urls,
                                  prepend=not args.override_channels,
                                  use_cache=args.use_index_cache,
                                  json=args.json,
                                  offline=args.offline)

    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        actions = {inst.PREFIX: prefix}
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)
    else:
        specs = common.specs_from_args(args.package_names)
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # nothing linked, but --all still means "delete the directory"
            rm_rf(prefix)
            if args.json:
                common.stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")

    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)

    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return

    if not args.json:
        common.confirm_yn(args)

    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)

    if specs:
        # Record what was removed; best-effort when the file is unwritable.
        try:
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise

    if args.all:
        rm_rf(prefix)

    if args.json:
        common.stdout_json({
            'success': True,
            'actions': actions
        })
<|code_end|>
| conda/cli/common.py
from __future__ import print_function, division, absolute_import
import re
import os
import sys
import argparse
import contextlib
from os.path import abspath, basename, expanduser, isdir, join
import textwrap
import conda.config as config
from conda import console
from conda.utils import memoize
class Completer(object):
"""
Subclass this class to get tab completion from argcomplete
There are two ways to use this. One is to subclass and define `_get_items(self)`
to return a list of all possible completions, and put that as the choices
in the add_argument. If you do that, you will probably also want to set
metavar to something, so that the argparse help doesn't show all possible
choices.
Another option is to define `_get_items(self)` in the same way, but also
define `__init__(self, prefix, parsed_args, **kwargs)` (I'm not sure what
goes in kwargs). The prefix will be the parsed arguments so far, and
`parsed_args` will be an argparse args object. Then use
p.add_argument('argname', ...).completer = TheSubclass
Use this second option if the set of completions depends on the command
line flags (e.g., the list of completed packages to install changes if -c
flags are used).
"""
@memoize
def get_items(self):
return self._get_items()
def __contains__(self, item):
# This generally isn't all possibilities, and even if it is, we want
# to give better error messages than argparse
return True
def __iter__(self):
return iter(self.get_items())
class Environments(Completer):
def _get_items(self):
res = []
for dir in config.envs_dirs:
try:
res.extend(os.listdir(dir))
except OSError:
pass
return res
class Packages(Completer):
def __init__(self, prefix, parsed_args, **kwargs):
self.prefix = prefix
self.parsed_args = parsed_args
def _get_items(self):
# TODO: Include .tar.bz2 files for local installs.
from conda.api import get_index
args = self.parsed_args
call_dict = dict(channel_urls=args.channel or (), use_cache=True,
prepend=not args.override_channels, unknown=args.unknown,
offline=args.offline)
if hasattr(args, 'platform'): # in search
call_dict['platform'] = args.platform
index = get_index(**call_dict)
return [i.rsplit('-', 2)[0] for i in index]
class InstalledPackages(Completer):
def __init__(self, prefix, parsed_args, **kwargs):
self.prefix = prefix
self.parsed_args = parsed_args
@memoize
def _get_items(self):
import conda.install
packages = conda.install.linked(get_prefix(self.parsed_args))
return [i.rsplit('-', 2)[0] for i in packages]
def add_parser_help(p):
"""
So we can use consistent capitalization and periods in the help. You must
use the add_help=False argument to ArgumentParser or add_parser to use
this. Add this first to be consistent with the default argparse output.
"""
p.add_argument(
'-h', '--help',
action=argparse._HelpAction,
help="Show this help message and exit.",
)
def add_parser_prefix(p):
npgroup = p.add_mutually_exclusive_group()
npgroup.add_argument(
'-n', "--name",
action="store",
help="Name of environment (in %s)." %
os.pathsep.join(config.envs_dirs),
metavar="ENVIRONMENT",
choices=Environments(),
)
npgroup.add_argument(
'-p', "--prefix",
action="store",
help="Full path to environment prefix (default: %s)." %
config.default_prefix,
metavar='PATH',
)
def add_parser_yes(p):
p.add_argument(
"-y", "--yes",
action="store_true",
help="Do not ask for confirmation.",
)
p.add_argument(
"--dry-run",
action="store_true",
help="Only display what would have been done.",
)
def add_parser_json(p):
p.add_argument(
"--json",
action="store_true",
help="Report all output as json. Suitable for using conda programmatically."
)
def add_parser_quiet(p):
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Do not display progress bar.",
)
def add_parser_channels(p):
p.add_argument('-c', '--channel',
action="append",
help="""Additional channel to search for packages. These are URLs searched in the order
they are given (including file:// for local directories). Then, the defaults
or channels from .condarc are searched (unless --override-channels is given). You can use
'defaults' to get the default packages for conda, and 'system' to get the system
packages, which also takes .condarc into account. You can also use any name and the
.condarc channel_alias value will be prepended. The default channel_alias
is http://conda.anaconda.org/.""" # we can't put , here; invalid syntax
)
p.add_argument(
"--override-channels",
action="store_true",
help="""Do not search default or .condarc channels. Requires --channel.""",
)
def add_parser_known(p):
p.add_argument(
"--unknown",
action="store_true",
default=False,
dest='unknown',
help="Use index metadata from the local package cache, "
"which are from unknown channels (installing local packages "
"directly implies this option).",
)
def add_parser_use_index_cache(p):
p.add_argument(
"--use-index-cache",
action="store_true",
default=False,
help="Use cache of channel index files.",
)
def add_parser_no_use_index_cache(p):
    """Add --no-use-index-cache, turning the (default-on) index cache off."""
    p.add_argument(
        "--no-use-index-cache",
        action="store_false",
        default=True,
        dest="use_index_cache",
        # FIX: help text previously said "Use cache of channel index files.",
        # copied from --use-index-cache; this flag does the opposite.
        help="Do not use cache of channel index files.",
    )
def add_parser_copy(p):
p.add_argument(
'--copy',
action="store_true",
help="Install all packages using copies instead of hard- or soft-linking."
)
def add_parser_pscheck(p):
p.add_argument(
"--force-pscheck",
action="store_true",
help=("No-op. Included for backwards compatibility (deprecated)."
if config.platform == 'win' else argparse.SUPPRESS)
)
def add_parser_install(p):
add_parser_yes(p)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force install (even when package already installed), "
"implies --no-deps.",
)
add_parser_pscheck(p)
# Add the file kwarg. We don't use {action="store", nargs='*'} as we don't
# want to gobble up all arguments after --file.
p.add_argument(
"--file",
default=[],
action='append',
help="Read package versions from the given file. Repeated file "
"specifications can be passed (e.g. --file=file1 --file=file2).",
)
add_parser_known(p)
p.add_argument(
"--no-deps",
action="store_true",
help="Do not install dependencies.",
)
p.add_argument(
'-m', "--mkdir",
action="store_true",
help="Create the environment directory if necessary.",
)
add_parser_use_index_cache(p)
add_parser_use_local(p)
add_parser_offline(p)
add_parser_no_pin(p)
add_parser_channels(p)
add_parser_prefix(p)
add_parser_quiet(p)
add_parser_copy(p)
p.add_argument(
"--alt-hint",
action="store_true",
default=False,
help="Use an alternate algorithm to generate an unsatisfiability hint.")
p.add_argument(
"--update-dependencies", "--update-deps",
action="store_true",
dest="update_deps",
default=config.update_dependencies,
help="Update dependencies (default: %(default)s).",
)
p.add_argument(
"--no-update-dependencies", "--no-update-deps",
action="store_false",
dest="update_deps",
default=not config.update_dependencies,
help="Don't update dependencies (default: %(default)s).",
)
add_parser_show_channel_urls(p)
if 'update' in p.prog:
# I don't know if p.prog is the correct thing to use here but it's the
# only thing that seemed to contain the command name
p.add_argument(
'packages',
metavar='package_spec',
action="store",
nargs='*',
help="Packages to update in the conda environment.",
).completer=InstalledPackages
else: # create or install
# Same as above except the completer is not only installed packages
p.add_argument(
'packages',
metavar='package_spec',
action="store",
nargs='*',
help="Packages to install into the conda environment.",
).completer=Packages
def add_parser_use_local(p):
p.add_argument(
"--use-local",
action="store_true",
default=False,
help="Use locally built packages.",
)
def add_parser_offline(p):
p.add_argument(
"--offline",
action="store_true",
default=False,
help="Offline mode, don't connect to the Internet.",
)
def add_parser_no_pin(p):
p.add_argument(
"--no-pin",
action="store_false",
default=True,
dest='pinned',
help="Ignore pinned file.",
)
def add_parser_show_channel_urls(p):
p.add_argument(
"--show-channel-urls",
action="store_true",
dest="show_channel_urls",
default=config.show_channel_urls,
help="Show channel urls (default: %(default)s).",
)
p.add_argument(
"--no-show-channel-urls",
action="store_false",
dest="show_channel_urls",
default=not config.show_channel_urls,
help="Don't show channel urls (default: %(default)s).",
)
def ensure_override_channels_requires_channel(args, dashc=True, json=False):
if args.override_channels and not (args.channel or args.use_local):
if dashc:
error_and_exit('--override-channels requires -c/--channel or --use-local', json=json,
error_type="ValueError")
else:
error_and_exit('--override-channels requires --channel or --use-local', json=json,
error_type="ValueError")
def confirm(args, message="Proceed", choices=('yes', 'no'), default='yes'):
assert default in choices, default
if args.dry_run:
print("Dry run: exiting")
sys.exit(0)
options = []
for option in choices:
if option == default:
options.append('[%s]' % option[0])
else:
options.append(option[0])
message = "%s (%s)? " % (message, '/'.join(options))
choices = {alt:choice for choice in choices for alt in [choice,
choice[0]]}
choices[''] = default
while True:
# raw_input has a bug and prints to stderr, not desirable
sys.stdout.write(message)
sys.stdout.flush()
user_choice = sys.stdin.readline().strip().lower()
if user_choice not in choices:
print("Invalid choice: %s" % user_choice)
else:
sys.stdout.write("\n")
sys.stdout.flush()
return choices[user_choice]
def confirm_yn(args, message="Proceed", default='yes', exit_no=True):
if args.dry_run:
print("Dry run: exiting")
sys.exit(0)
if args.yes or config.always_yes:
return True
try:
choice = confirm(args, message=message, choices=('yes', 'no'),
default=default)
except KeyboardInterrupt:
# no need to exit by showing the traceback
sys.exit("\nOperation aborted. Exiting.")
if choice == 'yes':
return True
if exit_no:
sys.exit(1)
return False
# --------------------------------------------------------------------
def ensure_name_or_prefix(args, command):
if not (args.name or args.prefix):
error_and_exit('either -n NAME or -p PREFIX option required,\n'
' try "conda %s -h" for more details' % command,
json=getattr(args, 'json', False),
error_type="ValueError")
def find_prefix_name(name):
if name == config.root_env_name:
return config.root_dir
for envs_dir in config.envs_dirs:
prefix = join(envs_dir, name)
if isdir(prefix):
return prefix
return None
def get_prefix(args, search=True):
if args.name:
if '/' in args.name:
error_and_exit("'/' not allowed in environment name: %s" %
args.name,
json=getattr(args, 'json', False),
error_type="ValueError")
if args.name == config.root_env_name:
return config.root_dir
if search:
prefix = find_prefix_name(args.name)
if prefix:
return prefix
return join(config.envs_dirs[0], args.name)
if args.prefix:
return abspath(expanduser(args.prefix))
return config.default_prefix
def inroot_notwritable(prefix):
"""
return True if the prefix is under root and root is not writeable
"""
return (abspath(prefix).startswith(config.root_dir) and
not config.root_writable)
def name_prefix(prefix):
if abspath(prefix) == config.root_dir:
return config.root_env_name
return basename(prefix)
def check_write(command, prefix, json=False):
    """Abort with a help message if *prefix* lives in a read-only root."""
    if inroot_notwritable(prefix):
        from conda.cli.help import root_read_only
        root_read_only(command, prefix, json=json)
# -------------------------------------------------------------------------
def arg2spec(arg, json=False):
    """Convert one command-line package argument into a conda spec string.

    A bare version without a relational operator becomes a wildcard match,
    e.g. 'numpy 1.7' -> 'numpy 1.7*'.  Disallowed package names and
    unparsable arguments terminate the process.
    """
    spec = spec_from_line(arg)
    if spec is None:
        error_and_exit('Invalid package specification: %s' % arg,
                       json=json,
                       error_type="ValueError")
    parts = spec.split()
    name = parts[0]
    if name in config.disallow:
        error_and_exit("specification '%s' is disallowed" % name,
                       json=json,
                       error_type="ValueError")
    if len(parts) == 2:
        ver = parts[1]
        if not ver.startswith(('=', '>', '<', '!')):
            if ver.endswith('.0'):
                # 'x.0' should match both the 'x' and the 'x.0*' builds
                return '%s %s|%s*' % (name, ver[:-2], ver)
            else:
                return '%s %s*' % (name, ver)
    return spec
def specs_from_args(args, json=False):
    """Convert each command-line argument into a package spec string."""
    specs = []
    for arg in args:
        specs.append(arg2spec(arg, json=json))
    return specs
spec_pat = re.compile(r'''
(?P<name>[^=<>!\s]+) # package name
\s* # ignore spaces
(
(?P<cc>=[^=<>!]+(=[^=<>!]+)?) # conda constraint
|
(?P<pc>[=<>!]{1,2}.+) # new (pip-style) constraint(s)
)?
$ # end-of-line
''', re.VERBOSE)
def strip_comment(line):
    """Drop everything from the first '#' on, plus trailing whitespace."""
    head, _, _ = line.partition('#')
    return head.rstrip()
def spec_from_line(line):
    """Parse one requirement line into 'name [constraint]', or None."""
    m = spec_pat.match(strip_comment(line))
    if m is None:
        return None
    name, cc, pc = (m.group('name').lower(), m.group('cc'), m.group('pc'))
    if cc:
        # conda constraint: '=1.0=0' becomes ' 1.0 0'
        return name + cc.replace('=', ' ')
    elif pc:
        # pip-style constraint: normalize away internal spaces
        return name + ' ' + pc.replace(' ', '')
    else:
        return name
def specs_from_url(url, json=False):
    """Download a requirements file from *url* and parse it into specs.

    Blank lines and '#' comment lines are skipped; an unparsable line or
    an unreadable download terminates the process.
    """
    from conda.fetch import TmpDownload
    with TmpDownload(url, verbose=False) as path:
        specs = []
        try:
            for line in open(path):
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                spec = spec_from_line(line)
                if spec is None:
                    error_and_exit("could not parse '%s' in: %s" %
                                   (line, url), json=json,
                                   error_type="ValueError")
                specs.append(spec)
        except IOError:
            error_and_exit('cannot open file: %s' % path,
                           json=json,
                           error_type="IOError")
    return specs
def names_in_specs(names, specs):
    """Return True if any spec's package name appears in *names*."""
    for spec in specs:
        if spec.split()[0] in names:
            return True
    return False
def check_specs(prefix, specs, json=False, create=False):
    """Exit with a usage error when no package specs were supplied."""
    if len(specs) == 0:
        msg = ('too few arguments, must supply command line '
               'package specs or --file')
        if create:
            # 'conda create' has a config-based escape hatch; explain it
            msg += textwrap.dedent("""
                You can specify one or more default packages to install when creating
                an environment. Doing so allows you to call conda create without
                explicitly providing any package names.
                To set the provided packages, call conda config like this:
                conda config --add create_default_packages PACKAGE_NAME
            """)
        error_and_exit(msg,
                       json=json,
                       error_type="ValueError")
def disp_features(features):
    """Render a feature collection as '[f1 f2 ...]', or '' when empty."""
    return '[%s]' % ' '.join(features) if features else ''
def stdout_json(d):
import json
json.dump(d, sys.stdout, indent=2, sort_keys=True)
sys.stdout.write('\n')
def error_and_exit(message, json=False, newline=False, error_text=True,
                   error_type=None):
    """Report *message* (as JSON or plain text) and terminate.

    JSON mode emits {'error': ..., 'error_type': ...} and exits(1);
    text mode exits with the message itself as the status string.
    """
    if json:
        stdout_json(dict(error=message, error_type=error_type))
        sys.exit(1)
    if newline:
        print()
    if error_text:
        message = "Error: " + message
    sys.exit(message)
def exception_and_exit(exc, **kwargs):
    """Format an exception's args and delegate to error_and_exit."""
    kwargs.setdefault('error_type', exc.__class__.__name__)
    error_and_exit('; '.join(str(arg) for arg in exc.args), **kwargs)
def get_index_trap(*args, **kwargs):
    """
    Retrieves the package index, but traps exceptions and reports them as
    JSON if necessary.
    """
    from conda.api import get_index
    # 'json' is consumed here; get_index itself does not accept it
    if 'json' in kwargs:
        json = kwargs['json']
        del kwargs['json']
    else:
        json = False
    try:
        return get_index(*args, **kwargs)
    except BaseException as e:
        if json:
            exception_and_exit(e, json=json)
        else:
            raise
@contextlib.contextmanager
def json_progress_bars(json=False):
    """Context manager: route progress bars through the JSON console
    machinery when json=True, otherwise do nothing special."""
    if json:
        with console.json_progress_bars():
            yield
    else:
        yield
def stdout_json_success(success=True, **kwargs):
    """Emit a JSON result dict containing a 'success' flag plus extras."""
    payload = dict(kwargs, success=success)
    stdout_json(payload)
# packages conda itself depends on in the root environment; their removal
# from root is refused
root_no_rm = 'python', 'pycosat', 'pyyaml', 'conda', 'openssl', 'requests'
def handle_envs_list(acc, output=True):
    """List all conda environments.

    Non-root prefixes are appended to *acc* (mutated in place); with
    output=True a table is printed, marking the default env with '*'.
    """
    from conda import misc
    if output:
        print("# conda environments:")
        print("#")
    def disp_env(prefix):
        fmt = '%-20s %s %s'
        default = '*' if prefix == config.default_prefix else ' '
        name = (config.root_env_name if prefix == config.root_dir else
                basename(prefix))
        if output:
            print(fmt % (name, default, prefix))
    for prefix in misc.list_prefixes():
        disp_env(prefix)
        if prefix != config.root_dir:
            acc.append(prefix)
    if output:
        print()
conda/cli/main_remove.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from os.path import join, exists
from argparse import RawDescriptionHelpFormatter
import errno
import logging
from conda.cli import common
from conda.console import json_progress_bars
help = "%s a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution. Note: conda uninstall is an alias for conda remove.
"""
example = """
Examples:
conda %s -n myenv scipy
"""
uninstall_help = "Alias for conda remove. See conda remove --help."
log = logging.getLogger(__name__)
def configure_parser(sub_parsers, name='remove'):
    """Build the argparse sub-parser for 'conda remove' (or its
    'uninstall' alias) and wire it to execute()."""
    if name == 'remove':
        p = sub_parsers.add_parser(
            name,
            formatter_class=RawDescriptionHelpFormatter,
            description=descr % name.capitalize(),
            help=help % name.capitalize(),
            epilog=example % name,
            add_help=False,
        )
    else:
        # 'uninstall' is a pure alias; its help just points at 'remove'
        p = sub_parsers.add_parser(
            name,
            formatter_class=RawDescriptionHelpFormatter,
            description=uninstall_help,
            help=uninstall_help,
            epilog=example % name,
            add_help=False,
        )
    common.add_parser_help(p)
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "--all",
        action="store_true",
        help="%s all packages, i.e., the entire environment." % name.capitalize(),
    )
    p.add_argument(
        "--features",
        action="store_true",
        help="%s features (instead of packages)." % name.capitalize(),
    )
    common.add_parser_no_pin(p)
    common.add_parser_channels(p)
    common.add_parser_prefix(p)
    common.add_parser_quiet(p)
    # Putting this one first makes it the default
    common.add_parser_no_use_index_cache(p)
    common.add_parser_use_index_cache(p)
    common.add_parser_use_local(p)
    common.add_parser_offline(p)
    common.add_parser_pscheck(p)
    p.add_argument(
        'package_names',
        metavar='package_name',
        action="store",
        nargs='*',
        help="Package names to %s from the environment." % name,
    ).completer = common.InstalledPackages
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for 'conda remove' / 'conda uninstall'.

    Resolves the target prefix, builds an action plan (individual
    packages, features, or the whole environment with --all) and
    executes it, emitting JSON when requested.
    """
    import conda.plan as plan
    import conda.instructions as inst
    from conda.install import rm_rf, linked
    from conda import config
    if not (args.all or args.package_names):
        common.error_and_exit('no package names supplied,\n'
                              ' try "conda remove -h" for more details',
                              json=args.json,
                              error_type="ValueError")
    prefix = common.get_prefix(args)
    if args.all and prefix == config.default_prefix:
        common.error_and_exit("cannot remove current environment. deactivate and run conda remove again")
    common.check_write('remove', prefix, json=args.json)
    common.ensure_override_channels_requires_channel(args, json=args.json)
    channel_urls = args.channel or ()
    if args.use_local:
        from conda.fetch import fetch_index
        from conda.utils import url_path
        try:
            from conda_build.config import croot
        except ImportError:
            common.error_and_exit("you need to have 'conda-build >= 1.7.1' installed"
                                  " to use the --use-local option",
                                  json=args.json,
                                  error_type="RuntimeError")
        # remove the cache such that a refetch is made,
        # this is necessary because we add the local build repo URL
        fetch_index.cache = {}
        if exists(croot):
            channel_urls = [url_path(croot)] + list(channel_urls)
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    else:
        index = common.get_index_trap(channel_urls=channel_urls,
                                      prepend=not args.override_channels,
                                      use_cache=args.use_index_cache,
                                      json=args.json,
                                      offline=args.offline)
    specs = None
    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)
    elif args.all:
        if plan.is_root_prefix(prefix):
            common.error_and_exit('cannot remove root environment,\n'
                                  ' add -n NAME or -p PREFIX option',
                                  json=args.json,
                                  error_type="CantRemoveRoot")
        actions = {inst.PREFIX: prefix}
        for dist in sorted(linked(prefix)):
            plan.add_unlink(actions, dist)
    else:
        specs = common.specs_from_args(args.package_names)
        # conda's own runtime dependencies may never leave the root env
        if (plan.is_root_prefix(prefix) and
            common.names_in_specs(common.root_no_rm, specs)):
            common.error_and_exit('cannot remove %s from root environment' %
                                  ', '.join(common.root_no_rm),
                                  json=args.json,
                                  error_type="CantRemoveFromRoot")
        actions = plan.remove_actions(prefix, specs, index=index, pinned=args.pinned)
    if plan.nothing_to_do(actions):
        if args.all:
            # nothing linked but --all: still delete the whole tree
            rm_rf(prefix)
            if args.json:
                common.stdout_json({
                    'success': True,
                    'actions': actions
                })
            return
        common.error_and_exit('no packages found to remove from '
                              'environment: %s' % prefix,
                              json=args.json,
                              error_type="PackageNotInstalled")
    if not args.json:
        print()
        print("Package plan for package removal in environment %s:" % prefix)
        plan.display_actions(actions, index)
    if args.json and args.dry_run:
        common.stdout_json({
            'success': True,
            'dry_run': True,
            'actions': actions
        })
        return
    if not args.json:
        common.confirm_yn(args)
    if args.json and not args.quiet:
        with json_progress_bars():
            plan.execute_actions(actions, index, verbose=not args.quiet)
    else:
        plan.execute_actions(actions, index, verbose=not args.quiet)
    if specs:
        try:
            # record the removal request in the environment's history file
            with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                f.write('# remove specs: %s\n' % specs)
        except IOError as e:
            if e.errno == errno.EACCES:
                log.debug("Can't write the history file")
            else:
                raise
    if args.all:
        rm_rf(prefix)
    if args.json:
        common.stdout_json({
            'success': True,
            'actions': actions
        })
| conda/cli/common.py
--- a/conda/cli/common.py
+++ b/conda/cli/common.py
@@ -180,6 +180,16 @@ def add_parser_use_index_cache(p):
help="Use cache of channel index files.",
)
+
+def add_parser_no_use_index_cache(p):
+ p.add_argument(
+ "--no-use-index-cache",
+ action="store_false",
+ default=True,
+ dest="use_index_cache",
+ help="Use cache of channel index files.",
+ )
+
def add_parser_copy(p):
p.add_argument(
'--copy',
conda/cli/main_remove.py
--- a/conda/cli/main_remove.py
+++ b/conda/cli/main_remove.py
@@ -68,6 +68,8 @@ def configure_parser(sub_parsers, name='remove'):
common.add_parser_channels(p)
common.add_parser_prefix(p)
common.add_parser_quiet(p)
+ # Putting this one first makes it the default
+ common.add_parser_no_use_index_cache(p)
common.add_parser_use_index_cache(p)
common.add_parser_use_local(p)
common.add_parser_offline(p) |
Race condition for root environment detection
Occasionally, when two conda processes are running at the same time, a race condition can occur when determining whether the root environment is writable. Notice how the following run produces two different configs from the same setup:
```
$ conda info & conda info
Current conda install:
platform : osx-64
conda version : 3.18.3
conda-build version : 1.18.0
python version : 2.7.10.final.0
requests version : 2.8.1
root environment : /Users/pelson/miniconda (read only)
default environment : /Users/pelson/miniconda
envs directories : /Users/pelson/.conda/envs
/Users/pelson/envs
/Users/pelson/miniconda/envs
package cache : /Users/pelson/.conda/envs/.pkgs
/Users/pelson/envs/.pkgs
/Users/pelson/miniconda/pkgs
...
Current conda install:
platform : osx-64
conda version : 3.18.3
conda-build version : 1.18.0
python version : 2.7.10.final.0
requests version : 2.8.1
root environment : /Users/pelson/miniconda (writable)
default environment : /Users/pelson/miniconda
envs directories : /Users/pelson/miniconda/envs
package cache : /Users/pelson/miniconda/pkgs
...
```
The offending line is in https://github.com/conda/conda/blob/master/conda/config.py#L135-L143 and https://github.com/conda/conda/blob/master/conda/utils.py#L43-L54. My assumption is that the `.conda-try-write` is being removed by the other process, and the exception is being raised in the `finally` block.
| conda/utils.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir, join
import os
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
def can_open(file):
    """
    Return True if the given ``file`` can be opened for writing
    """
    try:
        # append-mode probe: creates the file if missing, never truncates
        with open(file, "ab"):
            pass
    except IOError:
        stderrlog.info("Unable to open %s\n" % file)
        return False
    return True
def can_open_all(files):
    """
    Return True if all of the provided ``files`` can be opened
    """
    return all(can_open(f) for f in files)
def can_open_all_files_in_prefix(prefix, files):
    """
    Returns True if all ``files`` at a given ``prefix`` can be opened
    """
    paths = (os.path.join(prefix, f) for f in files)
    return can_open_all(paths)
def try_write(dir_path):
    """
    Return True if a probe file can be written inside *dir_path*.

    NOTE(review): every conda process uses the same probe file name, so
    two concurrent processes race: one can unlink '.conda-try-write'
    while the other is still in the ``finally`` block, making the
    os.unlink raise and this function wrongly return False for a
    perfectly writable directory.
    """
    assert isdir(dir_path)
    try:
        try:
            with open(join(dir_path, '.conda-try-write'), mode='wb') as fo:
                fo.write(b'This is a test file.\n')
            return True
        finally:
            # XXX: If this raises an exception it will also return False
            os.unlink(join(dir_path, '.conda-try-write'))
    except (IOError, OSError):
        return False
def hashsum_file(path, mode='md5'):
    """Return the hex digest of *path*, hashed with hashlib *mode*."""
    digest = hashlib.new(mode)
    with open(path, 'rb') as stream:
        # stream in 256KB chunks so large packages never load into RAM
        for chunk in iter(lambda: stream.read(262144), b''):
            digest.update(chunk)
    return digest.hexdigest()
def md5_file(path):
    """Convenience wrapper: MD5 hex digest of the file at *path*."""
    return hashsum_file(path, mode='md5')
def url_path(path):
    """Convert a local path into a file:// URL (Windows drive-safe)."""
    path = abspath(path)
    if sys.platform == 'win32':
        # 'C:\x' -> '/C|/x' per the conda file-URL convention
        path = '/' + path.replace(':', '|').replace('\\', '/')
    return 'file://' + path
def human_bytes(n):
    """
    Return the number of bytes n in more human readable form.
    """
    # binary (1024-based) units: B, KB, MB, GB
    if n < 1024:
        return '%d B' % n
    n /= 1024
    if n < 1024:
        return '%d KB' % round(n)
    n /= 1024
    if n < 1024:
        return '%.1f MB' % n
    return '%.2f GB' % (n / 1024)
class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kw):
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # lists are unhashable; freeze them so the call is cacheable
                newargs.append(tuple(arg))
            else:
                # EAFP hashability probe.  The old code used
                # isinstance(arg, collections.Hashable), which raises
                # AttributeError on Python >= 3.10 (the ABC aliases were
                # removed from collections); hash() works everywhere.
                try:
                    hash(arg)
                except TypeError:
                    # uncacheable (e.g. a dict) -- better to not cache
                    # than blow up
                    return self.func(*args, **kw)
                newargs.append(arg)
        key = (tuple(newargs), frozenset(kw.items()))
        if key not in self.cache:
            self.cache[key] = self.func(*args, **kw)
        return self.cache[key]
# For instance methods only
class memoize(object): # 577452
    """Memoizing descriptor for instance methods.

    Results are stored per instance (in a name-mangled ``__cache`` dict)
    keyed by function, positional args and keyword items.
    """
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype=None):
        # accessed via the class -> hand back the raw function
        if obj is None:
            return self.func
        return partial(self, obj)

    def __call__(self, *args, **kw):
        obj = args[0]
        try:
            cache = obj.__cache
        except AttributeError:
            cache = obj.__cache = {}
        key = (self.func, args[1:], frozenset(kw.items()))
        if key not in cache:
            cache[key] = self.func(*args, **kw)
        return cache[key]
<|code_end|>
| conda/utils.py
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir
import os
import tempfile
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
def can_open(file):
    """
    Return True if the given ``file`` can be opened for writing
    """
    try:
        # append-mode probe: creates the file if missing, never truncates
        fp = open(file, "ab")
        fp.close()
        return True
    except IOError:
        stderrlog.info("Unable to open %s\n" % file)
        return False
def can_open_all(files):
    """
    Return True if all of the provided ``files`` can be opened
    """
    # short-circuits on the first unopenable file
    for f in files:
        if not can_open(f):
            return False
    return True
def can_open_all_files_in_prefix(prefix, files):
    """
    Returns True if all ``files`` at a given ``prefix`` can be opened
    """
    return can_open_all((os.path.join(prefix, f) for f in files))
def try_write(dir_path):
    # Writability probe.  tempfile.TemporaryFile picks a unique random
    # name per call, so concurrent conda processes can no longer race on
    # a shared '.conda-try-write' path; the context manager also handles
    # cleanup, removing the fragile finally/os.unlink of the old code.
    assert isdir(dir_path)
    try:
        with tempfile.TemporaryFile(prefix='.conda-try-write',
                                    dir=dir_path) as fo:
            fo.write(b'This is a test file.\n')
            return True
    except (IOError, OSError):
        return False
def hashsum_file(path, mode='md5'):
    """Return the hex digest of the file at *path* using hashlib *mode*."""
    h = hashlib.new(mode)
    with open(path, 'rb') as fi:
        while True:
            chunk = fi.read(262144)  # process chunks of 256KB
            if not chunk:
                break
            h.update(chunk)
    return h.hexdigest()
def md5_file(path):
    """Return the MD5 hex digest of the file at *path*."""
    return hashsum_file(path, 'md5')
def url_path(path):
    """Return a file:// URL for the local *path* (Windows drive-safe)."""
    path = abspath(path)
    if sys.platform == 'win32':
        # 'C:\x' -> '/C|/x' per the conda file-URL convention
        path = '/' + path.replace(':', '|').replace('\\', '/')
    return 'file://%s' % path
def human_bytes(n):
    """
    Return the number of bytes n in more human readable form.
    """
    # binary (1024-based) units: B, KB, MB, GB
    if n < 1024:
        return '%d B' % n
    k = n/1024
    if k < 1024:
        return '%d KB' % round(k)
    m = k/1024
    if m < 1024:
        return '%.1f MB' % m
    g = m/1024
    return '%.2f GB' % g
class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}
    def __call__(self, *args, **kw):
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # freeze lists so the call is cacheable
                newargs.append(tuple(arg))
            elif not isinstance(arg, collections.Hashable):
                # NOTE(review): collections.Hashable moved to
                # collections.abc in Python 3.3 and was removed from
                # collections in 3.10 -- this raises AttributeError on
                # modern interpreters; confirm the supported Python range.
                # uncacheable. a list, for instance.
                # better to not cache than blow up.
                return self.func(*args, **kw)
            else:
                newargs.append(arg)
        newargs = tuple(newargs)
        key = (newargs, frozenset(kw.items()))
        if key in self.cache:
            return self.cache[key]
        else:
            value = self.func(*args, **kw)
            self.cache[key] = value
            return value
# For instance methods only
class memoize(object): # 577452
    def __init__(self, func):
        self.func = func
    def __get__(self, obj, objtype=None):
        # accessed via the class -> return the plain function
        if obj is None:
            return self.func
        return partial(self, obj)
    def __call__(self, *args, **kw):
        obj = args[0]
        try:
            # per-instance cache dict (name-mangled to _memoize__cache)
            cache = obj.__cache
        except AttributeError:
            cache = obj.__cache = {}
        key = (self.func, args[1:], frozenset(kw.items()))
        try:
            res = cache[key]
        except KeyError:
            res = cache[key] = self.func(*args, **kw)
        return res
| conda/utils.py
--- a/conda/utils.py
+++ b/conda/utils.py
@@ -5,8 +5,10 @@
import hashlib
import collections
from functools import partial
-from os.path import abspath, isdir, join
+from os.path import abspath, isdir
import os
+import tempfile
+
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
@@ -43,13 +45,10 @@ def can_open_all_files_in_prefix(prefix, files):
def try_write(dir_path):
assert isdir(dir_path)
try:
- try:
- with open(join(dir_path, '.conda-try-write'), mode='wb') as fo:
- fo.write(b'This is a test file.\n')
- return True
- finally:
- # XXX: If this raises an exception it will also return False
- os.unlink(join(dir_path, '.conda-try-write'))
+ with tempfile.TemporaryFile(prefix='.conda-try-write',
+ dir=dir_path) as fo:
+ fo.write(b'This is a test file.\n')
+ return True
except (IOError, OSError):
return False
|
conda clean -pt as non-root user with root anaconda install
I have installed root miniconda at /opt/anaconda. When running
```
conda clean -pt
```
as a lesser user than root, I am seeing errors indicating conda is not checking permissions before attempting to delete package dirs:
```
conda clean -pt
Cache location: /opt/anaconda/pkgs
Will remove the following tarballs:
/opt/anaconda/pkgs
------------------
conda-3.18.3-py27_0.tar.bz2 175 KB
conda-env-2.4.4-py27_0.tar.bz2 24 KB
itsdangerous-0.24-py27_0.tar.bz2 16 KB
markupsafe-0.23-py27_0.tar.bz2 30 KB
flask-0.10.1-py27_1.tar.bz2 129 KB
jinja2-2.8-py27_0.tar.bz2 263 KB
anaconda-build-0.12.0-py27_0.tar.bz2 69 KB
flask-wtf-0.8.4-py27_1.tar.bz2 12 KB
flask-ldap-login-0.3.0-py27_1.tar.bz2 13 KB
---------------------------------------------------
Total: 730 KB
removing conda-3.18.3-py27_0.tar.bz2
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Include the output of the command 'conda info' in your report.
Traceback (most recent call last):
File "/opt/anaconda/envs/anaconda.org/bin/conda", line 5, in <module>
sys.exit(main())
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 195, in main
args_func(args, p)
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 202, in args_func
args.func(args, p)
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 331, in execute
rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 156, in rm_tarballs
os.unlink(os.path.join(pkgs_dir, fn))
OSError: [Errno 13] Permission denied: '/opt/anaconda/pkgs/conda-3.18.3-py27_0.tar.bz2'
```
| conda/cli/main_clean.py
<|code_start|>
# (c) 2012# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
Examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Build the argparse sub-parser for 'conda clean'."""
    p = sub_parsers.add_parser(
        'clean',
        description=descr,
        help=descr,
        epilog=example,
    )
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action="store_true",
        help="Remove index cache.",
    )
    p.add_argument(
        "-l", "--lock",
        action="store_true",
        help="Remove all conda lock files.",
    )
    p.add_argument(
        "-t", "--tarballs",
        action="store_true",
        help="Remove cached package tarballs.",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""Remove unused cached packages. Warning: this does not check
    for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""Remove files from the source cache of conda build.""",
    )
    p.set_defaults(func=execute)
def find_lock():
    """Yield the path of every conda lock directory.

    Scans the package caches, the root dir, each environment dir and
    (if conda-build is installed) the build root for directories whose
    name starts with LOCKFN.
    """
    from os.path import join
    from conda.lock import LOCKFN
    lock_dirs = config.pkgs_dirs[:]
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))
    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        # conda-build not installed: no build root to scan
        pass
    for dir in lock_dirs:
        if not os.path.exists(dir):
            continue
        for dn in os.listdir(dir):
            if os.path.isdir(join(dir, dn)) and dn.startswith(LOCKFN):
                path = join(dir, dn)
                yield path
def rm_lock(locks, verbose=True):
    """Delete every lock directory in *locks*, reporting each if verbose."""
    for lock_path in locks:
        if verbose:
            print('removing: %s' % lock_path)
        os.rmdir(lock_path)
def find_tarballs():
    """Return ({pkgs_dir: [tarball filenames]}, total size in bytes)."""
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        # a configured cache dir may not exist yet
        if not isdir(pkgs_dir):
            continue
        for fn in os.listdir(pkgs_dir):
            # include partial downloads too
            if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
                pkgs_dirs[pkgs_dir].append(fn)
    totalsize = 0
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            size = getsize(join(pkgs_dir, fn))
            totalsize += size
    return pkgs_dirs, totalsize
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """Delete the cached tarballs listed in *pkgs_dirs*.

    pkgs_dirs maps a cache directory to the tarball filenames inside it;
    totalsize is the pre-computed byte total (display only).  Entries
    that cannot be deleted (e.g. a root-owned cache while running as an
    unprivileged user) are skipped with a warning instead of aborting
    the whole clean run with an OSError.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)

    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return

    # hoisted: was previously (re)assigned inside the inner loop
    fmt = "%-40s %10s"
    if verbose:
        print("Will remove the following tarballs:")
        print()
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-'*len(pkgs_dir))
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            path = os.path.join(pkgs_dir, fn)
            try:
                # a read-only (e.g. root-owned) cache must not abort the
                # entire clean run; skip entries we cannot delete
                if os.access(path, os.W_OK):
                    if verbose:
                        print("removing %s" % fn)
                    os.unlink(path)
                elif verbose:
                    print("WARNING: cannot remove, file permissions: %s"
                          % path)
            except OSError as e:
                # permissions changed under us, or the file vanished
                if verbose:
                    print("WARNING: cannot remove %s: %s" % (path, e))
def find_pkgs():
    """Locate extracted packages that are no longer hard-linked anywhere.

    Returns (pkgs_dirs, warnings, totalsize, pkgsizes): pkgs_dirs maps a
    cache directory to removable package names, warnings collects files
    that could not be stat'ed, totalsize/pkgsizes are byte counts.
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    warnings = []
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        # guard against a configured cache dir that does not exist
        # (find_tarballs already checks this; listdir would raise OSError)
        if not isdir(pkgs_dir):
            continue
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
        for pkg in pkgs:
            breakit = False
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                if breakit:
                    break
                for fn in files:
                    try:
                        stat = lstat(join(root, fn))
                    except OSError as e:
                        warnings.append((fn, e))
                        continue
                    if stat.st_nlink > 1:
                        # a second hard link means the file is still
                        # installed in some environment
                        breakit = True
                        break
            else:
                # walk completed without finding any extra links
                pkgs_dirs[pkgs_dir].append(pkg)
    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice: by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)
    return pkgs_dirs, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """Remove the unused extracted packages found by find_pkgs().

    NOTE(review): unlike tarball removal there is no permission guard
    here -- rm_rf on a root-owned cache entry can still raise for
    unprivileged users; confirm whether the same skip-with-warning
    treatment is wanted.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
        for fn, exception in warnings:
            print(exception)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return
    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            fmt = "%-40s %10s"
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51) # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Delete the channel index cache under the primary packages dir."""
    from conda.install import rm_rf
    cache_dir = join(config.pkgs_dirs[0], 'cache')
    rm_rf(cache_dir)
def find_source_cache():
    """Collect conda-build's source/git/hg/svn cache dirs and sizes.

    Returns a dict whose keys match the keyword parameters of
    rm_source_cache() (warnings, cache_dirs, cache_sizes, total_size).
    """
    try:
        import conda_build.source
    except ImportError:
        # without conda-build there is no source cache to clean
        return {
            'warnings': ["conda-build is not installed; could not clean source cache"],
            'cache_dirs': [],
            'cache_sizes': {},
            'total_size': 0,
        }
    cache_dirs = {
        'source cache': conda_build.source.SRC_CACHE,
        'git cache': conda_build.source.GIT_CACHE,
        'hg cache': conda_build.source.HG_CACHE,
        'svn cache': conda_build.source.SVN_CACHE,
    }
    sizes = {}
    totalsize = 0
    for cache_type, cache_dir in cache_dirs.items():
        dirsize = 0
        for root, d, files in walk(cache_dir):
            for fn in files:
                size = lstat(join(root, fn)).st_size
                totalsize += size
                dirsize += size
        sizes[cache_type] = dirsize
    return {
        'warnings': [],
        'cache_dirs': cache_dirs,
        'cache_sizes': sizes,
        'total_size': totalsize,
    }
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """Remove conda-build's source/git/hg/svn caches.

    Prints a size summary and per-directory progress only when --json was
    NOT requested: every other cleaner in this module keeps stdout pure
    JSON in that mode, and unguarded prints here corrupted the JSON
    document emitted by execute().
    """
    verbose = not args.json

    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return

    if verbose:
        # size summary table; must stay off stdout in --json mode
        for cache_type in cache_dirs:
            print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
            print("%-40s %10s" % ("Size:",
                                  human_bytes(cache_sizes[cache_type])))
            print()
        print("%-40s %10s" % ("Total:", human_bytes(total_size)))

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for dir in cache_dirs.values():
        if verbose:
            print("Removing %s" % dir)
        rm_rf(dir)
def execute(args, parser):
    """Entry point for 'conda clean': run each requested cleaner and
    emit a JSON summary when --json was given."""
    json_result = {
        'success': True
    }
    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)
    if args.tarballs:
        pkgs_dirs, totalsize = find_tarballs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first, # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first], # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()
    if args.packages:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first, # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first], # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)
    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])
    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")
    if args.json:
        common.stdout_json(json_result)
conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, relpath
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op stand-in providing only the context-manager interface
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
# True when running on Windows; gates the ctypes-based link fallbacks below
on_win = bool(sys.platform == 'win32')
if on_win:
    import ctypes
    from ctypes import wintypes
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is not available on older Windows versions
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """

    def createLock(self):
        # no lock needed for a handler that never does anything
        self.lock = None

    def handle(self, record):
        pass

    def emit(self, record):
        pass
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` as a hard link, soft link, or copy,
    according to `linktype` (one of the LINK_* constants)."""
    if linktype == LINK_HARD:
        # hard links need the win32 API on Windows
        (win_hard_link if on_win else os.link)(src, dst)
        return
    if linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
        return
    if linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        is_relative_symlink = (not on_win and islink(src) and
                               not os.readlink(src).startswith('/'))
        if is_relative_symlink:
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the
    trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        os.unlink(path)
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    try:
                        # read-only files often cause rmtree failures on win
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    # last resort: the shell's recursive delete
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                else:
                    if not isdir(path):
                        return
                    if trash:
                        try:
                            move_path_to_trash(path)
                            if not isdir(path):
                                return
                        except OSError as e2:
                            # BUG FIX: a stray bare `raise` here aborted the
                            # retry loop on the first trash failure, making
                            # max_retries and this message dead code.
                            msg += "Retry with onerror failed (%s)\n" % e2
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path)
def rm_empty_dir(path):
    """Delete `path` if it is an empty directory; otherwise do nothing."""
    try:
        os.rmdir(path)
    except OSError:
        # path missing, not a directory, or not empty -- all fine
        pass
def yield_lines(path):
    """Iterate over the lines of a text file, stripped of surrounding
    whitespace, skipping blank lines and '#' comments."""
    for raw in open(path):
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            yield stripped
# Placeholder prefix baked into relocatable packages at build time; it gets
# rewritten to the real environment prefix at install time (update_prefix).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """Parse an info/has_prefix file into {filename: (placeholder, mode)}.

    Each line is normally "placeholder mode filename"; legacy lines that
    cannot be split that way are treated as a bare filename with the default
    placeholder in 'text' mode.  A missing file yields an empty dict.
    """
    mapping = {}
    try:
        for entry in yield_lines(path):
            try:
                tokens = [tok.strip('"\'') for tok in
                          shlex.split(entry, posix=False)]
                placeholder, mode, filename = tokens
            except ValueError:
                # legacy format: the whole line is the filename
                mapping[entry] = (prefix_placeholder, 'text')
            else:
                mapping[filename] = (placeholder, mode)
    except IOError:
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement is longer than the
    placeholder, so the result cannot be null-padded to the same length."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    import re

    def _pad_replacement(match):
        hit = match.group()
        count = hit.count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            # replacement longer than placeholder: no room to pad
            raise PaddingError(a, b, pad)
        return hit.replace(a, b) + b'\0' * pad

    # match the placeholder plus everything up to the terminating NUL
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    replaced = pattern.sub(_pad_replacement, data)
    assert len(replaced) == len(data)
    return replaced
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """Rewrite the build-time `placeholder` prefix inside `path` to
    `new_prefix`.

    mode='text' does a plain byte substitution; mode='binary' additionally
    null-pads so the file size stays constant (see binary_replace).  The
    file is unlinked and rewritten so a hard-linked cached copy is never
    modified in place.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # BUG FIX: this used to be "Invalid mode:" % mode -- a format string
        # with no %s conversion, which raised TypeError instead of exiting.
        sys.exit("Invalid mode: %s" % mode)

    if new_data == data:
        return
    st = os.lstat(path)
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name part of a canonical dist string,
    e.g. 'numpy-1.6.2-py26_0' -> 'numpy'."""
    # the last two '-'-separated fields are version and build string
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # start from the package's own info/index.json ...
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # ... and layer the install-time information on top
    meta.update(extra_info)

    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    out_path = join(meta_dir, dist + '.json')
    with open(out_path, 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/')
                  and f.lower().endswith('.json')]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Still deliberately broad: a broken menuinst
        # must never abort an installation.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    env_name = (None if abspath(prefix) == abspath(sys.prefix) else
                basename(prefix))
    env_setup_cmd = ("activate %s" % env_name) if env_name else None
    for f in menu_files:
        try:
            # menuinst 1.0.x has the older 3-argument signature
            if menuinst.__version__.startswith('1.0'):
                menuinst.install(join(prefix, f), remove, prefix)
            else:
                menuinst.install(join(prefix, f), remove,
                                 root_prefix=sys.prefix,
                                 target_prefix=prefix, env_name=env_name,
                                 env_setup_cmd=env_setup_cmd)
        except Exception:
            # BUG FIX: was a bare `except:` (see above); menu creation is
            # best-effort -- report the failure and keep going
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    A missing script counts as success.  The script receives PREFIX,
    PKG_NAME, PKG_VERSION, PKG_BUILDNUM (and SOURCE_DIR for pre-link) in
    its environment.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no shell available to run the .bat
            return False
    else:
        args = ['/bin/bash', path]
    # BUG FIX: use a copy -- binding os.environ directly made the PKG_*
    # variables below leak into (and persist in) this process's environment.
    env = os.environ.copy()
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """Return the URL `dist` was downloaded from, according to the
    pkgs_dir/urls.txt log, or None if it cannot be determined."""
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fh:
            urls = fh.read().split()
    except IOError:
        return None
    # the most recent matching entry wins, so scan from the end
    target = '/%s.tar.bz2' % dist
    for url in reversed(urls):
        if url.endswith(target):
            return url
    return None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) base64-encoded as text,
    or None if the package has no icon."""
    import base64
    icon_path = join(source_dir, 'info', 'icon.png')
    try:
        with open(icon_path, 'rb') as fh:
            raw = fh.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Return the set of filenames listed in info/no_link and
    info/no_softlink; either file may be absent."""
    names = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, fn)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir):
root_conda = join(root_dir, 'bin', 'conda')
root_activate = join(root_dir, 'bin', 'activate')
root_deactivate = join(root_dir, 'bin', 'deactivate')
prefix_conda = join(prefix, 'bin', 'conda')
prefix_activate = join(prefix, 'bin', 'activate')
prefix_deactivate = join(prefix, 'bin', 'deactivate')
if not os.path.lexists(join(prefix, 'bin')):
os.makedirs(join(prefix, 'bin'))
if not os.path.lexists(prefix_conda):
os.symlink(root_conda, prefix_conda)
if not os.path.lexists(prefix_activate):
os.symlink(root_activate, prefix_activate)
if not os.path.lexists(prefix_deactivate):
os.symlink(root_deactivate, prefix_deactivate)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Return True if a hard link can be made from the package cache into
    `prefix` (same filesystem, linking permitted), else False.

    Probes by hard-linking one known file from the extracted package; the
    probe link (and a prefix directory created just for the probe) is
    always cleaned up again.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        rm_rf(dst)
        # drop the prefix again if the probe had to create it
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
    """Return the set of canonical dist names that have a downloaded
    tarball in `pkgs_dir`; an absent cache dir yields the empty set."""
    if not isdir(pkgs_dir):
        return set()
    suffix = '.tar.bz2'
    return set(fn[:-len(suffix)] for fn in os.listdir(pkgs_dir)
               if fn.endswith(suffix))
def is_fetched(pkgs_dir, dist):
    """Return True if the tarball for `dist` exists in the package cache."""
    tarball = join(pkgs_dir, dist + '.tar.bz2')
    return isfile(tarball)
def rm_fetched(pkgs_dir, dist):
    """Delete the cached tarball for `dist` (no-op if absent)."""
    tarball = join(pkgs_dir, dist + '.tar.bz2')
    with Locked(pkgs_dir):
        rm_rf(tarball)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()

    def _complete(dn):
        # only count packages whose extraction finished (both marker files)
        info = join(pkgs_dir, dn, 'info')
        return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))

    return set(dn for dn in os.listdir(pkgs_dir) if _complete(dn))
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed packages is located in the packages directory.

    Extracts pkgs_dir/<dist>.tar.bz2 into pkgs_dir/<dist>.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True if `dist` has been fully extracted into the cache
    (both info/files and info/index.json are present)."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    """Delete the extracted directory for `dist` from the package cache."""
    target = join(pkgs_dir, dist)
    with Locked(pkgs_dir):
        rm_rf(target)
# ------- linkage of packages
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    suffix = '.json'
    return set(fn[:-len(suffix)] for fn in os.listdir(meta_dir)
               if fn.endswith(suffix))
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', dist + '.json')
    try:
        fi = open(meta_path)
    except IOError:
        return None
    with fi:
        return json.load(fi)
def delete_trash(prefix=None):
    """Best-effort removal of the .trash directory in every package cache.

    Failures are logged at debug level and ignored.
    NOTE(review): the `prefix` argument is not used here -- presumably kept
    for call-site compatibility; verify against callers.
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # trash=False: never move the trash into the trash
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
# Deprecated shim kept for backwards compatibility; returns the bool result
# of move_path_to_trash.
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each package cache in turn and returns True as soon as the move
    succeeds; returns False if no cache could take it.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        import tempfile
        trash_dir = join(pkg_dir, '.trash')

        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # this cache dir is unusable for trash; try the next one
                continue

        # a unique subdirectory so repeated moves of the same name never clash
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the path's location relative to the conda root inside it
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))

        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue

        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            return True

    log.debug("Could not move %s to trash" % path)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Return the install metadata dict for a linked package, or None if
    `dist` is not linked into `prefix`."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the pre-link script, links/copies every file listed in info/files,
    rewrites prefix placeholders, creates menu items, runs the post-link
    script, and finally records the install metadata in conda-meta.
    `index` optionally supplies repodata records keyed by filename.
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-rewritten, no-link, and symlinked files must be copies
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        # rewrite the build-time placeholder prefix in relocatable files
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # a post-link script may leave an alternate file list behind
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, deletes every file recorded in the package's
    conda-meta entry, removes the entry itself, then prunes any directories
    left empty.
    '''
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)

        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass

        # remove the meta-file last
        os.unlink(meta_path)

        # collect every ancestor directory (inside the prefix) of removed files
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        # deepest first, so children empty out before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the accumulated .messages.txt for `prefix` (if any) to stdout,
    then delete it."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # no messages were recorded
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """Minimal standalone CLI used by the installer: list, extract, link
    and unlink packages without importing the rest of conda."""
    from pprint import pprint
    from optparse import OptionParser

    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    logging.basicConfig()

    # --list/--extract/--link-all take no positional argument; everything
    # else needs exactly one TARBALL/NAME
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')

    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)

    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # probe once with the first package; hard-link everything if possible
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)
    elif opts.unlink:
        unlink(prefix, dist)


if __name__ == '__main__':
    main()
<|code_end|>
| conda/cli/main_clean.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
Examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Register the `conda clean` sub-command and its options on the
    top-level argument parser; dispatches to execute()."""
    p = sub_parsers.add_parser(
        'clean',
        description=descr,
        help=descr,
        epilog=example,
    )
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action="store_true",
        help="Remove index cache.",
    )
    p.add_argument(
        "-l", "--lock",
        action="store_true",
        help="Remove all conda lock files.",
    )
    p.add_argument(
        "-t", "--tarballs",
        action="store_true",
        help="Remove cached package tarballs.",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""Remove unused cached packages. Warning: this does not check
    for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""Remove files from the source cache of conda build.""",
    )
    p.set_defaults(func=execute)
def find_lock():
    """Yield the paths of all conda lock directories: in the package caches,
    the root prefix, every named environment, and (when conda-build is
    installed) its build root."""
    from conda.lock import LOCKFN
    # NOTE: the redundant local `from os.path import join` was removed;
    # join is already imported at module level.

    lock_dirs = config.pkgs_dirs[:]
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))

    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        pass

    # renamed loop variables so the builtin `dir` is no longer shadowed
    for lock_dir in lock_dirs:
        if not os.path.exists(lock_dir):
            continue
        for entry in os.listdir(lock_dir):
            # lock "files" are actually directories named LOCKFN.<suffix>
            if os.path.isdir(join(lock_dir, entry)) and entry.startswith(LOCKFN):
                path = join(lock_dir, entry)
                yield path
def rm_lock(locks, verbose=True):
    """Remove each lock directory in `locks`, reporting each removal
    unless `verbose` is False."""
    for lock_path in locks:
        if verbose:
            print('removing: %s' % lock_path)
        os.rmdir(lock_path)
def find_tarballs():
    """Collect cached package tarballs (including .part partial downloads).

    Returns (pkgs_dirs, totalsize): a mapping of each cache directory to the
    tarball filenames inside it, and their combined size in bytes.
    """
    pkgs_dirs = defaultdict(list)
    totalsize = 0
    for pkgs_dir in config.pkgs_dirs:
        if not isdir(pkgs_dir):
            continue
        for fn in os.listdir(pkgs_dir):
            if fn.endswith(('.tar.bz2', '.tar.bz2.part')):
                pkgs_dirs[pkgs_dir].append(fn)
                totalsize += getsize(join(pkgs_dir, fn))
    return pkgs_dirs, totalsize
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """Interactively delete the tarballs found by find_tarballs().

    Prints a per-cache size report (when `verbose`), asks for confirmation
    (unless suppressed via args), honors --json --dry-run, and skips
    unwritable tarballs with a warning.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)

    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return

    if verbose:
        print("Will remove the following tarballs:")
        print()
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-'*len(pkgs_dir))
            fmt = "%-40s %10s"
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            if os.access(os.path.join(pkgs_dir, fn), os.W_OK):
                if verbose:
                    print("Removing %s" % fn)
                os.unlink(os.path.join(pkgs_dir, fn))
            else:
                if verbose:
                    print("WARNING: cannot remove, file permissions: %s" % fn)
def find_pkgs():
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    """Collect extracted packages that are not hard-linked into any
    environment (every file has a link count of 1).

    Returns (pkgs_dirs, warnings, totalsize, pkgsizes): unused package names
    per cache directory, (filename, OSError) pairs for files that could not
    be stat'ed, their combined size in bytes, and per-package sizes aligned
    with pkgs_dirs.
    """
    warnings = []

    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        # BUG FIX: a missing cache directory made listdir() raise; skip it,
        # the same way find_tarballs() does.
        if not isdir(pkgs_dir):
            continue
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
        for pkg in pkgs:
            breakit = False
            for root, dirs, files in walk(join(pkgs_dir, pkg)):
                if breakit:
                    break
                for fn in files:
                    try:
                        st = lstat(join(root, fn))
                    except OSError as e:
                        warnings.append((fn, e))
                        continue
                    if st.st_nlink > 1:
                        # the file is hard-linked elsewhere => pkg is in use
                        breakit = True
                        break
            # BUG FIX: this used to be a for/else on the walk loop, which
            # appended the package anyway when the hard-linked file was found
            # in the *last* walked directory (the loop then finished without
            # hitting the `break`).
            if not breakit:
                pkgs_dirs[pkgs_dir].append(pkg)

    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, dirs, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice: by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)

    return pkgs_dirs, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """Interactively delete the unused packages found by find_pkgs().

    Prints a per-cache size report plus any stat warnings (when `verbose`),
    asks for confirmation (unless suppressed via args), and honors
    --json --dry-run.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
        for fn, exception in warnings:
            print(exception)

    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return

    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            fmt = "%-40s %10s"
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51)  # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Delete the repodata index cache under the primary package cache."""
    # NOTE: the redundant local `from conda.install import rm_rf` was
    # removed; rm_rf is already imported at module level.
    rm_rf(join(config.pkgs_dirs[0], 'cache'))
def find_source_cache():
try:
import conda_build.source
except ImportError:
return {
'warnings': ["conda-build is not installed; could not clean source cache"],
'cache_dirs': [],
'cache_sizes': {},
'total_size': 0,
}
cache_dirs = {
'source cache': conda_build.source.SRC_CACHE,
'git cache': conda_build.source.GIT_CACHE,
'hg cache': conda_build.source.HG_CACHE,
'svn cache': conda_build.source.SVN_CACHE,
}
sizes = {}
totalsize = 0
for cache_type, cache_dir in cache_dirs.items():
dirsize = 0
for root, d, files in walk(cache_dir):
for fn in files:
size = lstat(join(root, fn)).st_size
totalsize += size
dirsize += size
sizes[cache_type] = dirsize
return {
'warnings': [],
'cache_dirs': cache_dirs,
'cache_sizes': sizes,
'total_size': totalsize,
}
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """Report conda-build's source caches and, after confirmation, delete
    them.

    `args` supplies the json/dry_run/yes flags; the remaining parameters
    are the fields produced by find_source_cache().
    """
    verbose = not args.json
    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return

    # BUG FIX: these reports used to print unconditionally, corrupting the
    # --json output; guard them on `verbose` like rm_tarballs()/rm_pkgs() do.
    if verbose:
        for cache_type in cache_dirs:
            print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
            print("%-40s %10s" % ("Size:",
                                  human_bytes(cache_sizes[cache_type])))
            print()
        print("%-40s %10s" % ("Total:", human_bytes(total_size)))

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    # renamed from `dir` to avoid shadowing the builtin
    for cache_dir in cache_dirs.values():
        if verbose:
            print("Removing %s" % cache_dir)
        rm_rf(cache_dir)
def execute(args, parser):
    """Entry point for `conda clean`: run each requested cleanup action and
    emit a JSON summary when --json is given."""
    json_result = {
        'success': True
    }

    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)

    if args.tarballs:
        pkgs_dirs, totalsize = find_tarballs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)

    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()

    if args.packages:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)

    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])

    # at least one action flag is required
    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")

    if args.json:
        common.stdout_json(json_result)
conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, relpath
# Locked provides advisory locking around package-cache operations; fall back
# to a no-op when running standalone (Anaconda installer), where conda itself
# is not importable.
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass

on_win = bool(sys.platform == 'win32')

if on_win:
    import ctypes
    from ctypes import wintypes

    # ctypes bindings for the win32 link APIs
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # some Windows versions do not expose CreateSymbolicLinkW at all
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')

class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None

log.addHandler(NullHandler())

# Codes describing how a file is brought from the package cache into an
# environment; link() and the CLI use these throughout this module.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` as a hard link, soft link, or copy,
    according to `linktype` (one of the LINK_* constants)."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit on `path`
    and retry the failed operation `func`."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback: log why `path` could not be removed
    instead of raising."""
    err = exc_info[1]
    if err.errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err.errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the
    trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        if os.access(path, os.W_OK):
            os.unlink(path)
        else:
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    try:
                        # read-only files often cause rmtree failures on win
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    # last resort: the shell's recursive delete
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                else:
                    if not isdir(path):
                        return
                    if trash:
                        try:
                            move_path_to_trash(path)
                            if not isdir(path):
                                return
                        except OSError as e2:
                            # BUG FIX: a stray bare `raise` here aborted the
                            # retry loop on the first trash failure, making
                            # max_retries and this message dead code.
                            msg += "Retry with onerror failed (%s)\n" % e2
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:  # directory might not exist or not be empty
        pass
def yield_lines(path):
    """Yield the meaningful lines of the text file at `path`.

    Each yielded line is stripped of surrounding whitespace; blank lines
    and '#' comment lines are skipped.
    """
    for raw in open(path):
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            yield stripped
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    Each line is either 'placeholder mode filepath' (shlex-split, quotes
    stripped) or a bare filepath, in which case the default placeholder
    and 'text' mode are assumed.  A missing file yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # Old-style line: just a path, default placeholder/mode.
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # No has_prefix file for this package.
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the new prefix is longer than the
    placeholder, so the file cannot be NUL-padded back to its size."""
    pass
def binary_replace(data, a, b):
    """
    Replace every occurrence of the placeholder `a` in `data` with `b`,
    padding each affected NUL-terminated region with b'\\0' so the total
    length of `data` is preserved.  All arguments must be bytes objects.

    Raises PaddingError if `b` is longer than `a` (no room to pad).
    """
    import re

    shrink = len(a) - len(b)

    def _patch(match):
        # Work on a whole NUL-terminated chunk so padding lands before
        # the terminator and the overall layout is preserved.
        chunk = match.group()
        fill = shrink * chunk.count(a)
        if fill < 0:
            raise PaddingError(a, b, fill)
        return chunk.replace(a, b) + b'\0' * fill

    result = re.sub(re.escape(a) + b'([^\0]*?)\0', _patch, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """
    Rewrite the file at `path`, replacing `placeholder` with `new_prefix`.

    mode 'text' does a plain byte substitution; mode 'binary' uses
    binary_replace() to keep the file length constant.  The file is
    removed and rewritten (preserving its permission bits) so that a
    hard-linked package cache copy is not modified in place.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        sys.exit("Invalid mode:" % mode)

    if new_data == data:
        # Placeholder not present; nothing to rewrite.
        return
    st = os.lstat(path)
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package name of a canonical dist string.

    A dist looks like 'name-version-build' (e.g. 'numpy-1.6.2-py26_0');
    version and build never contain dashes, so splitting off the two
    rightmost '-' fields leaves the (possibly dash-containing) name.
    """
    fields = dist.rsplit('-', 2)
    return fields[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Reads info/index.json from `info_dir`, merges in `extra_info`
    (which wins on key conflicts), and writes the result to
    <prefix>/conda-meta/<dist>.json.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info
    meta.update(extra_info)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.

    Silently returns when there are no menu files, when the environment
    name starts with '_', or when menuinst is not importable; per-file
    menuinst errors are logged rather than raised.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/')
                  and f.lower().endswith('.json')]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except ImportError as e:
        logging.warn("Menuinst could not be imported:")
        logging.warn(e.message)
        return

    env_name = (None if abspath(prefix) == abspath(sys.prefix) else
                basename(prefix))
    env_setup_cmd = ("activate %s" % env_name) if env_name else None
    for f in menu_files:
        try:
            # menuinst 1.0.x has the older two/three-argument signature.
            if menuinst.__version__.startswith('1.0'):
                menuinst.install(join(prefix, f), remove, prefix)
            else:
                menuinst.install(join(prefix, f), remove,
                                 root_prefix=sys.prefix,
                                 target_prefix=prefix, env_name=env_name,
                                 env_setup_cmd=env_setup_cmd)
        except:
            # Menu creation is best-effort; never abort the install.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    The script is looked up at <prefix>/bin/.<name>-<action>.sh (or
    Scripts\\.<name>-<action>.bat on Windows); a missing script counts as
    success.  PREFIX, PKG_NAME, PKG_VERSION and PKG_BUILDNUM are exported
    to the script's environment, plus SOURCE_DIR for pre-link.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # No cmd.exe available; cannot run .bat scripts.
            return False
    else:
        args = ['/bin/bash', path]
    env = os.environ
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """
    Return the URL from which `dist` was fetched, as recorded in
    <pkgs_dir>/urls.txt, or None if unknown.

    urls.txt is scanned from the end so the most recent download wins.
    """
    try:
        data = open(join(pkgs_dir, 'urls.txt')).read()
        urls = data.split()
        for url in urls[::-1]:
            if url.endswith('/%s.tar.bz2' % dist):
                return url
    except IOError:
        # No urls.txt in this cache directory.
        pass
    return None
def read_icondata(source_dir):
    """
    Return the package icon (info/icon.png under `source_dir`) as a
    base64-encoded string, or None if the package has no icon.
    """
    import base64

    try:
        data = open(join(source_dir, 'info', 'icon.png'), 'rb').read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        pass
    return None
def read_no_link(info_dir):
    """
    Return the set of file paths listed in the package's info/no_link and
    info/no_softlink files; these files must be copied rather than linked.
    Missing files are ignored.
    """
    res = set()
    for fn in 'no_link', 'no_softlink':
        try:
            res.update(set(yield_lines(join(info_dir, fn))))
        except IOError:
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir):
    """
    Symlink the conda, activate and deactivate scripts from the root
    environment's bin/ into <prefix>/bin, creating the directory if
    needed.  Existing links/files are left untouched (lexists check, so
    dangling symlinks also block re-creation).
    """
    root_conda = join(root_dir, 'bin', 'conda')
    root_activate = join(root_dir, 'bin', 'activate')
    root_deactivate = join(root_dir, 'bin', 'deactivate')
    prefix_conda = join(prefix, 'bin', 'conda')
    prefix_activate = join(prefix, 'bin', 'activate')
    prefix_deactivate = join(prefix, 'bin', 'deactivate')
    if not os.path.lexists(join(prefix, 'bin')):
        os.makedirs(join(prefix, 'bin'))
    if not os.path.lexists(prefix_conda):
        os.symlink(root_conda, prefix_conda)
    if not os.path.lexists(prefix_activate):
        os.symlink(root_activate, prefix_activate)
    if not os.path.lexists(prefix_deactivate):
        os.symlink(root_deactivate, prefix_deactivate)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether hard-linking works from the package cache into `prefix`
    (it fails across filesystems).  Returns True if a test hard link of
    the package's info/index.json succeeds; the probe file and any
    directory created for it are always cleaned up.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
    """
    Return the set of canonical dist names for which a tarball has been
    downloaded into `pkgs_dir`.  A missing directory yields an empty set.
    """
    if not isdir(pkgs_dir):
        return set()
    # Strip the trailing '.tar.bz2' (8 characters) from each tarball name.
    return {fn[:-8] for fn in os.listdir(pkgs_dir)
            if fn.endswith('.tar.bz2')}
def is_fetched(pkgs_dir, dist):
    """Return True when the tarball for `dist` is present in `pkgs_dir`."""
    tarball = join(pkgs_dir, '%s.tar.bz2' % dist)
    return isfile(tarball)
def rm_fetched(pkgs_dir, dist):
    """Delete the downloaded tarball for `dist` from `pkgs_dir`, holding
    the package-cache lock while doing so."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages

    A package counts as extracted only when both info/files and
    info/index.json exist; a missing `pkgs_dir` yields an empty set.
    """
    if not isdir(pkgs_dir):
        return set()
    return set(dn for dn in os.listdir(pkgs_dir)
               if (isfile(join(pkgs_dir, dn, 'info', 'files')) and
                   isfile(join(pkgs_dir, dn, 'info', 'index.json'))))
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed packages is located in the packages directory.

    Extracts <pkgs_dir>/<dist>.tar.bz2 into <pkgs_dir>/<dist> under the
    package-cache lock.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files.  However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True when `dist` is fully extracted under `pkgs_dir`.

    Both info/files and info/index.json must exist for the package to
    count as extracted.
    """
    info_dir = join(pkgs_dir, dist, 'info')
    return (isfile(join(info_dir, 'files')) and
            isfile(join(info_dir, 'index.json')))
def rm_extracted(pkgs_dir, dist):
    """Delete the extracted directory for `dist` from `pkgs_dir`, holding
    the package-cache lock while doing so."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages
def linked(prefix):
    """
    Return the set of canonical names of linked packages in `prefix`.

    Package names are derived from the *.json files in
    <prefix>/conda-meta; a missing conda-meta dir yields an empty set.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    # Strip the '.json' extension (5 characters) from each metadata file.
    return {fn[:-5] for fn in os.listdir(meta_dir)
            if fn.endswith('.json')}
# FIXME Functions that begin with `is_` should return True/False
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', dist + '.json')
    try:
        with open(meta_path) as fi:
            return json.load(fi)
    except IOError:
        return None
def delete_trash(prefix=None):
    """
    Best-effort removal of the .trash directory in every package cache.
    Failures are logged at debug level and ignored.

    `prefix` is accepted for API compatibility but not used.
    """
    from conda import config

    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each package cache's .trash directory in turn; inside it a fresh
    temp subdirectory mirrors the path's location relative to the conda
    root, so same-named files from different prefixes cannot collide.
    Returns True on success, False if no cache directory accepted the move.
    """
    # Try deleting the trash every time we use it.
    delete_trash()

    from conda import config

    for pkg_dir in config.pkgs_dirs:
        import tempfile
        trash_dir = join(pkg_dir, '.trash')

        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # Cannot create a trash dir in this cache; try the next one.
                continue

        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))

        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue

        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            return True

    log.debug("Could not move %s to trash" % path)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Return the install metadata dict for `dist` in `prefix`, or None
    if the package is not linked there (delegates to is_linked)."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the pre-link script, links/copies every file of the package into
    the prefix (holding both locks), rewrites prefix placeholders,
    installs menu entries, runs the post-link script, and finally writes
    the conda-meta record.  `index` optionally supplies extra metadata
    keyed by '<dist>.tar.bz2'.
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            # Prefix-patched, no-link, and symlinked files must be copied,
            # never hard/soft linked (linking would mutate the cache copy).
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            # The internal '_cache' pseudo-package gets no metadata.
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # A post-link script may have left an alternate files list.
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes menu entries, deletes every file
    listed in the package's conda-meta record, then the record itself,
    and finally prunes any directories left empty.
    '''
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)

        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass

        # remove the meta-file last
        os.unlink(meta_path)

        # Collect every ancestor directory (within the prefix) of removed
        # files so empty ones can be pruned, deepest first.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the accumulated <prefix>/.messages.txt (written by package
    link scripts) to stdout, then delete it; missing file is fine."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """
    Minimal command-line interface used by the self-extracting installer:
    list linked packages, extract a tarball into the package cache, and
    link/unlink individual packages or all extracted ones.
    """
    from pprint import pprint
    from optparse import OptionParser

    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    logging.basicConfig()

    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            # Accept either a tarball path or a canonical dist name.
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')

    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)

    if opts.list:
        pprint(sorted(linked(prefix)))

    elif opts.link_all:
        # Probe hard-link support once, then link all packages the same way.
        dists = sorted(extracted(pkgs_dir))
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)

    elif opts.extract:
        extract(pkgs_dir, dist)

    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)

    elif opts.unlink:
        unlink(prefix, dist)
if __name__ == '__main__':
main()
| conda/cli/main_clean.py
--- a/conda/cli/main_clean.py
+++ b/conda/cli/main_clean.py
@@ -151,9 +151,13 @@ def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
for pkgs_dir in pkgs_dirs:
for fn in pkgs_dirs[pkgs_dir]:
- if verbose:
- print("removing %s" % fn)
- os.unlink(os.path.join(pkgs_dir, fn))
+ if os.access(os.path.join(pkgs_dir, fn), os.W_OK):
+ if verbose:
+ print("Removing %s" % fn)
+ os.unlink(os.path.join(pkgs_dir, fn))
+ else:
+ if verbose:
+ print("WARNING: cannot remove, file permissions: %s" % fn)
def find_pkgs():
conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -138,6 +138,13 @@ def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
+def warn_failed_remove(function, path, exc_info):
+ if exc_info[1].errno == errno.EACCES:
+ log.warn("Cannot remove, permission denied: {0}".format(path))
+ elif exc_info[1].errno == errno.ENOTEMPTY:
+ log.warn("Cannot remove, not empty: {0}".format(path))
+ else:
+ log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
"""
@@ -152,12 +159,15 @@ def rm_rf(path, max_retries=5, trash=True):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
- os.unlink(path)
+ if os.access(path, os.W_OK):
+ os.unlink(path)
+ else:
+ log.warn("Cannot remove, permission denied: {0}".format(path))
elif isdir(path):
for i in range(max_retries):
try:
- shutil.rmtree(path)
+ shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
return
except OSError as e:
msg = "Unable to delete %s\n%s\n" % (path, e)
@@ -189,7 +199,7 @@ def rm_rf(path, max_retries=5, trash=True):
log.debug(msg + "Retrying after %s seconds..." % i)
time.sleep(i)
# Final time. pass exceptions to caller.
- shutil.rmtree(path)
+ shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
""" |
conda clean -pt with empty package cache and non-root user
I have a root miniconda install at /opt/anaconda. I ran
```
conda clean -pt
```
successfully as root then immediately tried running the same command as a lesser user. I got this error even though the package cache was empty:
```
Cache location:
There are no tarballs to remove
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Include the output of the command 'conda info' in your report.
Traceback (most recent call last):
File "/opt/anaconda/envs/anaconda.org/bin/conda", line 5, in <module>
sys.exit(main())
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 195, in main
args_func(args, p)
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 202, in args_func
args.func(args, p)
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 340, in execute
pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 166, in find_pkgs
pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
OSError: [Errno 2] No such file or directory: '/home/user5/envs/.pkgs'
```
| conda/cli/main_clean.py
<|code_start|>
# (c) 2012# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
Examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Register the `conda clean` sub-command and its flags on the given
    argparse sub-parsers object; execute() is set as the handler."""
    p = sub_parsers.add_parser(
        'clean',
        description=descr,
        help=descr,
        epilog=example,
    )
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action="store_true",
        help="Remove index cache.",
    )
    p.add_argument(
        "-l", "--lock",
        action="store_true",
        help="Remove all conda lock files.",
    )
    p.add_argument(
        "-t", "--tarballs",
        action="store_true",
        help="Remove cached package tarballs.",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""Remove unused cached packages. Warning: this does not check
for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""Remove files from the source cache of conda build.""",
    )
    p.set_defaults(func=execute)
def find_lock():
    """
    Yield the path of every conda lock directory found in the package
    caches, the root prefix, each environment, and (when conda-build is
    installed) the build root.
    """
    from os.path import join

    from conda.lock import LOCKFN

    lock_dirs = config.pkgs_dirs[:]
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))

    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        # conda-build not installed; skip its build root.
        pass

    for dir in lock_dirs:
        if not os.path.exists(dir):
            continue
        for dn in os.listdir(dir):
            if os.path.isdir(join(dir, dn)) and dn.startswith(LOCKFN):
                path = join(dir, dn)
                yield path
def rm_lock(locks, verbose=True):
    """Remove each (empty) lock directory in `locks`, optionally printing
    each path as it is removed."""
    for path in locks:
        if verbose:
            print('removing: %s' % path)
        os.rmdir(path)
def find_tarballs():
    """
    Scan all package caches for downloaded tarballs (including partial
    '.tar.bz2.part' downloads).

    Returns (pkgs_dirs, totalsize): a dict mapping each existing cache
    directory to its tarball filenames, and the combined size in bytes.
    """
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        # A configured cache directory may not exist; skip it.
        if not isdir(pkgs_dir):
            continue
        for fn in os.listdir(pkgs_dir):
            if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
                pkgs_dirs[pkgs_dir].append(fn)

    totalsize = 0
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            size = getsize(join(pkgs_dir, fn))
            totalsize += size

    return pkgs_dirs, totalsize
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """
    Delete the tarballs listed in `pkgs_dirs` (as returned by
    find_tarballs) after printing a summary and, unless running with
    --json, asking for confirmation.

    Tarballs the current user cannot write are skipped with a warning
    instead of raising, so a non-root user can clean a shared
    (root-owned) package cache without aborting.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)

    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return

    if verbose:
        print("Will remove the following tarballs:")
        print()

        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-'*len(pkgs_dir))
            fmt = "%-40s %10s"
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            # Only unlink files we can actually write; otherwise warn and
            # continue so one unwritable tarball does not abort the clean.
            if os.access(os.path.join(pkgs_dir, fn), os.W_OK):
                if verbose:
                    print("Removing %s" % fn)
                os.unlink(os.path.join(pkgs_dir, fn))
            else:
                if verbose:
                    print("WARNING: cannot remove, file permissions: %s" % fn)
def find_pkgs():
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    """
    Scan all package caches for extracted packages that are no longer
    referenced by any environment (i.e. none of their files have a hard
    link count greater than 1).

    Returns (pkgs_dirs, warnings, totalsize, pkgsizes): a dict mapping
    each cache directory to its unused package names, a list of
    (filename, exception) pairs for entries that could not be stat'ed,
    the combined size in bytes, and per-cache lists of package sizes
    parallel to `pkgs_dirs`.
    """
    warnings = []

    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        # A configured package cache may not exist at all (e.g. a fresh
        # ~/envs/.pkgs for a non-root user); skip it instead of crashing
        # with OSError from listdir().
        if not isdir(pkgs_dir):
            continue
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
        for pkg in pkgs:
            breakit = False
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                if breakit:
                    break
                for fn in files:
                    try:
                        stat = lstat(join(root, fn))
                    except OSError as e:
                        warnings.append((fn, e))
                        continue
                    if stat.st_nlink > 1:
                        # print('%s is installed: %s' % (pkg, join(root, fn)))
                        breakit = True
                        break
            # Note: test the flag explicitly rather than using for/else --
            # a linked file found in the *last* walked directory would
            # otherwise let the else-clause fire and wrongly mark the
            # package as unused.
            if not breakit:
                pkgs_dirs[pkgs_dir].append(pkg)

    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice:  by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)

    return pkgs_dirs, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """
    Delete the unused packages listed in `pkgs_dirs` (as returned by
    find_pkgs), printing a size summary first and asking for
    confirmation unless running with --json.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
            for fn, exception in warnings:
                print(exception)

    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return

    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            fmt = "%-40s %10s"
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51)  # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            # rm_rf logs (rather than raises) on unwritable entries.
            rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Delete the repodata index cache directory of the primary package
    cache (<pkgs_dirs[0]>/cache)."""
    from conda.install import rm_rf

    rm_rf(join(config.pkgs_dirs[0], 'cache'))
def find_source_cache():
    """
    Measure conda-build's source caches (source, git, hg, svn).

    Returns a dict with keys 'warnings', 'cache_dirs', 'cache_sizes' and
    'total_size'; when conda-build is not installed only a warning is
    returned and the sizes are empty/zero.
    """
    try:
        import conda_build.source
    except ImportError:
        return {
            'warnings': ["conda-build is not installed; could not clean source cache"],
            'cache_dirs': [],
            'cache_sizes': {},
            'total_size': 0,
        }

    cache_dirs = {
        'source cache': conda_build.source.SRC_CACHE,
        'git cache': conda_build.source.GIT_CACHE,
        'hg cache': conda_build.source.HG_CACHE,
        'svn cache': conda_build.source.SVN_CACHE,
    }

    sizes = {}
    totalsize = 0
    for cache_type, cache_dir in cache_dirs.items():
        dirsize = 0
        for root, d, files in walk(cache_dir):
            for fn in files:
                size = lstat(join(root, fn)).st_size
                totalsize += size
                dirsize += size
        sizes[cache_type] = dirsize

    return {
        'warnings': [],
        'cache_dirs': cache_dirs,
        'cache_sizes': sizes,
        'total_size': totalsize,
    }
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """
    Delete conda-build's source cache directories after printing a size
    summary and confirming with the user (unless --json).  If `warnings`
    is non-empty (e.g. conda-build missing) they are printed and nothing
    is removed.
    """
    verbose = not args.json
    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return

    for cache_type in cache_dirs:
        print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
        print("%-40s %10s" % ("Size:",
                              human_bytes(cache_sizes[cache_type])))
        print()

    print("%-40s %10s" % ("Total:", human_bytes(total_size)))

    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return

    for dir in cache_dirs.values():
        print("Removing %s" % dir)
        rm_rf(dir)
def execute(args, parser):
    """
    Entry point for `conda clean`: dispatch to the selected cleanup
    actions (--lock, --tarballs, --index-cache, --packages,
    --source-cache) and emit a JSON summary when --json is given.
    Errors out if no action flag was supplied.
    """
    json_result = {
        'success': True
    }

    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)

    if args.tarballs:
        pkgs_dirs, totalsize = find_tarballs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)

    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()

    if args.packages:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)

    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])

    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")

    if args.json:
        common.stdout_json(json_result)
<|code_end|>
conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, relpath
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in context manager: accepts any arguments and does
        # no locking at all (single-process installer use only).
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass
on_win = bool(sys.platform == 'win32')

if on_win:
    import ctypes
    from ctypes import wintypes

    # Bind the Win32 link primitives directly, since os.link/os.symlink
    # are not available (or not reliable) on older Windows Pythons.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # Symlinks unsupported on this Windows version (pre-Vista).
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539

    A logging handler that discards every record (Python 2.6 has no
    logging.NullHandler).
    """
    def handle(self, record):
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` via hard link, soft link or copy,
    dispatching to the platform-appropriate primitive.

    Raises OSError on link failure, Exception on an unknown linktype.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror hook: clear the read-only bit, then retry the
    # operation that failed (used for Windows read-only files).
    os.chmod(path, stat.S_IWRITE)
    func(path)
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.

    Unremovable entries (e.g. permission denied for a non-root user on a
    shared install) are logged as warnings instead of raising, so one
    root-owned file does not abort the whole operation.
    """
    def warn_failed_remove(function, path, exc_info):
        # onerror hook for shutil.rmtree -- log and continue instead of
        # propagating the error.
        if exc_info[1].errno == errno.EACCES:
            log.warn("Cannot remove, permission denied: {0}".format(path))
        elif exc_info[1].errno == errno.ENOTEMPTY:
            log.warn("Cannot remove, not empty: {0}".format(path))
        else:
            log.warn("Cannot remove, unknown reason: {0}".format(path))

    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        if os.access(path, os.W_OK):
            os.unlink(path)
        else:
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    try:
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                else:
                    if not isdir(path):
                        return
                    if trash:
                        try:
                            move_path_to_trash(path)
                            if not isdir(path):
                                return
                        except OSError as e2:
                            raise
                            msg += "Retry with onerror failed (%s)\n" % e2
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:  # directory might not exist or not be empty
        pass
def yield_lines(path):
    """Yield the stripped, non-blank, non-comment (leading '#') lines of path.

    Raises IOError (on first iteration) if the file cannot be opened.
    """
    # use a context manager so the file handle is closed deterministically
    # (the original relied on garbage collection to close it)
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# default placeholder prefix baked into relocatable package files at build time
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # older format: the whole line is the filename; assume the
                # default placeholder and text-mode replacement
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # missing has_prefix file means no prefix replacement is needed
        pass
    return res
class PaddingError(Exception):
    """Raised when a replacement prefix is longer than the placeholder."""
    pass
def binary_replace(data, a, b):
    """
    Replace occurrences of the placeholder `a` with `b` inside each
    null-terminated stretch of `data`, appending null bytes so the buffer
    keeps its original length.  All arguments are bytes objects.
    """
    import re

    def _pad_replace(match):
        count = match.group().count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            # replacement is longer than the placeholder: cannot pad
            raise PaddingError(a, b, pad)
        return match.group().replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    replaced = pattern.sub(_pad_replace, data)
    assert len(replaced) == len(data)
    return replaced
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """Rewrite the embedded `placeholder` prefix in the file at `path` with
    `new_prefix`, using plain text replacement (mode='text') or
    length-preserving null-padded replacement (mode='binary').
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # bug fix: the original message was "Invalid mode:" with no
        # conversion specifier, so the % formatting itself raised
        # TypeError instead of exiting with a readable error
        sys.exit("Invalid mode: %r" % mode)
    if new_data == data:
        return
    st = os.lstat(path)
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name portion of a canonical dist string
    (e.g. 'numpy-1.6.2-py26_0' -> 'numpy')."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info
    meta.update(extra_info)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/')
                  and f.lower().endswith('.json')]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    # NOTE(review): bare except deliberately swallows any import-time failure,
    # but it also catches KeyboardInterrupt -- confirm that is intended
    except:
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    env_name = (None if abspath(prefix) == abspath(sys.prefix) else
                basename(prefix))
    env_setup_cmd = ("activate %s" % env_name) if env_name else None
    for f in menu_files:
        try:
            # menuinst 1.0 has a narrower install() signature
            if menuinst.__version__.startswith('1.0'):
                menuinst.install(join(prefix, f), remove, prefix)
            else:
                menuinst.install(join(prefix, f), remove,
                                 root_prefix=sys.prefix,
                                 target_prefix=prefix, env_name=env_name,
                                 env_setup_cmd=env_setup_cmd)
        except:
            # a failed menu item must not abort the whole install
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    # a package without a script for this action counts as success
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        args = ['/bin/bash', path]
    # NOTE(review): this is a reference, not a copy -- the PKG_* / PREFIX
    # assignments below mutate the current process environment
    env = os.environ
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """Return the most recent URL in pkgs_dir/urls.txt ending in
    /<dist>.tar.bz2, or None if the file or URL is missing.
    """
    try:
        # context manager closes the handle (the original leaked it)
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            data = fi.read()
    except IOError:
        return None
    # scan newest-first so the latest fetch of a dist wins
    for url in data.split()[::-1]:
        if url.endswith('/%s.tar.bz2' % dist):
            return url
    return None
def read_icondata(source_dir):
    """Return the base64-encoded contents of info/icon.png under source_dir,
    or None if the icon file is missing.
    """
    import base64
    try:
        # context manager closes the handle (the original leaked it)
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    """Return the union of entries listed in info/no_link and
    info/no_softlink (files that must be copied, never linked)."""
    names = set()
    for fname in ('no_link', 'no_softlink'):
        try:
            names |= set(yield_lines(join(info_dir, fname)))
        except IOError:
            # a missing listing file simply contributes nothing
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir):
    """Symlink the root environment's conda, activate and deactivate
    scripts into prefix/bin, creating the bin directory if needed.
    Existing entries (even dangling links) are left untouched."""
    bin_dir = join(prefix, 'bin')
    if not os.path.lexists(bin_dir):
        os.makedirs(bin_dir)
    for name in ('conda', 'activate', 'deactivate'):
        target = join(root_dir, 'bin', name)
        link_path = join(bin_dir, name)
        # lexists() so a dead symlink still counts as present
        if not os.path.lexists(link_path):
            os.symlink(target, link_path)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether a hard link from the package cache into prefix works
    (i.e. both are on the same filesystem); returns True/False."""
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        # always clean up the probe file and any prefix dir we created
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
    """Return the set of canonical names of all fetched tarballs in
    pkgs_dir (empty set if the directory does not exist)."""
    if not isdir(pkgs_dir):
        return set()
    entries = os.listdir(pkgs_dir)
    # strip the '.tar.bz2' suffix to get the canonical dist name
    return set(name[:-8] for name in entries if name.endswith('.tar.bz2'))
def is_fetched(pkgs_dir, dist):
    """True if the tarball for dist is present in pkgs_dir."""
    tarball = join(pkgs_dir, dist + '.tar.bz2')
    return isfile(tarball)
def rm_fetched(pkgs_dir, dist):
    """Remove the cached tarball for dist, holding the pkgs_dir lock."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()
    result = set()
    for entry in os.listdir(pkgs_dir):
        info = join(pkgs_dir, entry, 'info')
        # a package counts as extracted when both metadata files exist
        if isfile(join(info, 'files')) and isfile(join(info, 'index.json')):
            result.add(entry)
    return result
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed packages is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """True if dist has been extracted into pkgs_dir (both info/files and
    info/index.json are present)."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    """Remove the extracted directory for dist, holding the pkgs_dir lock."""
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    names = set()
    for entry in os.listdir(meta_dir):
        if entry.endswith('.json'):
            # strip the '.json' suffix to recover the dist name
            names.add(entry[:-5])
    return names
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    try:
        with open(join(prefix, 'conda-meta', dist + '.json')) as fh:
            return json.load(fh)
    except IOError:
        # no conda-meta entry -> the package is not linked here
        return None
def delete_trash(prefix=None):
    """Best-effort removal of the .trash directory inside every package
    cache.  `prefix` is accepted but not used by this implementation."""
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # single attempt, no re-trashing: failure is only logged
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        import tempfile
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create a trash dir in this cache; try the next one
                continue
        # unique subdirectory so repeated moves of the same name cannot collide
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the path's layout relative to the root prefix inside the trash
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            return True
    log.debug("Could not move %s to trash" % path)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Alias for is_linked(): return install metadata for dist, or None."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    source_dir = join(pkgs_dir, dist)
    # pre-link failure aborts the whole installation
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                # clobber any pre-existing file at the destination
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # files needing prefix replacement, no_link entries and symlinks
            # must be copied rather than linked
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        # the special '_cache' package gets no prefix rewriting or metadata
        if name_dist(dist) == '_cache':
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # a <dist>.files override (written by scripts) takes precedence
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    '''
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass
        # remove the meta-file last
        os.unlink(meta_path)
        # collect every directory between each removed file and the prefix
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest first, so parents become empty before we try them
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print prefix/.messages.txt to stdout if present, then delete it."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
# =========================== end API functions ==========================
def main():
    """Minimal standalone CLI used by the installer: list, extract, link or
    unlink packages in a prefix."""
    from pprint import pprint
    from optparse import OptionParser
    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    logging.basicConfig()
    # --list/--extract/--link-all take no positional argument;
    # the other actions require exactly one TARBALL/NAME
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')
    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)
    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # probe once with the first package; hard links only work when
        # cache and prefix share a filesystem
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)
    elif opts.unlink:
        unlink(prefix, dist)
if __name__ == '__main__':
    main()
<|code_end|>
| conda/cli/main_clean.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
Examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Register the `conda clean` subcommand and its options."""
    p = sub_parsers.add_parser(
        'clean',
        description=descr,
        help=descr,
        epilog=example,
    )
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action="store_true",
        help="Remove index cache.",
    )
    p.add_argument(
        "-l", "--lock",
        action="store_true",
        help="Remove all conda lock files.",
    )
    p.add_argument(
        "-t", "--tarballs",
        action="store_true",
        help="Remove cached package tarballs.",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""Remove unused cached packages. Warning: this does not check
    for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""Remove files from the source cache of conda build.""",
    )
    p.set_defaults(func=execute)
def find_lock():
    """Yield paths of all conda lock directories: package caches, the root
    prefix, every environment, and (if installed) conda-build's croot."""
    from os.path import join
    from conda.lock import LOCKFN
    lock_dirs = config.pkgs_dirs[:]
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))
    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        # conda-build not installed: nothing extra to scan
        pass
    for dir in lock_dirs:
        if not os.path.exists(dir):
            continue
        for dn in os.listdir(dir):
            # locks are directories whose name starts with LOCKFN
            if os.path.isdir(join(dir, dn)) and dn.startswith(LOCKFN):
                path = join(dir, dn)
                yield path
def rm_lock(locks, verbose=True):
    """Remove each lock directory in `locks`, announcing each one when
    verbose is True."""
    for lock_path in locks:
        if verbose:
            print('removing: %s' % lock_path)
        # locks are (empty) directories, so rmdir suffices
        os.rmdir(lock_path)
def find_tarballs():
    """Return (pkgs_dir -> [tarball filenames], total size in bytes) for
    all configured package caches."""
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        if not isdir(pkgs_dir):
            continue
        for fn in os.listdir(pkgs_dir):
            # include partial downloads as well
            if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
                pkgs_dirs[pkgs_dir].append(fn)
    totalsize = 0
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            size = getsize(join(pkgs_dir, fn))
            totalsize += size
    return pkgs_dirs, totalsize
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """Show, confirm and delete the cached tarballs found by find_tarballs()."""
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return
    if verbose:
        print("Will remove the following tarballs:")
        print()
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-'*len(pkgs_dir))
            fmt = "%-40s %10s"
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            # skip files we cannot write to instead of crashing
            if os.access(os.path.join(pkgs_dir, fn), os.W_OK):
                if verbose:
                    print("Removing %s" % fn)
                os.unlink(os.path.join(pkgs_dir, fn))
            else:
                if verbose:
                    print("WARNING: cannot remove, file permissions: %s" % fn)
def find_pkgs():
    """Find extracted packages whose files are not hard-linked anywhere.

    Returns (pkgs_dir -> [package dirs], lstat warnings, total size,
    pkgs_dir -> [per-package sizes]).
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    warnings = []
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        if not os.path.exists(pkgs_dir):
            print("WARNING: {0} does not exist".format(pkgs_dir))
            continue
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
        for pkg in pkgs:
            breakit = False
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                if breakit:
                    break
                for fn in files:
                    try:
                        stat = lstat(join(root, fn))
                    except OSError as e:
                        warnings.append((fn, e))
                        continue
                    # link count > 1 means the file is linked into some env,
                    # so the package is in use
                    if stat.st_nlink > 1:
                        # print('%s is installed: %s' % (pkg, join(root, fn)))
                        breakit = True
                        break
            else:
                # walk completed without a break: no file is linked anywhere
                pkgs_dirs[pkgs_dir].append(pkg)
    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice: by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)
    return pkgs_dirs, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """Show, confirm and delete the unused packages found by find_pkgs()."""
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
        for fn, exception in warnings:
            print(exception)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return
    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            fmt = "%-40s %10s"
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51)  # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Delete the repodata index cache under the primary packages directory."""
    from conda.install import rm_rf
    rm_rf(join(config.pkgs_dirs[0], 'cache'))
def find_source_cache():
    """Report conda-build's source/git/hg/svn cache directories and sizes.

    Returns a dict with keys: warnings, cache_dirs, cache_sizes, total_size.
    """
    try:
        import conda_build.source
    except ImportError:
        return {
            'warnings': ["conda-build is not installed; could not clean source cache"],
            'cache_dirs': [],
            'cache_sizes': {},
            'total_size': 0,
        }
    cache_dirs = {
        'source cache': conda_build.source.SRC_CACHE,
        'git cache': conda_build.source.GIT_CACHE,
        'hg cache': conda_build.source.HG_CACHE,
        'svn cache': conda_build.source.SVN_CACHE,
    }
    sizes = {}
    totalsize = 0
    for cache_type, cache_dir in cache_dirs.items():
        dirsize = 0
        for root, d, files in walk(cache_dir):
            for fn in files:
                size = lstat(join(root, fn)).st_size
                totalsize += size
                dirsize += size
        sizes[cache_type] = dirsize
    return {
        'warnings': [],
        'cache_dirs': cache_dirs,
        'cache_sizes': sizes,
        'total_size': totalsize,
    }
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """Show, confirm and delete conda-build's source caches."""
    verbose = not args.json
    if warnings:
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return
    # NOTE(review): the listing below prints unconditionally, even with
    # --json; confirm whether it should be guarded by `verbose`
    for cache_type in cache_dirs:
        print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
        print("%-40s %10s" % ("Size:",
                              human_bytes(cache_sizes[cache_type])))
        print()
    print("%-40s %10s" % ("Total:", human_bytes(total_size)))
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for dir in cache_dirs.values():
        print("Removing %s" % dir)
        rm_rf(dir)
def execute(args, parser):
    """Entry point for `conda clean`: run the selected cleanup actions and
    emit a JSON report when --json is given."""
    json_result = {
        'success': True
    }
    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)
    if args.tarballs:
        pkgs_dirs, totalsize = find_tarballs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()
    if args.packages:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)
    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])
    # at least one action flag is required
    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")
    if args.json:
        common.stdout_json(json_result)
conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, relpath
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        """No-op stand-in providing only the context-manager interface."""
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
on_win = bool(sys.platform == 'win32')
if on_win:
    import ctypes
    from ctypes import wintypes
    # kernel32 bindings used to create links on Windows
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # kernel32 without CreateSymbolicLinkW: soft links unsupported
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
# silence "no handlers" warnings for this module's logger
log.addHandler(NullHandler())
# integer codes for the three ways a file can be placed into an environment
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# human-readable names recorded in the conda-meta 'link' info
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create dst from src using the requested link type (hard/soft/copy)."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit and retry."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback: log why `path` could not be removed
    instead of raising."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("WARNING: cannot remove, permission denied: %s" % path)
    elif err == errno.ENOTEMPTY:
        log.warn("WARNING: cannot remove, not empty: %s" % path)
    else:
        log.warn("WARNING: cannot remove, unknown reason: %s" % path)
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        if os.access(path, os.W_OK):
            os.unlink(path)
        else:
            log.warn("WARNING: cannot remove, permission denied: %s" % path)
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    try:
                        # clearing read-only bits sometimes unblocks rmtree
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    # fall back to the cmd.exe built-in recursive delete
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                if trash:
                    try:
                        move_path_to_trash(path)
                        if not isdir(path):
                            return
                    except OSError as e2:
                        raise
                        # NOTE(review): unreachable -- the `raise` above aborts
                        # the retry loop before this line can run; confirm
                        # whether the raise is a leftover
                        msg += "Retry with onerror failed (%s)\n" % e2
                # back off linearly between attempts
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:  # directory might not exist or not be empty
        pass
def yield_lines(path):
    """Yield the stripped, non-blank, non-comment (leading '#') lines of path.

    Raises IOError (on first iteration) if the file cannot be opened.
    """
    # use a context manager so the file handle is closed deterministically
    # (the original relied on garbage collection to close it)
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# default placeholder prefix baked into relocatable package files at build time
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # older format: the whole line is the filename; assume the
                # default placeholder and text-mode replacement
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # missing has_prefix file means no prefix replacement is needed
        pass
    return res
class PaddingError(Exception):
    """Raised when a replacement prefix is longer than the placeholder."""
    pass
def binary_replace(data, a, b):
    """
    Replace occurrences of the placeholder `a` with `b` inside each
    null-terminated stretch of `data`, appending null bytes so the buffer
    keeps its original length.  All arguments are bytes objects.
    """
    import re

    def _pad_replace(match):
        count = match.group().count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            # replacement is longer than the placeholder: cannot pad
            raise PaddingError(a, b, pad)
        return match.group().replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    replaced = pattern.sub(_pad_replace, data)
    assert len(replaced) == len(data)
    return replaced
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """Rewrite the embedded `placeholder` prefix in the file at `path` with
    `new_prefix`, using plain text replacement (mode='text') or
    length-preserving null-padded replacement (mode='binary').
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # bug fix: the original message was "Invalid mode:" with no
        # conversion specifier, so the % formatting itself raised
        # TypeError instead of exiting with a readable error
        sys.exit("Invalid mode: %r" % mode)
    if new_data == data:
        return
    st = os.lstat(path)
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package name from a 'name-version-build' dist string."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Reads info/index.json from `info_dir`, overlays `extra_info`, and
    writes the result to <prefix>/conda-meta/<dist>.json.
    """
    # Start from the package's own index.json ...
    with open(join(info_dir, 'index.json')) as fp:
        meta = json.load(fp)
    # ... then overlay the caller-provided extras.
    meta.update(extra_info)
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fp:
        json.dump(meta, fp, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/')
                  and f.lower().endswith('.json')]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except ImportError as e:
        # BUG FIX: Exception.message does not exist on Python 3; str(e)
        # works on both Python 2 and 3.
        logging.warn("Menuinst could not be imported:")
        logging.warn(str(e))
        return
    env_name = (None if abspath(prefix) == abspath(sys.prefix) else
                basename(prefix))
    env_setup_cmd = ("activate %s" % env_name) if env_name else None
    for f in menu_files:
        try:
            if menuinst.__version__.startswith('1.0'):
                # Old menuinst API.
                menuinst.install(join(prefix, f), remove, prefix)
            else:
                menuinst.install(join(prefix, f), remove,
                                 root_prefix=sys.prefix,
                                 target_prefix=prefix, env_name=env_name,
                                 env_setup_cmd=env_setup_cmd)
        except Exception:
            # Menu creation is best-effort: log the failure and keep going.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # Scripts are named like .<pkgname>-post-link.sh (.bat on Windows) and
    # live in the environment's bin/ (Scripts/ on Windows) directory.
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        # No script shipped with this package -- vacuously successful.
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # cmd.exe cannot be located; treat as failure.
            return False
    else:
        args = ['/bin/bash', path]
    env = os.environ
    env['PREFIX'] = str(env_prefix or prefix)
    # Expose the package identity to the script via environment variables.
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """
    Return the URL from which `dist` was fetched, looked up in the
    urls.txt log inside `pkgs_dir`, or None if unknown.
    """
    try:
        # Context manager so the handle is always closed (the original
        # leaked the file object).
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            urls = fi.read().split()
        # Scan newest entries first; the most recent fetch wins.
        for url in urls[::-1]:
            if url.endswith('/%s.tar.bz2' % dist):
                return url
    except IOError:
        # No urls.txt -- the origin of this package is simply unknown.
        pass
    return None
def read_icondata(source_dir):
    """
    Return the package icon (info/icon.png) base64-encoded as a str,
    or None if the package has no icon.
    """
    import base64
    try:
        # Context manager so the handle is always closed (the original
        # leaked the file object).
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        # No icon shipped with this package.
        pass
    return None
def read_no_link(info_dir):
    """
    Return the set of files listed in info/no_link and info/no_softlink
    (files that must be copied rather than linked).
    """
    entries = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            entries |= set(yield_lines(join(info_dir, listing)))
        except IOError:
            # Listing file absent -- nothing to add.
            pass
    return entries
# Should this be an API function?
def symlink_conda(prefix, root_dir):
    """
    Symlink the root environment's conda, activate and deactivate
    scripts into <prefix>/bin so they are usable from that environment.
    """
    root_conda = join(root_dir, 'bin', 'conda')
    root_activate = join(root_dir, 'bin', 'activate')
    root_deactivate = join(root_dir, 'bin', 'deactivate')
    prefix_conda = join(prefix, 'bin', 'conda')
    prefix_activate = join(prefix, 'bin', 'activate')
    prefix_deactivate = join(prefix, 'bin', 'deactivate')
    if not os.path.lexists(join(prefix, 'bin')):
        os.makedirs(join(prefix, 'bin'))
    # lexists() (not exists()) so existing, possibly dangling, symlinks
    # are left untouched instead of being re-created.
    if not os.path.lexists(prefix_conda):
        os.symlink(root_conda, prefix_conda)
    if not os.path.lexists(prefix_activate):
        os.symlink(root_activate, prefix_activate)
    if not os.path.lexists(prefix_deactivate):
        os.symlink(root_deactivate, prefix_deactivate)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Return True if a hard link can be created from the package cache in
    `pkgs_dir` into `prefix` (i.e. they are on the same filesystem and
    hard links are supported), by probing with one small file.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        # Hard link failed (e.g. cross-device) -- caller should copy.
        return False
    finally:
        # Always clean up the probe file and any directory we created.
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
    """
    Return the set of canonical dist names for all tarballs present
    in the package cache directory `pkgs_dir`.
    """
    if not isdir(pkgs_dir):
        return set()
    # Strip the '.tar.bz2' suffix (8 characters) to get the dist name.
    return {name[:-8] for name in os.listdir(pkgs_dir)
            if name.endswith('.tar.bz2')}
def is_fetched(pkgs_dir, dist):
    """Return True when the tarball for `dist` is present in `pkgs_dir`."""
    tarball = '%s.tar.bz2' % dist
    return isfile(join(pkgs_dir, tarball))
def rm_fetched(pkgs_dir, dist):
    """Remove the cached tarball for `dist` from `pkgs_dir`."""
    # Hold the cache lock so we do not race a concurrent conda process.
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()
    # A package counts as extracted only when both metadata files exist.
    return {entry for entry in os.listdir(pkgs_dir)
            if isfile(join(pkgs_dir, entry, 'info', 'files'))
            and isfile(join(pkgs_dir, entry, 'info', 'index.json'))}
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files. However, we want root to be
            # the owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True when `dist` has been fully extracted into `pkgs_dir`."""
    info_dir = join(pkgs_dir, dist, 'info')
    # Both metadata files must be present for the extraction to count.
    return isfile(join(info_dir, 'files')) and isfile(join(info_dir, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    """Remove the extracted package directory for `dist` from the cache."""
    # Hold the cache lock so we do not race a concurrent conda process.
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    # Every linked package leaves a <dist>.json record in conda-meta/;
    # strip the '.json' suffix (5 characters) to recover the dist name.
    return {entry[:-5] for entry in os.listdir(meta_dir)
            if entry.endswith('.json')}
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    meta_path = join(prefix, 'conda-meta', dist + '.json')
    try:
        fp = open(meta_path)
    except IOError:
        # No metadata record -- the package is not linked here.
        return None
    with fp:
        return json.load(fp)
def delete_trash(prefix=None):
    """
    Best-effort removal of the .trash directory inside every package
    cache.  `prefix` is accepted for interface compatibility but unused.
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # trash=False: never move the trash dir into itself.
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            # Deleting trash is opportunistic; just log and move on.
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
    """
    Move a path to the trash

    Returns True on success, False if no package cache would accept it.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda import config
    # Try each package cache in turn until the move succeeds.
    for pkg_dir in config.pkgs_dirs:
        import tempfile
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # Cannot create trash in this cache; try the next one.
                continue
        # Unique subdirectory so same-named files never collide.
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # Mirror the path's location relative to the conda root dir.
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            return True
    log.debug("Could not move %s to trash" % path)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or
    None if the package is not linked there.  Currently an alias for
    is_linked().
    """
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # Files with an embedded prefix, and files that must never be linked.
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    # Lock both the target environment and the package cache.
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            # Windows cannot unlink open files; trash instead.
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # Prefix-containing files, no-link files and symlinks must be
            # copied, never hard/soft linked.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # The special _cache package gets no prefix rewriting, menus,
            # scripts or metadata.
            return
        # Rewrite the embedded build prefix in every has_prefix file.
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        # Assemble and persist the conda-meta record for this package.
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # Prefer an alternate files listing, if one was dropped off.
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    '''
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        # Collect every directory we touch so we can prune empties later.
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        # Windows cannot unlink open files; trash instead.
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass
        # remove the meta-file last
        os.unlink(meta_path)
        # Expand to all ancestor directories up to (but excluding) prefix.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # Deepest paths first so children are pruned before parents.
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """
    Print the contents of <prefix>/.messages.txt (if any) to stdout,
    then remove the file.
    """
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as stream:
            sys.stdout.write(stream.read())
    except IOError:
        pass  # no messages were left by the install
    finally:
        rm_rf(msg_file)
# =========================== end API functions ==========================
def main():
    """Command-line entry point for the low-level conda install tool."""
    from pprint import pprint
    from optparse import OptionParser
    p = OptionParser(
        usage="usage: %prog [options] [TARBALL/NAME]",
        description="low-level conda install tool, by default extracts "
                    "(if necessary) and links a TARBALL")
    p.add_option('-l', '--list',
                 action="store_true",
                 help="list all linked packages")
    p.add_option('--extract',
                 action="store_true",
                 help="extract package in pkgs cache")
    p.add_option('--link',
                 action="store_true",
                 help="link a package")
    p.add_option('--unlink',
                 action="store_true",
                 help="unlink a package")
    p.add_option('-p', '--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('--pkgs-dir',
                 action="store",
                 default=join(sys.prefix, 'pkgs'),
                 help="packages directory (defaults to %default)")
    p.add_option('--link-all',
                 action="store_true",
                 help="link all extracted packages")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    logging.basicConfig()
    # Validate positional arguments: bulk modes take none, the
    # single-package modes take exactly one tarball/dist name.
    if opts.list or opts.extract or opts.link_all:
        if args:
            p.error('no arguments expected')
    else:
        if len(args) == 1:
            dist = basename(args[0])
            if dist.endswith('.tar.bz2'):
                dist = dist[:-8]
        else:
            p.error('exactly one argument expected')
    pkgs_dir = opts.pkgs_dir
    prefix = opts.prefix
    if opts.verbose:
        print("pkgs_dir: %r" % pkgs_dir)
        print("prefix : %r" % prefix)
    if opts.list:
        pprint(sorted(linked(prefix)))
    elif opts.link_all:
        dists = sorted(extracted(pkgs_dir))
        # Probe once whether hard-linking works, then use the same link
        # type for every package.
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dists[0]) else
                    LINK_COPY)
        if opts.verbose or linktype == LINK_COPY:
            print("linktype: %s" % link_name_map[linktype])
        for dist in dists:
            if opts.verbose or linktype == LINK_COPY:
                print("linking: %s" % dist)
            link(pkgs_dir, prefix, dist, linktype)
        messages(prefix)
    elif opts.extract:
        extract(pkgs_dir, dist)
    elif opts.link:
        linktype = (LINK_HARD
                    if try_hard_link(pkgs_dir, prefix, dist) else
                    LINK_COPY)
        link(pkgs_dir, prefix, dist, linktype)
    elif opts.unlink:
        unlink(prefix, dist)
if __name__ == '__main__':
    main()
| conda/cli/main_clean.py
--- a/conda/cli/main_clean.py
+++ b/conda/cli/main_clean.py
@@ -151,9 +151,13 @@ def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
for pkgs_dir in pkgs_dirs:
for fn in pkgs_dirs[pkgs_dir]:
- if verbose:
- print("removing %s" % fn)
- os.unlink(os.path.join(pkgs_dir, fn))
+ if os.access(os.path.join(pkgs_dir, fn), os.W_OK):
+ if verbose:
+ print("Removing %s" % fn)
+ os.unlink(os.path.join(pkgs_dir, fn))
+ else:
+ if verbose:
+ print("WARNING: cannot remove, file permissions: %s" % fn)
def find_pkgs():
@@ -163,6 +167,9 @@ def find_pkgs():
pkgs_dirs = defaultdict(list)
for pkgs_dir in config.pkgs_dirs:
+ if not os.path.exists(pkgs_dir):
+ print("WARNING: {0} does not exist".format(pkgs_dir))
+ continue
pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
# Only include actual packages
isdir(join(pkgs_dir, i, 'info'))]
conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -138,6 +138,13 @@ def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
+def warn_failed_remove(function, path, exc_info):
+ if exc_info[1].errno == errno.EACCES:
+ log.warn( "WARNING: cannot remove, permission denied: %s" % path )
+ elif exc_info[1].errno == errno.ENOTEMPTY:
+ log.warn( "WARNING: cannot remove, not empty: %s" % path )
+ else:
+ log.warn( "WARNING: cannot remove, unknown reason: %s" % path )
def rm_rf(path, max_retries=5, trash=True):
"""
@@ -152,12 +159,15 @@ def rm_rf(path, max_retries=5, trash=True):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
- os.unlink(path)
+ if os.access(path, os.W_OK):
+ os.unlink(path)
+ else:
+ log.warn("WARNING: cannot remove, permission denied: %s" % path)
elif isdir(path):
for i in range(max_retries):
try:
- shutil.rmtree(path)
+ shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
return
except OSError as e:
msg = "Unable to delete %s\n%s\n" % (path, e)
@@ -189,7 +199,7 @@ def rm_rf(path, max_retries=5, trash=True):
log.debug(msg + "Retrying after %s seconds..." % i)
time.sleep(i)
# Final time. pass exceptions to caller.
- shutil.rmtree(path)
+ shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
""" |
conda clean -p breaks hard linking of new installs
@asmeurer
Following on from conda/conda#68:
On Windows 8 (and probably equally on all other Windows versions), `conda clean -p` removes all extracted packages from the `pkgs` directory even if they are hard linked in some environments.
While all existing environments will continue to function, creation of a new environment with the same packages causes conda to extract the packages again and to hard link to the newly extracted files. This massively increases disk usage and results in the opposite of what a user running `conda clean` was attempting to achieve.
The problem lies in `main_clean.py` and the fact that on Windows `lstat(file).st_nlink` always returns 0, even if `file` is hard linked. (This seems to have been fixed from python 3.2 onwards: https://bugs.python.org/issue10027)
As a stop gap measure `conda clean -p` should be prevented from being run on windows until a better solution can be found.
```
C:\>conda list
# packages in environment at C:\Anaconda:
#
conda 3.10.1 py27_0
conda-env 2.1.4 py27_0
menuinst 1.0.4 py27_0
psutil 2.2.1 py27_0
pycosat 0.6.1 py27_0
python 2.7.9 1
pyyaml 3.11 py27_0
requests 2.6.0 py27_0
```
| conda/cli/main_clean.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
Examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Register the `conda clean` sub-command and its options."""
    p = sub_parsers.add_parser(
        'clean',
        description=descr,
        help=descr,
        epilog=example,
    )
    # Shared options: --yes confirmation skipping and --json output.
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action="store_true",
        help="Remove index cache.",
    )
    p.add_argument(
        "-l", "--lock",
        action="store_true",
        help="Remove all conda lock files.",
    )
    p.add_argument(
        "-t", "--tarballs",
        action="store_true",
        help="Remove cached package tarballs.",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""Remove unused cached packages. Warning: this does not check
    for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""Remove files from the source cache of conda build.""",
    )
    p.set_defaults(func=execute)
def find_lock():
    """Yield the paths of all conda lock directories in known locations."""
    from os.path import join
    from conda.lock import LOCKFN
    # Candidates: every package cache, the root env, every named env,
    # and (when conda-build is installed) its build root.
    lock_dirs = config.pkgs_dirs[:]
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))
    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        # conda-build not installed -- skip its build root.
        pass
    for dir in lock_dirs:
        if not os.path.exists(dir):
            continue
        for dn in os.listdir(dir):
            # Lock directories are named with the LOCKFN prefix.
            if os.path.isdir(join(dir, dn)) and dn.startswith(LOCKFN):
                path = join(dir, dn)
                yield path
def rm_lock(locks, verbose=True):
    """Remove each lock directory in `locks`, optionally reporting each."""
    for lock_path in locks:
        if verbose:
            print('removing: %s' % lock_path)
        os.rmdir(lock_path)
def find_tarballs():
    """
    Return (pkgs_dirs, totalsize): a mapping of each package cache dir
    to the tarball filenames it contains (including partial downloads),
    plus their combined size in bytes.
    """
    pkgs_dirs = defaultdict(list)
    for cache_dir in config.pkgs_dirs:
        if not isdir(cache_dir):
            continue
        for name in os.listdir(cache_dir):
            if name.endswith(('.tar.bz2', '.tar.bz2.part')):
                pkgs_dirs[cache_dir].append(name)
    totalsize = sum(getsize(join(cache_dir, name))
                    for cache_dir in pkgs_dirs
                    for name in pkgs_dirs[cache_dir])
    return pkgs_dirs, totalsize
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """
    Delete the cached tarballs listed in `pkgs_dirs` (as returned by
    find_tarballs), after showing a summary and asking for confirmation.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return
    if verbose:
        print("Will remove the following tarballs:")
        print()
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-'*len(pkgs_dir))
            fmt = "%-40s %10s"
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            # Skip files we cannot delete instead of crashing (e.g. read-only
            # files in a shared, admin-owned install).
            if os.access(os.path.join(pkgs_dir, fn), os.W_OK):
                if verbose:
                    print("Removing %s" % fn)
                os.unlink(os.path.join(pkgs_dir, fn))
            else:
                if verbose:
                    print("WARNING: cannot remove, file permissions: %s" % fn)
def find_pkgs():
    """
    Return (pkgs_dirs, warnings, totalsize, pkgsizes) describing cached
    packages whose files are not hard-linked into any environment
    (st_nlink == 1 everywhere) and can therefore be removed.
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    warnings = []
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        if not os.path.exists(pkgs_dir):
            print("WARNING: {0} does not exist".format(pkgs_dir))
            continue
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
        for pkg in pkgs:
            breakit = False
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                if breakit:
                    break
                for fn in files:
                    try:
                        stat = lstat(join(root, fn))
                    except OSError as e:
                        warnings.append((fn, e))
                        continue
                    # NOTE(review): on Windows with Python < 3.2, lstat()
                    # reports st_nlink == 0 even for hard-linked files
                    # (https://bugs.python.org/issue10027), so in-use
                    # packages can be misclassified as unused here --
                    # confirm before relying on this on Windows.
                    if stat.st_nlink > 1:
                        # print('%s is installed: %s' % (pkg, join(root, fn)))
                        breakit = True
                        break
            else:
                # No file had extra links: the package is unused.
                pkgs_dirs[pkgs_dir].append(pkg)
    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice: by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)
    return pkgs_dirs, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """
    Delete the unused cached packages listed in `pkgs_dirs` (as returned
    by find_pkgs), after showing a summary and asking for confirmation.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
        # Surface any stat() failures encountered while scanning.
        for fn, exception in warnings:
            print(exception)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return
    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            fmt = "%-40s %10s"
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51)  # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Remove the repodata index cache from the primary package cache."""
    from conda.install import rm_rf
    rm_rf(join(config.pkgs_dirs[0], 'cache'))
def find_source_cache():
    """
    Return a summary dict of conda-build's source caches: directories,
    per-cache sizes, total size, and any warnings.
    """
    try:
        import conda_build.source
    except ImportError:
        # Without conda-build there is nothing to clean; report it.
        return {
            'warnings': ["conda-build is not installed; could not clean source cache"],
            'cache_dirs': [],
            'cache_sizes': {},
            'total_size': 0,
        }
    cache_dirs = {
        'source cache': conda_build.source.SRC_CACHE,
        'git cache': conda_build.source.GIT_CACHE,
        'hg cache': conda_build.source.HG_CACHE,
        'svn cache': conda_build.source.SVN_CACHE,
    }
    sizes = {}
    totalsize = 0
    for cache_type, cache_dir in cache_dirs.items():
        dirsize = 0
        for root, d, files in walk(cache_dir):
            for fn in files:
                size = lstat(join(root, fn)).st_size
                totalsize += size
                dirsize += size
        sizes[cache_type] = dirsize
    return {
        'warnings': [],
        'cache_dirs': cache_dirs,
        'cache_sizes': sizes,
        'total_size': totalsize,
    }
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """
    Delete conda-build's source caches after showing a summary and asking
    for confirmation.  Keyword names match find_source_cache()'s result.
    """
    verbose = not args.json
    if warnings:
        # Warnings mean the caches could not be enumerated -- do nothing.
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return
    for cache_type in cache_dirs:
        print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
        print("%-40s %10s" % ("Size:",
                              human_bytes(cache_sizes[cache_type])))
        print()
    print("%-40s %10s" % ("Total:", human_bytes(total_size)))
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for dir in cache_dirs.values():
        print("Removing %s" % dir)
        rm_rf(dir)
def execute(args, parser):
    """
    Entry point for `conda clean`: dispatch to each requested clean-up
    action and emit a JSON summary when --json was given.
    """
    json_result = {
        'success': True
    }
    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)
    if args.tarballs:
        pkgs_dirs, totalsize = find_tarballs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()
    if args.packages:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first,  # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first],  # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)
    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])
    # At least one action flag is required.
    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")
    if args.json:
        common.stdout_json(json_result)
<|code_end|>
| conda/cli/main_clean.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from os.path import join, getsize, isdir
from os import lstat, walk, listdir
from conda.cli import common
import conda.config as config
from conda.utils import human_bytes
from conda.install import rm_rf
descr = """
Remove unused packages and caches.
"""
example = """
Examples:
conda clean --tarballs
"""
def configure_parser(sub_parsers):
    """Register the `conda clean` sub-command and its options."""
    p = sub_parsers.add_parser(
        'clean',
        description=descr,
        help=descr,
        epilog=example,
    )
    # Shared options: --yes confirmation skipping and --json output.
    common.add_parser_yes(p)
    common.add_parser_json(p)
    p.add_argument(
        "-i", "--index-cache",
        action="store_true",
        help="Remove index cache.",
    )
    p.add_argument(
        "-l", "--lock",
        action="store_true",
        help="Remove all conda lock files.",
    )
    p.add_argument(
        "-t", "--tarballs",
        action="store_true",
        help="Remove cached package tarballs.",
    )
    p.add_argument(
        '-p', '--packages',
        action='store_true',
        help="""Remove unused cached packages. Warning: this does not check
    for symlinked packages.""",
    )
    p.add_argument(
        '-s', '--source-cache',
        action='store_true',
        help="""Remove files from the source cache of conda build.""",
    )
    p.set_defaults(func=execute)
# work-around for python bug on Windows prior to python 3.2
# https://bugs.python.org/issue10027
# Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation
class CrossPlatformStLink(object):
    """
    Callable returning the hard-link count (st_nlink) of a path.  On
    Windows before Python 3.2, os.lstat() reports st_nlink == 0, so the
    count is obtained from the Win32 API instead.
    """
    # Lazily resolved to the platform-appropriate implementation.
    _st_nlink = None

    def __call__(self, path):
        return self.st_nlink(path)

    @classmethod
    def st_nlink(cls, path):
        # Resolve the implementation on first use.
        if cls._st_nlink is None:
            cls._initialize()
        return cls._st_nlink(path)

    @classmethod
    def _standard_st_nlink(cls, path):
        # POSIX (and fixed Pythons): os.lstat() can be trusted.
        return lstat(path).st_nlink

    @classmethod
    def _windows_st_nlink(cls, path):
        st_nlink = cls._standard_st_nlink(path)
        if st_nlink != 0:
            return st_nlink
        else:
            # cannot trust python on Windows when st_nlink == 0
            # get value using windows libraries to be sure of its true value
            # Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation
            GENERIC_READ = 0x80000000
            FILE_SHARE_READ = 0x00000001
            OPEN_EXISTING = 3
            hfile = cls.CreateFile(path, GENERIC_READ, FILE_SHARE_READ, None,
                                   OPEN_EXISTING, 0, None)
            if hfile is None:
                from ctypes import WinError
                raise WinError()
            info = cls.BY_HANDLE_FILE_INFORMATION()
            rv = cls.GetFileInformationByHandle(hfile, info)
            cls.CloseHandle(hfile)
            if rv == 0:
                from ctypes import WinError
                raise WinError()
            return info.nNumberOfLinks

    @classmethod
    def _initialize(cls):
        if os.name != 'nt':
            cls._st_nlink = cls._standard_st_nlink
        else:
            # Bind the needed kernel32 functions and structures once.
            # http://msdn.microsoft.com/en-us/library/windows/desktop/aa363858
            import ctypes
            from ctypes import POINTER, WinError
            from ctypes.wintypes import DWORD, HANDLE, BOOL
            cls.CreateFile = ctypes.windll.kernel32.CreateFileW
            cls.CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p,
                                       DWORD, DWORD, HANDLE]
            cls.CreateFile.restype = HANDLE
            # http://msdn.microsoft.com/en-us/library/windows/desktop/ms724211
            cls.CloseHandle = ctypes.windll.kernel32.CloseHandle
            cls.CloseHandle.argtypes = [HANDLE]
            cls.CloseHandle.restype = BOOL

            class FILETIME(ctypes.Structure):
                _fields_ = [("dwLowDateTime", DWORD),
                            ("dwHighDateTime", DWORD)]

            class BY_HANDLE_FILE_INFORMATION(ctypes.Structure):
                _fields_ = [("dwFileAttributes", DWORD),
                            ("ftCreationTime", FILETIME),
                            ("ftLastAccessTime", FILETIME),
                            ("ftLastWriteTime", FILETIME),
                            ("dwVolumeSerialNumber", DWORD),
                            ("nFileSizeHigh", DWORD),
                            ("nFileSizeLow", DWORD),
                            ("nNumberOfLinks", DWORD),
                            ("nFileIndexHigh", DWORD),
                            ("nFileIndexLow", DWORD)]
            cls.BY_HANDLE_FILE_INFORMATION = BY_HANDLE_FILE_INFORMATION
            # http://msdn.microsoft.com/en-us/library/windows/desktop/aa364952
            cls.GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle
            cls.GetFileInformationByHandle.argtypes = [HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
            cls.GetFileInformationByHandle.restype = BOOL
            cls._st_nlink = cls._windows_st_nlink
def find_lock():
    """Yield the paths of all conda lock directories in known locations."""
    from os.path import join
    from conda.lock import LOCKFN
    # Candidates: every package cache, the root env, every named env,
    # and (when conda-build is installed) its build root.
    lock_dirs = config.pkgs_dirs[:]
    lock_dirs += [config.root_dir]
    for envs_dir in config.envs_dirs:
        if os.path.exists(envs_dir):
            for fn in os.listdir(envs_dir):
                if os.path.isdir(join(envs_dir, fn)):
                    lock_dirs.append(join(envs_dir, fn))
    try:
        from conda_build.config import croot
        lock_dirs.append(croot)
    except ImportError:
        # conda-build not installed -- skip its build root.
        pass
    for dir in lock_dirs:
        if not os.path.exists(dir):
            continue
        for dn in os.listdir(dir):
            # Lock directories are named with the LOCKFN prefix.
            if os.path.isdir(join(dir, dn)) and dn.startswith(LOCKFN):
                path = join(dir, dn)
                yield path
def rm_lock(locks, verbose=True):
    """Remove each lock directory in `locks`, optionally reporting each."""
    for lock_path in locks:
        if verbose:
            print('removing: %s' % lock_path)
        os.rmdir(lock_path)
def find_tarballs():
    """
    Return (pkgs_dirs, totalsize): tarball filenames found in each
    package cache directory (including partial downloads) and their
    combined size in bytes.
    """
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        if not isdir(pkgs_dir):
            continue
        for fn in os.listdir(pkgs_dir):
            # '.tar.bz2.part' entries are interrupted downloads.
            if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'):
                pkgs_dirs[pkgs_dir].append(fn)
    totalsize = 0
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            size = getsize(join(pkgs_dir, fn))
            totalsize += size
    return pkgs_dirs, totalsize
def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True):
    """
    Delete the cached tarballs listed in `pkgs_dirs` (as returned by
    find_tarballs), after showing a summary and asking for confirmation.
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no tarballs to remove")
        return
    if verbose:
        print("Will remove the following tarballs:")
        print()
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-'*len(pkgs_dir))
            fmt = "%-40s %10s"
            for fn in pkgs_dirs[pkgs_dir]:
                size = getsize(join(pkgs_dir, fn))
                print(fmt % (fn, human_bytes(size)))
            print()
        print('-' * 51)  # From 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for fn in pkgs_dirs[pkgs_dir]:
            # Skip files we cannot delete instead of crashing (e.g. read-only
            # files in a shared, admin-owned install).
            if os.access(os.path.join(pkgs_dir, fn), os.W_OK):
                if verbose:
                    print("Removing %s" % fn)
                os.unlink(os.path.join(pkgs_dir, fn))
            else:
                if verbose:
                    print("WARNING: cannot remove, file permissions: %s" % fn)
def find_pkgs():
    """Find extracted package directories that no environment references.

    A package is considered unused when none of its files has a hard-link
    count above 1 (linked environments share files with the cache via
    hard links).

    Returns ``(pkgs_dirs, warnings, totalsize, pkgsizes)``.
    """
    # TODO: This doesn't handle packages that have hard links to files within
    # themselves, like bin/python3.3 and bin/python3.3m in the Python package
    warnings = []
    # st_nlink helper (CrossPlatformStLink works around a Windows lstat
    # quirk on older Pythons -- see its definition).
    cross_platform_st_nlink = CrossPlatformStLink()
    pkgs_dirs = defaultdict(list)
    for pkgs_dir in config.pkgs_dirs:
        if not os.path.exists(pkgs_dir):
            print("WARNING: {0} does not exist".format(pkgs_dir))
            continue
        pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
                # Only include actual packages
                isdir(join(pkgs_dir, i, 'info'))]
        for pkg in pkgs:
            breakit = False
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                if breakit:
                    break
                for fn in files:
                    try:
                        st_nlink = cross_platform_st_nlink(join(root, fn))
                    except OSError as e:
                        # Record unreadable files but keep scanning.
                        warnings.append((fn, e))
                        continue
                    if st_nlink > 1:
                        # print('%s is installed: %s' % (pkg, join(root, fn)))
                        breakit = True
                        break
            else:
                # for/else: reached only when no hard-linked file was found,
                # i.e. the package is not referenced by any environment.
                pkgs_dirs[pkgs_dir].append(pkg)
    totalsize = 0
    pkgsizes = defaultdict(list)
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            pkgsize = 0
            for root, dir, files in walk(join(pkgs_dir, pkg)):
                for fn in files:
                    # We don't have to worry about counting things twice:  by
                    # definition these files all have a link count of 1!
                    size = lstat(join(root, fn)).st_size
                    totalsize += size
                    pkgsize += size
            pkgsizes[pkgs_dir].append(pkgsize)
    return pkgs_dirs, warnings, totalsize, pkgsizes
def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
            verbose=True):
    """Interactively delete the unused packages reported by ``find_pkgs()``.

    The parameters mirror ``find_pkgs()``'s return values, plus the parsed
    CLI *args* (reads ``args.json`` and ``args.dry_run``).
    """
    if verbose:
        for pkgs_dir in pkgs_dirs:
            print('Cache location: %s' % pkgs_dir)
        for fn, exception in warnings:
            print(exception)
    # Nothing to do when every cache dir mapped to an empty list.
    if not any(pkgs_dirs[i] for i in pkgs_dirs):
        if verbose:
            print("There are no unused packages to remove")
        return
    if verbose:
        print("Will remove the following packages:")
        for pkgs_dir in pkgs_dirs:
            print(pkgs_dir)
            print('-' * len(pkgs_dir))
            print()
            fmt = "%-40s %10s"
            # pkgsizes[pkgs_dir] was built in the same order as
            # pkgs_dirs[pkgs_dir], so zip pairs each package with its size.
            for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]):
                print(fmt % (pkg, human_bytes(pkgsize)))
            print()
        print('-' * 51) # 40 + 1 + 10 in fmt
        print(fmt % ('Total:', human_bytes(totalsize)))
        print()
    if not args.json:
        common.confirm_yn(args)
    # With --json --dry-run we only report; nothing is deleted.
    if args.json and args.dry_run:
        return
    for pkgs_dir in pkgs_dirs:
        for pkg in pkgs_dirs[pkgs_dir]:
            if verbose:
                print("removing %s" % pkg)
            rm_rf(join(pkgs_dir, pkg))
def rm_index_cache():
    """Remove the repodata index cache under the primary package cache."""
    from conda.install import rm_rf
    index_cache_dir = join(config.pkgs_dirs[0], 'cache')
    rm_rf(index_cache_dir)
def find_source_cache():
try:
import conda_build.source
except ImportError:
return {
'warnings': ["conda-build is not installed; could not clean source cache"],
'cache_dirs': [],
'cache_sizes': {},
'total_size': 0,
}
cache_dirs = {
'source cache': conda_build.source.SRC_CACHE,
'git cache': conda_build.source.GIT_CACHE,
'hg cache': conda_build.source.HG_CACHE,
'svn cache': conda_build.source.SVN_CACHE,
}
sizes = {}
totalsize = 0
for cache_type, cache_dir in cache_dirs.items():
dirsize = 0
for root, d, files in walk(cache_dir):
for fn in files:
size = lstat(join(root, fn)).st_size
totalsize += size
dirsize += size
sizes[cache_type] = dirsize
return {
'warnings': [],
'cache_dirs': cache_dirs,
'cache_sizes': sizes,
'total_size': totalsize,
}
def rm_source_cache(args, cache_dirs, warnings, cache_sizes, total_size):
    """Interactively delete conda-build's source/VCS caches.

    The parameters mirror the dict returned by ``find_source_cache()``
    (callers splat that dict into this function).
    """
    verbose = not args.json
    if warnings:
        # A warning means the caches could not be inspected (e.g. no
        # conda-build); report and bail out rather than deleting anything.
        if verbose:
            for warning in warnings:
                print(warning, file=sys.stderr)
        return
    for cache_type in cache_dirs:
        print("%s (%s)" % (cache_type, cache_dirs[cache_type]))
        print("%-40s %10s" % ("Size:",
                              human_bytes(cache_sizes[cache_type])))
        print()
    print("%-40s %10s" % ("Total:", human_bytes(total_size)))
    if not args.json:
        common.confirm_yn(args)
    # With --json --dry-run we only report; nothing is deleted.
    if args.json and args.dry_run:
        return
    for dir in cache_dirs.values():
        print("Removing %s" % dir)
        rm_rf(dir)
def execute(args, parser):
    """Entry point for ``conda clean``; dispatches on the cleanup flags.

    Builds a JSON-serializable summary of everything that was (or would be)
    removed and prints it when ``--json`` was given.  Errors out if no
    cleanup flag was passed.
    """
    json_result = {
        'success': True
    }
    if args.lock:
        locks = list(find_lock())
        json_result['lock'] = {
            'files': locks
        }
        rm_lock(locks, verbose=not args.json)
    if args.tarballs:
        pkgs_dirs, totalsize = find_tarballs()
        # 'pkgs_dir'/'files' single-dir keys are kept for old consumers of
        # the JSON output; 'pkgs_dirs' carries the full mapping.
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['tarballs'] = {
            'pkgs_dir': first, # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first], # Backwards compatibility
            'total_size': totalsize
        }
        rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
    if args.index_cache:
        json_result['index_cache'] = {
            'files': [join(config.pkgs_dirs[0], 'cache')]
        }
        rm_index_cache()
    if args.packages:
        pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
        first = sorted(pkgs_dirs)[0] if pkgs_dirs else ''
        json_result['packages'] = {
            'pkgs_dir': first, # Backwards compatibility
            'pkgs_dirs': dict(pkgs_dirs),
            'files': pkgs_dirs[first], # Backwards compatibility
            'total_size': totalsize,
            'warnings': warnings,
            'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs},
        }
        rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes,
                verbose=not args.json)
    if args.source_cache:
        json_result['source_cache'] = find_source_cache()
        rm_source_cache(args, **json_result['source_cache'])
    if not (args.lock or args.tarballs or args.index_cache or args.packages or
            args.source_cache):
        common.error_and_exit(
            "One of {--lock, --tarballs, --index-cache, --packages, --source-cache} required",
            error_type="ValueError")
    if args.json:
        common.stdout_json(json_result)
| conda/cli/main_clean.py
--- a/conda/cli/main_clean.py
+++ b/conda/cli/main_clean.py
@@ -65,6 +65,95 @@ def configure_parser(sub_parsers):
p.set_defaults(func=execute)
+# work-around for python bug on Windows prior to python 3.2
+# https://bugs.python.org/issue10027
+# Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation
+class CrossPlatformStLink(object):
+ _st_nlink = None
+
+ def __call__(self, path):
+ return self.st_nlink(path)
+
+ @classmethod
+ def st_nlink(cls, path):
+ if cls._st_nlink is None:
+ cls._initialize()
+ return cls._st_nlink(path)
+
+ @classmethod
+ def _standard_st_nlink(cls, path):
+ return lstat(path).st_nlink
+
+ @classmethod
+ def _windows_st_nlink(cls, path):
+ st_nlink = cls._standard_st_nlink(path)
+ if st_nlink != 0:
+ return st_nlink
+ else:
+ # cannot trust python on Windows when st_nlink == 0
+ # get value using windows libraries to be sure of its true value
+ # Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation
+ GENERIC_READ = 0x80000000
+ FILE_SHARE_READ = 0x00000001
+ OPEN_EXISTING = 3
+ hfile = cls.CreateFile(path, GENERIC_READ, FILE_SHARE_READ, None,
+ OPEN_EXISTING, 0, None)
+ if hfile is None:
+ from ctypes import WinError
+ raise WinError()
+ info = cls.BY_HANDLE_FILE_INFORMATION()
+ rv = cls.GetFileInformationByHandle(hfile, info)
+ cls.CloseHandle(hfile)
+ if rv == 0:
+ from ctypes import WinError
+ raise WinError()
+ return info.nNumberOfLinks
+
+ @classmethod
+ def _initialize(cls):
+ if os.name != 'nt':
+ cls._st_nlink = cls._standard_st_nlink
+ else:
+ # http://msdn.microsoft.com/en-us/library/windows/desktop/aa363858
+ import ctypes
+ from ctypes import POINTER, WinError
+ from ctypes.wintypes import DWORD, HANDLE, BOOL
+
+ cls.CreateFile = ctypes.windll.kernel32.CreateFileW
+ cls.CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p,
+ DWORD, DWORD, HANDLE]
+ cls.CreateFile.restype = HANDLE
+
+ # http://msdn.microsoft.com/en-us/library/windows/desktop/ms724211
+ cls.CloseHandle = ctypes.windll.kernel32.CloseHandle
+ cls.CloseHandle.argtypes = [HANDLE]
+ cls.CloseHandle.restype = BOOL
+
+ class FILETIME(ctypes.Structure):
+ _fields_ = [("dwLowDateTime", DWORD),
+ ("dwHighDateTime", DWORD)]
+
+ class BY_HANDLE_FILE_INFORMATION(ctypes.Structure):
+ _fields_ = [("dwFileAttributes", DWORD),
+ ("ftCreationTime", FILETIME),
+ ("ftLastAccessTime", FILETIME),
+ ("ftLastWriteTime", FILETIME),
+ ("dwVolumeSerialNumber", DWORD),
+ ("nFileSizeHigh", DWORD),
+ ("nFileSizeLow", DWORD),
+ ("nNumberOfLinks", DWORD),
+ ("nFileIndexHigh", DWORD),
+ ("nFileIndexLow", DWORD)]
+ cls.BY_HANDLE_FILE_INFORMATION = BY_HANDLE_FILE_INFORMATION
+
+ # http://msdn.microsoft.com/en-us/library/windows/desktop/aa364952
+ cls.GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle
+ cls.GetFileInformationByHandle.argtypes = [HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
+ cls.GetFileInformationByHandle.restype = BOOL
+
+ cls._st_nlink = cls._windows_st_nlink
+
+
def find_lock():
from os.path import join
@@ -164,7 +253,8 @@ def find_pkgs():
# TODO: This doesn't handle packages that have hard links to files within
# themselves, like bin/python3.3 and bin/python3.3m in the Python package
warnings = []
-
+
+ cross_platform_st_nlink = CrossPlatformStLink()
pkgs_dirs = defaultdict(list)
for pkgs_dir in config.pkgs_dirs:
if not os.path.exists(pkgs_dir):
@@ -180,11 +270,11 @@ def find_pkgs():
break
for fn in files:
try:
- stat = lstat(join(root, fn))
+ st_nlink = cross_platform_st_nlink(join(root, fn))
except OSError as e:
warnings.append((fn, e))
continue
- if stat.st_nlink > 1:
+ if st_nlink > 1:
# print('%s is installed: %s' % (pkg, join(root, fn)))
breakit = True
break |
confusing error message when boto is missing
If one tries to install from an s3 channel (e.g., `conda install foo -c s3://bar/baz/conda`) while in an environment, this message shows up: `Error: boto is required for S3 channels. Please install it with: conda install boto`. Installing `boto` in the environment doesn't actually solve the problem, however, because `boto` has to be installed into the main set of packages (e.g., `source deactivate; conda install boto`).
Perhaps a simple edit to the error message would address this. (e.g., `Error: boto is required for S3 channels. Please install it with: conda install boto. Make sure to run "source deactivate" if you are in an environment.`)
| conda/connection.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
import re
import mimetypes
import os
import email
import base64
import ftplib
import cgi
from io import BytesIO
import tempfile
import conda
from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers, ssl_verify
import requests
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests Session preconfigured for conda: user-configured proxies,
    bounded retries for http(s), and extra transport adapters so the same
    Session can also fetch ``file://``, ``ftp://`` and ``s3://`` channel
    urls."""
    # Default per-request timeout (None = wait indefinitely).
    timeout = None
    def __init__(self, *args, **kwargs):
        # `retries` is conda-specific; pop it before requests sees kwargs.
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies
        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
        # Enable s3:// urls
        self.mount("s3://", S3Adapter())
        # Prefix the User-Agent so servers can identify conda traffic.
        self.headers['User-Agent'] = "conda/%s %s" % (
            conda.__version__, self.headers['User-Agent'])
        self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves ``s3://bucket/key`` urls via boto."""

    def __init__(self):
        super(S3Adapter, self).__init__()
        # Path of the temp file holding the downloaded object, if any.
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Fetch the S3 object named by ``request.url``.

        Returns a requests Response.  A missing boto installation, a
        missing bucket and a missing key all map to a 404 response rather
        than an exception.
        """
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            import boto
        except ImportError:
            # boto must be importable from conda's own (root) environment;
            # installing it into an activated environment does not help,
            # hence the reminder to `source deactivate` first.
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp
        conn = boto.connect_s3()
        bucket_name, key_string = url_to_S3_info(request.url)
        try:
            bucket = conn.get_bucket(bucket_name)
        except boto.exception.S3ResponseError as exc:
            resp.status_code = 404
            resp.raw = exc
            return resp
        key = bucket.get_key(key_string)
        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })
            # Stage the object in a temp file so resp.raw is a real file
            # object that requests can stream from.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404
        return resp

    def close(self):
        # Remove the staged download, if one was made.
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """Split an ``s3://bucket/key`` url into its (bucket, key) parts."""
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves ``file://`` urls straight from disk."""
    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Build a Response for the local file named by ``request.url``.

        A missing or unreadable path yields a 404 response whose ``raw``
        attribute is the underlying OSError.
        """
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            # Mimic the headers an HTTP server would send for this file.
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close
        return resp
    def close(self):
        # Nothing to release; files are closed via each response.
        pass
def url_to_path(url):
    """Convert a ``file:`` URL into a local filesystem path."""
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    # Drop the scheme and any leading slashes, then undo %-escapes.
    stripped = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(stripped):
        # Windows drive letter: 'c|/...' or 'c:/...' -> 'c:/...'
        return stripped[0] + ':' + stripped[2:]
    return '/' + stripped
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''
    def __init__(self):
        super(FTPAdapter, self).__init__()
        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr,}
    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)
        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)
        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)
        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)
        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            # Anonymous login when no credentials were supplied.
            self.conn.login()
        # Get the method and attempt to find the function to call.
        resp = self.func_table[request.method](path, request)
        # Return the response.
        return resp
    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass
    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()
        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response
    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()
        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close
        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response
    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)
        # Split into the path and the filename.
        path, filename = os.path.split(path)
        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)
        # Close the connection and build the response.
        self.conn.close()
        response = build_binary_response(request, BytesIO(), code)
        return response
    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()
        # Alias the close method.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response
    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')
        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]
                # Decode the base64 encoded string.
                decoded = base64.b64decode(encoded)
                # The string is of the form 'username:password'. Split on the
                # colon.
                # NOTE(review): on Python 3 b64decode returns bytes, so
                # .split(':') would raise TypeError -- presumably this path
                # only ran under Python 2; confirm before relying on it.
                components = decoded.split(':')
                username = components[0]
                password = components[1]
                return (username, password)
        else:
            # No auth header. Return None.
            return None
    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path
        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]
        host = parsed.hostname
        port = parsed.port or 0
        return (host, port, path)
def data_callback_factory(variable):
    """Build a callback for ftplib's retrbinary that appends every received
    chunk to *variable*, which must expose a file-like ``write`` method."""
    def _writer(chunk):
        variable.write(chunk)
    return _writer
class AuthError(Exception):
    """Raised when the Authorization header is not usable for FTP auth."""
def build_text_response(request, data, code):
    """Wrap *data* in a Response decoded as ASCII text."""
    return build_response(request, data, code, encoding='ascii')
def build_binary_response(request, data, code):
    """Wrap *data* in a Response with no assumed text encoding."""
    return build_response(request, data, code, encoding=None)
def build_response(request, data, code, encoding):
    """Assemble a requests Response around the raw *data* produced by an
    FTP command; *code* is ftplib's reply line (e.g. '226 Transfer complete')
    and *encoding* is applied when the body is decoded as text."""
    resp = requests.Response()
    # The numeric status is the first token of ftplib's reply line.
    resp.status_code = int(code.split()[0])
    resp.encoding = encoding
    resp.url = request.url
    resp.request = request
    resp.raw = data
    # Rewind so consumers read the payload from the beginning.
    resp.raw.seek(0)
    # Give any registered response hooks a chance to run.
    return requests.hooks.dispatch_hook('response', request.hooks, resp)
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.

    Only the first file of the multipart body is returned, as BytesIO.
    NOTE(review): this predates Python 3 -- the cgi module is deprecated
    (removed in 3.13) and the str-join of *filedata* below looks
    Python-2-only; confirm before exercising this path on Python 3.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])
    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)
    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()
    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    buf.write(''.join(filedata))
    buf.seek(0)
    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """Reassemble a parsed url -- the inverse of ``parse_url``.

    *U* is any iterable yielding ``(scheme, auth, host, port, path, query,
    fragment)``; unused components are None.  The result is RFC-equivalent
    to the original url, though not always byte-identical (e.g. urls with
    a blank port).

    Example::

        >>> unparse_url(['http', None, 'google.com', None, '/mail/', None, None])
        'http://google.com/mail/'
    """
    scheme, auth, host, port, path, query, fragment = U
    pieces = []
    # "is not None" checks: empty strings and port 0 must still be emitted.
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
<|code_end|>
| conda/connection.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from logging import getLogger
import re
import mimetypes
import os
import email
import base64
import ftplib
import cgi
from io import BytesIO
import tempfile
import conda
from conda.compat import urlparse, StringIO
from conda.config import get_proxy_servers, ssl_verify
import requests
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests Session preconfigured for conda: user-configured proxies,
    bounded retries for http(s), and extra transport adapters so the same
    Session can also fetch ``file://``, ``ftp://`` and ``s3://`` channel
    urls."""
    # Default per-request timeout (None = wait indefinitely).
    timeout = None
    def __init__(self, *args, **kwargs):
        # `retries` is conda-specific; pop it before requests sees kwargs.
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies
        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
        # Enable s3:// urls
        self.mount("s3://", S3Adapter())
        # Prefix the User-Agent so servers can identify conda traffic.
        self.headers['User-Agent'] = "conda/%s %s" % (
            conda.__version__, self.headers['User-Agent'])
        self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves ``s3://bucket/key`` urls via boto."""
    def __init__(self):
        super(S3Adapter, self).__init__()
        # Path of the temp file holding the downloaded object, if any.
        self._temp_file = None
    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Fetch the S3 object named by ``request.url``.

        A missing boto installation, a missing bucket and a missing key all
        map to a 404 response rather than an exception.
        """
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            import boto
        except ImportError:
            # boto must be importable from conda's own (root) environment;
            # hence the reminder to deactivate any active environment.
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp
        conn = boto.connect_s3()
        bucket_name, key_string = url_to_S3_info(request.url)
        try:
            bucket = conn.get_bucket(bucket_name)
        except boto.exception.S3ResponseError as exc:
            resp.status_code = 404
            resp.raw = exc
            return resp
        key = bucket.get_key(key_string)
        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })
            # Stage the object in a temp file so resp.raw is a real file
            # object that requests can stream from.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404
        return resp
    def close(self):
        # Remove the staged download, if one was made.
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """Split an ``s3://bucket/key`` url into its (bucket, key) parts."""
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves ``file://`` urls straight from disk."""
    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Build a Response for the local file named by ``request.url``.

        A missing or unreadable path yields a 404 response whose ``raw``
        attribute is the underlying OSError.
        """
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            # Mimic the headers an HTTP server would send for this file.
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close
        return resp
    def close(self):
        # Nothing to release; files are closed via each response.
        pass
def url_to_path(url):
    """Convert a ``file:`` URL into a local filesystem path."""
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    # Drop the scheme and any leading slashes, then undo %-escapes.
    stripped = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(stripped):
        # Windows drive letter: 'c|/...' or 'c:/...' -> 'c:/...'
        return stripped[0] + ':' + stripped[2:]
    return '/' + stripped
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
'''A Requests Transport Adapter that handles FTP urls.'''
def __init__(self):
super(FTPAdapter, self).__init__()
# Build a dictionary keyed off the methods we support in upper case.
# The values of this dictionary should be the functions we use to
# send the specific queries.
self.func_table = {'LIST': self.list,
'RETR': self.retr,
'STOR': self.stor,
'NLST': self.nlst,
'GET': self.retr,}
def send(self, request, **kwargs):
'''Sends a PreparedRequest object over FTP. Returns a response object.
'''
# Get the authentication from the prepared request, if any.
auth = self.get_username_password_from_header(request)
# Next, get the host and the path.
host, port, path = self.get_host_and_path_from_url(request)
# Sort out the timeout.
timeout = kwargs.get('timeout', None)
# Establish the connection and login if needed.
self.conn = ftplib.FTP()
self.conn.connect(host, port, timeout)
if auth is not None:
self.conn.login(auth[0], auth[1])
else:
self.conn.login()
# Get the method and attempt to find the function to call.
resp = self.func_table[request.method](path, request)
# Return the response.
return resp
def close(self):
'''Dispose of any internal state.'''
# Currently this is a no-op.
pass
def list(self, path, request):
'''Executes the FTP LIST command on the given path.'''
data = StringIO()
# To ensure the StringIO gets cleaned up, we need to alias its close
# method to the release_conn() method. This is a dirty hack, but there
# you go.
data.release_conn = data.close
self.conn.cwd(path)
code = self.conn.retrbinary('LIST', data_callback_factory(data))
# When that call has finished executing, we'll have all our data.
response = build_text_response(request, data, code)
# Close the connection.
self.conn.close()
return response
def retr(self, path, request):
'''Executes the FTP RETR command on the given path.'''
data = BytesIO()
# To ensure the BytesIO gets cleaned up, we need to alias its close
# method. See self.list().
data.release_conn = data.close
code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
response = build_binary_response(request, data, code)
# Close the connection.
self.conn.close()
return response
def stor(self, path, request):
'''Executes the FTP STOR command on the given path.'''
# First, get the file handle. We assume (bravely)
# that there is only one file to be sent to a given URL. We also
# assume that the filename is sent as part of the URL, not as part of
# the files argument. Both of these assumptions are rarely correct,
# but they are easy.
data = parse_multipart_files(request)
# Split into the path and the filename.
path, filename = os.path.split(path)
# Switch directories and upload the data.
self.conn.cwd(path)
code = self.conn.storbinary('STOR ' + filename, data)
# Close the connection and build the response.
self.conn.close()
response = build_binary_response(request, BytesIO(), code)
return response
def nlst(self, path, request):
    '''Executes the FTP NLST command on the given path and returns the
    bare name listing as a text response.'''
    buf = StringIO()
    # Alias close() as release_conn() so requests can release the buffer.
    buf.release_conn = buf.close
    self.conn.cwd(path)
    code = self.conn.retrbinary('NLST', data_callback_factory(buf))
    # The callback has accumulated the full listing by now.
    resp = build_text_response(request, buf, code)
    # Tear down the connection before handing the response back.
    self.conn.close()
    return resp
def get_username_password_from_header(self, request):
    '''Given a PreparedRequest object, reverse the process of adding HTTP
    Basic auth to obtain the username and password. Allows the FTP adapter
    to piggyback on the basic auth notation without changing the control
    flow.

    Returns a (username, password) tuple, or None when the request has no
    Authorization header. Raises AuthError for non-Basic auth schemes.
    '''
    auth_header = request.headers.get('Authorization')
    if not auth_header:
        # No auth header. Return None.
        return None
    # The basic auth header is of the form 'Basic xyz'. We want the
    # second part. Check that we have the right kind of auth though.
    encoded_components = auth_header.split()[:2]
    if encoded_components[0] != 'Basic':
        raise AuthError('Invalid form of Authentication used.')
    encoded = encoded_components[1]
    # Decode the base64 encoded string. b64decode returns bytes on
    # Python 3, so normalize to text before splitting.
    decoded = base64.b64decode(encoded)
    if not isinstance(decoded, str):
        decoded = decoded.decode('utf-8')
    # The string is of the form 'username:password'. Split on the FIRST
    # colon only: RFC 7617 allows the password itself to contain colons.
    username, _, password = decoded.partition(':')
    return (username, password)
def get_host_and_path_from_url(self, request):
    '''Given a PreparedRequest object, split the URL in such a manner as to
    determine the host and the path. This is a separate method to wrap some
    of urlparse's craziness.

    Returns a (host, port, path) tuple; port is 0 when the URL does not
    name one, and the path has any leading slash removed.
    '''
    url = request.url
    parsed = urlparse.urlparse(url)
    path = parsed.path
    # If there is a slash on the front of the path, chuck it. Guard
    # against an entirely empty path (e.g. 'ftp://host.com'), which would
    # otherwise raise IndexError on path[0].
    if path.startswith('/'):
        path = path[1:]
    host = parsed.hostname
    port = parsed.port or 0
    return (host, port, path)
def data_callback_factory(variable):
    '''Returns a callback suitable for use by the FTP library. Each chunk
    passed to the callback is appended to *variable*, which should be a
    file-like object.'''
    def _sink(chunk):
        variable.write(chunk)
    return _sink
class AuthError(Exception):
    '''Raised when request authentication is malformed or unsupported.'''
def build_text_response(request, data, code):
    '''Build a response for textual data, decoded as ASCII.'''
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown (no decoding).'''
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()
    response.encoding = encoding
    # Attach the fields consumers of a Response expect.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status strings look like '226 Transfer complete.'; the first
    # whitespace-separated token is the numeric code.
    response.status_code = int(code.split()[0])
    # Rewind the file-like raw object so readers start at the beginning.
    response.raw.seek(0)
    # Give registered response hooks a chance to run.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict (boundary etc.) from the Content-Type.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    # NOTE(review): cgi.parse_multipart on Python 3 expects a bytes
    # boundary and yields bytes chunks, so the ''.join below would raise
    # there; this path appears to assume Python 2 semantics -- confirm
    # before porting.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a fresh BytesIO now, and write the file contents into it.
    buf = BytesIO()
    buf.write(''.join(filedata))
    buf.seek(0)
    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ...              '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U
    pieces = []
    # "is not None" checks keep empty strings (and a 0 port) in the output.
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
| conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -101,7 +101,9 @@ def send(self, request, stream=None, timeout=None, verify=None, cert=None,
import boto
except ImportError:
stderrlog.info('\nError: boto is required for S3 channels. '
- 'Please install it with: conda install boto\n')
+ 'Please install it with `conda install boto`\n'
+ 'Make sure to run `source deactivate` if you '
+ 'are in a conda environment.\n')
resp.status_code = 404
return resp
|
conda 4.0.2 failure during dependency resolution
This command fails in Linux and OS X
```
conda create -n testconda anaconda accelerate tornado=4.0.2
```
with this traceback:
```
Traceback (most recent call last):
File "/Users/ewelch/miniconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 139, in main
args_func(args, p)
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 146, in args_func
args.func(args, p)
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 49, in execute
install.install(args, parser, 'create')
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 334, in install
update_deps=args.update_deps)
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/plan.py", line 435, in install_actions
smh = r.dependency_sort(must_have)
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 730, in dependency_sort
depends = lookup(value)
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 725, in lookup
return set(ms.name for ms in self.ms_depends(value + '.tar.bz2'))
File "/Users/ewelch/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 574, in ms_depends
deps = [MatchSpec(d) for d in self.index[fn].get('depends', [])]
KeyError: u'anaconda-2.1.0-np19py27_0.tar..tar.bz2'
```
Here is my `conda info`:
```
platform : osx-64
conda version : 4.0.2
conda-build version : 1.18.1
python version : 2.7.11.final.0
requests version : 2.9.1
root environment : /Users/ewelch/miniconda (writable)
default environment : /Users/ewelch/miniconda
envs directories : /Users/ewelch/miniconda/envs
package cache : /Users/ewelch/miniconda/pkgs
channel URLs : https://conda.binstar.org/javascript/osx-64/
https://conda.binstar.org/javascript/noarch/
https://conda.binstar.org/wakari/osx-64/
https://conda.binstar.org/wakari/noarch/
https://repo.continuum.io/pkgs/free/osx-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/osx-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : /Users/ewelch/.condarc
is foreign system : False
```
I can circumvent the exception by using the following diff. I don't propose that this is the solution (I don't know the code well enough), but I hope it's a useful starting point:
``` diff
diff --git a/conda/plan.py b/conda/plan.py
index a7fd836..519dedf 100644
--- a/conda/plan.py
+++ b/conda/plan.py
@@ -412,7 +412,7 @@ def install_actions(prefix, index, specs, force=False, only_names=None,
pkgs = r.install(specs, [d + '.tar.bz2' for d in linked], update_deps=update_deps)
for fn in pkgs:
- dist = fn[:-8]
+ dist = fn[:fn.rindex('.tar.bz2')]
name = install.name_dist(dist)
if not name or only_names and name not in only_names:
continue
```
The reason this fix works is that the filename, `fn`, is `anaconda-2.1.0-np19py27_0.tar.bz2[mkl]`, and the code assumes it ends in `.tar.bz2`.
| conda/resolve.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import logging
from collections import defaultdict
from itertools import chain
from conda.compat import iterkeys, itervalues, iteritems, string_types
from conda.logic import minimal_unsatisfiable_subset, Clauses
from conda.version import VersionSpec, normalized_version
from conda.console import setup_handlers
from conda import config
from conda.toposort import toposort
# Module-level loggers; the named channels ('dotupdate', 'stdoutlog',
# 'stderrlog') are wired to their handlers by setup_handlers() below.
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def dashlist(iter):
    '''Render *iter* as a string of newline-separated, dash-bulleted items.'''
    return ''.join('\n - %s' % (item,) for item in iter)
class Unsatisfiable(RuntimeError):
    '''An exception to report unsatisfiable dependencies.

    Args:
        bad_deps: a list of tuples of objects (likely MatchSpecs).
        chains: (optional) if True, the tuples are interpreted as chains
            of dependencies, from top level to bottom. If False, the tuples
            are interpreted as simple lists of conflicting specs.

    Returns:
        Raises an exception with a formatted message detailing the
        unsatisfiable specifications.
    '''
    def __init__(self, bad_deps, chains=True):
        bad_deps = [list(map(str, dep)) for dep in bad_deps]
        if chains:
            # Merge chains that share a prefix of package names, so that
            # e.g. 'a -> b 1.0' and 'a -> b 2.0' render once as
            # 'a -> b 1.0|2.0'.
            chains = {}
            for dep in sorted(bad_deps, key=len, reverse=True):
                # Split each spec into (name, ' ', constraint) pieces.
                dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
                key = (dep[0],) + tuple(v[0] for v in dep1)
                vals = ('',) + tuple(v[2] for v in dep1)
                found = False
                for key2, csets in iteritems(chains):
                    if key2[:len(key)] == key:
                        for cset, val in zip(csets, vals):
                            cset.add(val)
                        found = True
                if not found:
                    chains[key] = [{val} for val in vals]
            bad_deps = []
            for key, csets in iteritems(chains):
                deps = []
                for name, cset in zip(key, csets):
                    if '' not in cset:
                        pass
                    elif len(cset) == 1:
                        # Only the unconstrained form was seen; drop it.
                        cset.clear()
                    else:
                        # Mixed constrained/unconstrained: render as '*'.
                        cset.remove('')
                        cset.add('*')
                    deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
                chains[key] = ' -> '.join(deps)
            bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
            msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
        else:
            bad_deps = [sorted(dep) for dep in bad_deps]
            bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
            msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
        msg = msg % dashlist(bad_deps)
        super(Unsatisfiable, self).__init__(msg)
class NoPackagesFound(RuntimeError):
    '''An exception to report that requested packages are missing.

    Args:
        bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
            chains, from top level to bottom.

    Returns:
        Raises an exception with a formatted message detailing the
        missing packages and/or dependencies.
    '''
    def __init__(self, bad_deps):
        # The deepest spec in each chain is the one that could not be found.
        deps = set(q[-1].spec for q in bad_deps)
        if all(len(q) > 1 for q in bad_deps):
            what = "Dependencies" if len(bad_deps) > 1 else "Dependency"
        elif all(len(q) == 1 for q in bad_deps):
            what = "Packages" if len(bad_deps) > 1 else "Package"
        else:
            what = "Packages/dependencies"
        bad_deps = dashlist(' -> '.join(map(str, q)) for q in bad_deps)
        # NOTE(review): ' % s' looks like a typo for '%s'; it is harmless
        # here because the space flag is a no-op for %s conversions.
        msg = ' % s missing in current %s channels: %s' % (what, config.subdir, bad_deps)
        super(NoPackagesFound, self).__init__(msg)
        # Expose the missing spec strings for callers to inspect.
        self.pkgs = deps
class MatchSpec(object):
    '''A parsed package specification of 1-3 space-separated parts:
    "name", "name version-spec", or "name version build".

    Attributes:
        strictness: number of parts supplied (1, 2 or 3).
        target: optional filename of an installed package the solver
            should stay close to.
        optional: the spec need not be satisfied.
        negate: invert the sense of the match.
    '''
    def __new__(cls, spec, target=None, optional=False, negate=False):
        # Passing an existing MatchSpec through is a no-op; note that the
        # target/optional/negate arguments are ignored in that case.
        if isinstance(spec, cls):
            return spec
        self = object.__new__(cls)
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3, repr(spec)
        self.name = parts[0]
        if self.strictness == 2:
            self.vspecs = VersionSpec(parts[1])
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])
        self.target = target
        self.optional = optional
        self.negate = negate
        return self

    def match_fast(self, version, build):
        # Match against a bare (version, build) pair; honors self.negate.
        if self.strictness == 1:
            res = True
        elif self.strictness == 2:
            res = self.vspecs.match(version)
        else:
            res = bool((version, build) == self.ver_build)
        return res != self.negate

    def match(self, info):
        # *info* may be a filename ('name-version-build.tar.bz2') or a
        # repodata record dict.
        if isinstance(info, string_types):
            name, version, build = info[:-8].rsplit('-', 2)
        else:
            name = info.get('name')
            version = info.get('version')
            build = info.get('build')
        if name != self.name:
            return False
        return self.match_fast(version, build)

    def to_filename(self):
        # Only a fully pinned, plain spec maps to a unique filename.
        if self.strictness == 3 and not self.optional and not self.negate:
            return self.name + '-%s-%s.tar.bz2' % self.ver_build
        else:
            return None

    def __eq__(self, other):
        # NOTE(review): equality ignores target/optional/negate while
        # __hash__ includes negate -- confirm this asymmetry is intended.
        return type(other) is MatchSpec and self.spec == other.spec

    def __hash__(self):
        return hash((self.spec, self.negate))

    def __repr__(self):
        res = 'MatchSpec(' + repr(self.spec)
        if self.target:
            res += ',target=' + repr(self.target)
        if self.optional:
            res += ',optional=True'
        if self.negate:
            res += ',negate=True'
        return res + ')'

    def __str__(self):
        res = self.spec
        if self.target or self.optional:
            mods = []
            if self.target:
                mods.append('target='+str(self.target))
            if self.optional:
                mods.append('optional')
            if self.negate:
                mods.append('negate')
            res += ' (' + ', '.join(mods) + ')'
        return res
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """

    def __init__(self, fn, info):
        self.fn = fn
        self.name = info.get('name')
        self.version = info.get('version')
        self.build = info.get('build')
        self.build_number = info.get('build_number')
        self.channel = info.get('channel')
        try:
            self.norm_version = normalized_version(self.version)
        except ValueError:
            stderrlog.error("\nThe following stack trace is in reference to "
                            "package:\n\n\t%s\n\n" % fn)
            raise
        self.info = info

    def _order(self):
        # Ordering key shared by every comparison operator below.
        return (self.norm_version, self.build_number, self.build)

    def _asdict(self):
        result = self.info.copy()
        result['fn'] = self.fn
        result['norm_version'] = str(self.norm_version)
        return result

    def __lt__(self, other):
        # Ordering is only defined within a single package name.
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        return self._order() < other._order()

    def __eq__(self, other):
        if not isinstance(other, Package) or self.name != other.name:
            return False
        return self._order() == other._order()

    def __ne__(self, other):
        return not self == other

    def __gt__(self, other):
        return other < self

    def __le__(self, other):
        return not (other < self)

    def __ge__(self, other):
        return not (self < other)
def build_groups(index):
    '''Map package name -> list of filenames, and tracked feature name ->
    list of filenames providing that feature.'''
    groups = {}
    trackers = {}
    for filename, record in index.items():
        groups.setdefault(record['name'], []).append(filename)
        for feature in record.get('track_features', '').split():
            trackers.setdefault(feature, []).append(filename)
    return groups, trackers
class Resolve(object):
def __init__(self, index):
    """Build a Resolve instance over *index*, a dict mapping package
    filenames to their repodata records.

    A private copy of the index is augmented with two kinds of synthetic
    entries:
      - one '<feature>@' pseudo-package per tracked feature, so that
        features participate in matching like ordinary packages, and
      - one '<fn>[<feature>]' alias per entry in a package's
        'with_features_depends' table.
    """
    self.index = index.copy()
    for fn, info in iteritems(index):
        for fstr in chain(info.get('features', '').split(),
                          info.get('track_features', '').split()):
            fpkg = fstr + '@'
            if fpkg not in self.index:
                # Synthetic feature package: version 0, no dependencies.
                self.index[fpkg] = {
                    'name': fpkg, 'version': '0', 'build_number': 0,
                    'build': '', 'depends': [], 'track_features': fstr}
        for fstr in iterkeys(info.get('with_features_depends', {})):
            # Feature-variant alias shares the base package's record.
            fn2 = fn + '[' + fstr + ']'
            self.index[fn2] = info
    self.groups, self.trackers = build_groups(self.index)
    # Memoization caches for find_matches() and ms_depends().
    self.find_matches_ = {}
    self.ms_depends_ = {}
def default_filter(self, features=None, filter=None):
    '''Start a fresh validity filter: every tracked-feature pseudo-package
    is disabled, except those named in *features*. A caller-supplied dict
    is reset in place and returned.'''
    if filter is None:
        filter = {}
    # Clearing a freshly created dict is a no-op, so this is identical to
    # only clearing caller-supplied dicts.
    filter.clear()
    filter.update({name + '@': False for name in self.trackers})
    for name in features or ():
        filter[name + '@'] = True
    return filter
def valid(self, spec, filter):
    """Tests if a package, MatchSpec, or a list of both has satisfiable
    dependencies, assuming cyclic dependencies are always valid.

    Args:
        spec: a package key, a MatchSpec, or an iterable of these.
        filter: a dictionary of (fn,valid) pairs, used to consider a subset
            of dependencies, and to eliminate repeated searches.

    Returns:
        True if the full set of dependencies can be satisfied; False otherwise.
        If filter is supplied and update is True, it will be updated with the
        search results.
    """
    def v_(spec):
        # Dispatch on the spec type.
        return v_ms_(spec) if isinstance(spec, MatchSpec) else v_fn_(spec)

    def v_ms_(ms):
        # A MatchSpec is valid if it is optional or any matching package is.
        return ms.optional or any(v_fn_(fn) for fn in self.find_matches(ms))

    def v_fn_(fn):
        val = filter.get(fn)
        if val is None:
            # Seed True before recursing so cycles count as valid.
            filter[fn] = True
            val = filter[fn] = all(v_ms_(ms) for ms in self.ms_depends(fn))
        return val

    return v_(spec)
def touch(self, spec, touched, filter):
    """Determines a conservative set of packages to be considered given a
    package, or a spec, or a list thereof. Cyclic dependencies are not
    solved, so there is no guarantee a solution exists.

    Args:
        spec: a package key or MatchSpec
        touched: a dict into which to accumulate the result. This is
            useful when processing multiple specs.
        filter: a dictionary of (fn,valid) pairs to be used when
            testing for package validity.

    This function works in two passes. First, it verifies that the package has
    satisfiable dependencies from among the filtered packages. If not, then it
    is _not_ touched, nor are its dependencies. If so, then it is marked as
    touched, and any of its valid dependencies are as well.
    """
    def t_fn_(fn):
        val = touched.get(fn)
        if val is None:
            val = touched[fn] = self.valid(fn, filter)
            if val:
                # Recurse into real dependencies; '@feature' specs are
                # handled by the feature machinery, not here.
                for ms in self.ms_depends(fn):
                    if ms.name[0] != '@':
                        t_ms_(ms)

    def t_ms_(ms):
        for fn in self.find_matches(ms):
            t_fn_(fn)

    return t_ms_(spec) if isinstance(spec, MatchSpec) else t_fn_(spec)
def invalid_chains(self, spec, filter):
    """Constructs a set of 'dependency chains' for invalid specs.

    A dependency chain is a tuple of MatchSpec objects, starting with
    the requested spec, proceeding down the dependency tree, ending at
    a specification that cannot be satisfied. Uses self.valid_ as a
    filter, both to prevent chains and to allow other routines to
    prune the list of valid packages with additional criteria.

    Args:
        spec: a package key or MatchSpec
        filter: a dictionary of (fn,valid) pairs to be used when
            testing for package validity.

    Returns:
        A list of tuples, or an empty list if the MatchSpec is valid.
    """
    def chains_(spec, top=None):
        # NOTE(review): 'top' is never supplied by the recursion below,
        # so the cycle guard is inert -- confirm against upstream.
        if spec.name == top or self.valid(spec, filter):
            return []
        notfound = set()
        specs = self.find_matches(spec) if isinstance(spec, MatchSpec) else [spec]
        for fn in specs:
            for m2 in self.ms_depends(fn):
                notfound.update(chains_(m2))
        # Prefix this spec onto every failing sub-chain; when none of the
        # dependencies failed, this spec itself is the dead end.
        return [(spec,) + x for x in notfound] if notfound else [(spec,)]

    return chains_(spec)
def verify_specs(self, specs):
    """Perform a quick verification that specs and dependencies are reasonable.

    Args:
        specs: An iterable of strings or MatchSpec objects to be tested.

    Returns:
        The tuple (specs, removes, optional, features) of verified
        MatchSpecs and feature names.

    Raises:
        NoPackagesFound: when a required spec (or one of its dependency
        chains) cannot be satisfied.

    Note that this does not attempt to resolve circular dependencies.
    """
    bad_deps = []
    opts = []
    rems = []
    spec2 = []
    feats = set()
    for s in specs:
        ms = MatchSpec(s)
        if ms.name[-1] == '@':
            # 'name@' pseudo-specs select features rather than packages.
            feats.add(ms.name[:-1])
            continue
        if ms.negate:
            rems.append(MatchSpec(ms.spec))
        if not ms.optional:
            spec2.append(ms)
        elif any(self.find_matches(ms)):
            # Optional specs are kept only when something matches them.
            opts.append(ms)
    for ms in spec2:
        filter = self.default_filter(feats)
        if not self.valid(ms, filter):
            bad_deps.extend(self.invalid_chains(ms, filter))
    if bad_deps:
        raise NoPackagesFound(bad_deps)
    return spec2, rems, opts, feats
def get_dists(self, specs):
    '''Determine the set of packages (and implied specs) that could
    participate in satisfying *specs*.

    Returns:
        (dists, new_specs): *dists* maps filename -> repodata record for
        every package that survives pruning; *new_specs* lists the bare
        name specs discovered as mandatory dependencies.

    Raises:
        Unsatisfiable: when pruning proves the specs cannot be met; the
        message is built from a minimal unsatisfiable subset.
    '''
    log.debug('Retrieving packages for: %s' % specs)

    specs, removes, optional, features = self.verify_specs(specs)
    filter = {}
    touched = {}
    snames = set()
    nspecs = set()
    unsat = []

    def filter_group(matches, chains=None):
        # If we are here, then this dependency is mandatory,
        # so add it to the master list. That way it is still
        # participates in the pruning even if one of its
        # parents is pruned away
        match1 = next(ms for ms in matches)
        name = match1.name
        first = name not in snames
        group = self.groups.get(name, [])

        # Prune packages that don't match any of the patterns
        # or which have unsatisfiable dependencies
        nold = 0
        bad_deps = []
        for fn in group:
            if filter.setdefault(fn, True):
                nold += 1
                sat = self.match_any(matches, fn)
                sat = sat and all(any(filter.get(f2, True) for f2 in self.find_matches(ms))
                                  for ms in self.ms_depends(fn))
                filter[fn] = sat
                if not sat:
                    bad_deps.append(fn)

        # Build dependency chains if we detect unsatisfiability
        nnew = nold - len(bad_deps)
        reduced = nnew < nold
        if reduced:
            log.debug(' % s: pruned from %d -> %d' % (name, nold, nnew))
        if nnew == 0:
            # The group has been pruned to nothing: record the failing
            # chains so an informative error can be raised later.
            if name in snames:
                snames.remove(name)
            bad_deps = [fn for fn in bad_deps if self.match_any(matches, fn)]
            matches = [(ms,) for ms in matches]
            chains = [a + b for a in chains for b in matches] if chains else matches
            if bad_deps:
                dep2 = set()
                for fn in bad_deps:
                    for ms in self.ms_depends(fn):
                        if not any(filter.get(f2, True) for f2 in self.find_matches(ms)):
                            dep2.add(ms)
                chains = [a + (b,) for a in chains for b in dep2]
                unsat.extend(chains)
            return nnew
        if not reduced and not first:
            return False

        # Perform the same filtering steps on any dependencies shared across
        # *all* packages in the group. Even if just one of the packages does
        # not have a particular dependency, it must be ignored in this pass.
        if first:
            snames.add(name)
            if match1 not in specs:
                nspecs.add(MatchSpec(name))
        cdeps = defaultdict(list)
        for fn in group:
            if filter[fn]:
                for m2 in self.ms_depends(fn):
                    if m2.name[0] != '@' and not m2.optional:
                        cdeps[m2.name].append(m2)
        # Only dependencies present in every surviving package qualify.
        cdeps = {mname: set(deps) for mname, deps in iteritems(cdeps) if len(deps) == nnew}
        if cdeps:
            matches = [(ms,) for ms in matches]
            if chains:
                matches = [a + b for a in chains for b in matches]
            # NOTE(review): 'matches' is rebuilt above, but 'chains' is
            # what gets passed down here -- looks suspicious, confirm
            # against upstream conda.
            if sum(filter_group(deps, chains) for deps in itervalues(cdeps)):
                reduced = True
        return reduced

    # Iterate in the filtering process until no more progress is made
    def full_prune(specs, removes, optional, features):
        self.default_filter(features, filter)
        for ms in removes:
            for fn in self.find_matches(ms):
                filter[fn] = False
        feats = set(self.trackers.keys())
        snames.clear()
        specs = slist = list(specs)
        onames = set(s.name for s in specs)
        for iter in range(10):
            first = True
            # Keep filtering while any group is still being reduced.
            while sum(filter_group([s]) for s in slist):
                slist = specs + [MatchSpec(n) for n in snames - onames]
                first = False
            if unsat:
                return False
            if first and iter:
                return True
            # Mark everything reachable from the specs as "touched".
            touched.clear()
            for fstr in features:
                touched[fstr+'@'] = True
            for spec in chain(specs, optional):
                self.touch(spec, touched, filter)
            # Shrink the active feature set to what touched packages track.
            nfeats = set()
            for fn, val in iteritems(touched):
                if val:
                    nfeats.update(self.track_features(fn))
            if len(nfeats) >= len(feats):
                return True
            pruned = False
            feats &= nfeats
            for fn, val in iteritems(touched):
                if val and self.features(fn) - feats:
                    # NOTE(review): the second filter[fn] assignment below
                    # is redundant with the chained assignment above it.
                    touched[fn] = filter[fn] = False
                    filter[fn] = False
                    pruned = True
            if not pruned:
                return True

    #
    # In the case of a conflict, look for the minimum satisfiable subset
    #
    if not full_prune(specs, removes, optional, features):
        def minsat_prune(specs):
            return full_prune(specs, removes, [], features)

        save_unsat = set(s for s in unsat if s[0] in specs)
        stderrlog.info('...')
        hint = minimal_unsatisfiable_subset(specs, sat=minsat_prune, log=False)
        save_unsat.update((ms,) for ms in hint)
        raise Unsatisfiable(save_unsat)

    dists = {fn: self.index[fn] for fn, val in iteritems(touched) if val}
    return dists, list(map(MatchSpec, snames - {ms.name for ms in specs}))
def match_any(self, mss, fn):
rec = self.index[fn]
n, v, b = rec['name'], rec['version'], rec['build']
return any(n == ms.name and ms.match_fast(v, b) for ms in mss)
def match(self, ms, fn):
    '''True when spec *ms* (string or MatchSpec) matches package *fn*.'''
    spec = MatchSpec(ms)
    return spec.match(self.index[fn])
def find_matches_group(self, ms, groups, trackers=None):
    '''Yield the filenames in *groups* (or, for an '@feature' spec, in
    *trackers*) that satisfy MatchSpec *ms*.'''
    ms = MatchSpec(ms)
    if ms.name[0] == '@' and trackers:
        # Feature spec: every package tracking the feature matches.
        for fname in trackers.get(ms.name[1:], []):
            yield fname
    else:
        for fname in groups.get(ms.name, []):
            info = self.index[fname]
            if ms.match_fast(info['version'], info['build']):
                yield fname
def find_matches(self, ms):
    '''Return (and memoize) the list of index filenames matching *ms*.'''
    ms = MatchSpec(ms)
    cached = self.find_matches_.get(ms)
    if cached is None:
        if ms.name[0] == '@':
            # Feature spec: matches are exactly the tracking packages.
            cached = self.trackers.get(ms.name[1:], [])
        else:
            cached = list(self.find_matches_group(ms, self.groups))
        self.find_matches_[ms] = cached
    return cached
def ms_depends(self, fn):
    '''Return (and memoize) the list of MatchSpec dependencies of *fn*.

    A key of the form 'name.tar.bz2[feature]' yields the base package's
    dependencies overlaid with its with_features_depends entries. Every
    result also gains one synthetic '@feature' spec per feature the
    package carries.
    '''
    deps = self.ms_depends_.get(fn, None)
    if deps is None:
        if fn[-1] == ']':
            fn2, fstr = fn[:-1].split('[')
            # Start from the base package's deps, then override any spec
            # redefined by the feature variant.
            fdeps = {d.name: d for d in self.ms_depends(fn2)}
            for dep in self.index[fn2]['with_features_depends'][fstr]:
                dep = MatchSpec(dep)
                fdeps[dep.name] = dep
            deps = list(fdeps.values())
        else:
            deps = [MatchSpec(d) for d in self.index[fn].get('depends', [])]
        deps.extend(MatchSpec('@'+feat) for feat in self.features(fn))
        self.ms_depends_[fn] = deps
    return deps
def version_key(self, fn, vtype=None):
    '''Sort key for *fn*: (normalized version, build number). The *vtype*
    argument is accepted for API compatibility but unused.'''
    info = self.index[fn]
    return (normalized_version(info['version']), info['build_number'])
def features(self, fn):
return set(self.index[fn].get('features', '').split())
def track_features(self, fn):
return set(self.index[fn].get('track_features', '').split())
def package_triple(self, fn):
if not fn.endswith('.tar.bz2'):
return self.package_triple(fn + '.tar.bz2')
rec = self.index.get(fn, None)
if rec is None:
return fn[:-8].rsplit('-', 2)
return (rec['name'], rec['version'], rec['build'])
def package_name(self, fn):
return self.package_triple(fn)[0]
def get_pkgs(self, ms, emptyok=False):
    '''Return sortable Package objects for every match of *ms*; raise
    NoPackagesFound when there are none, unless *emptyok* is set.'''
    spec = MatchSpec(ms)
    matches = [Package(fname, self.index[fname])
               for fname in self.find_matches(spec)]
    if not matches and not emptyok:
        raise NoPackagesFound([(spec,)])
    return matches
@staticmethod
def ms_to_v(ms):
return '@s@' + ms.spec + ('!' if ms.negate else '')
@staticmethod
def feat_to_v(feat):
return '@s@@' + feat
def gen_clauses(self, groups, trackers, specs):
    '''Build the boolean clause set over which the SAT solver runs.

    Args:
        groups: dict mapping package name -> list of filenames.
        trackers: dict mapping feature name -> list of filenames.
        specs: the MatchSpecs the solution should satisfy.

    Returns:
        A Clauses instance with one variable per package, per tracked
        feature, and per spec, plus the consistency constraints that
        link them.
    '''
    C = Clauses()

    # Creates a variable that represents the proposition:
    # Does the package set include a package that matches MatchSpec "ms"?
    def push_MatchSpec(ms):
        name = self.ms_to_v(ms)
        m = C.from_name(name)
        if m is None:
            libs = [fn for fn in self.find_matches_group(ms, groups, trackers)]
            # If the MatchSpec is optional, then there may be cases where we want
            # to assert that it is *not* True. This requires polarity=None.
            m = C.Any(libs, polarity=None if ms.optional else True, name=name)
        return m

    # Creates a variable that represents the proposition:
    # Does the package set include package "fn"?
    for group in itervalues(groups):
        for fn in group:
            C.new_var(fn)
        # Install no more than one version of each package
        C.Require(C.AtMostOne, group)

    # Create a variable that represents the proposition:
    # Is the feature "name" active in this package set?
    # We mark this as "optional" below because sometimes we need to be able to
    # assert the proposition is False during the feature minimization pass.
    for name in iterkeys(trackers):
        ms = MatchSpec('@' + name)
        ms.optional = True
        push_MatchSpec(ms)

    # Create a variable that represents the proposition:
    # Is the MatchSpec "ms" satisfied by the current package set?
    for ms in specs:
        push_MatchSpec(ms)

    # Create propositions that assert:
    # If package "fn" is installed, its dependencies must be satisfied
    for group in itervalues(groups):
        for fn in group:
            for ms in self.ms_depends(fn):
                if not ms.optional:
                    C.Require(C.Or, C.Not(fn), push_MatchSpec(ms))
    return C
def generate_spec_constraints(self, C, specs):
return [(self.ms_to_v(ms),) for ms in specs if not ms.optional]
def generate_feature_count(self, C, trackers):
    '''Objective terms: one unit of cost per active tracked feature.'''
    return dict((self.feat_to_v(name), 1) for name in iterkeys(trackers))
def generate_feature_metric(self, C, groups, specs):
    '''Objective terms penalizing packages that carry fewer features than
    the maximum within their group; returns (coefficients, total offset).'''
    eq = {}
    total = 0
    for name, group in iteritems(groups):
        counts = [len(self.features(fname)) for fname in group]
        top = max(counts)
        for fname, count in zip(group, counts):
            if count < top:
                eq[fname] = top - count
        total += top
    return eq, total
def generate_removal_count(self, C, specs):
return {'!'+self.ms_to_v(ms): 1 for ms in specs}
def generate_version_metrics(self, C, groups, specs,
                             missing=False, start0=True):
    '''Build objective terms that prefer newer versions and builds.

    Args:
        missing: when True, score only the groups *not* named in specs
            (and when False, only those that are).
        start0: when True the newest candidate costs 0; otherwise 1.

    Returns:
        (eqv, eqb): dicts mapping filename -> version penalty and
        filename -> build penalty respectively; zero-cost entries are
        omitted.
    '''
    eqv = {}
    eqb = {}
    sdict = {}
    for s in specs:
        s = MatchSpec(s)  # needed for testing
        sdict.setdefault(s.name, []).append(s)
    for name, pkgs in iteritems(groups):
        mss = sdict.get(name, [])
        bmss = bool(mss)
        if bmss == missing:
            continue
        pkgs = [(self.version_key(p), p) for p in pkgs]
        # If the "target" field in the MatchSpec is supplied, that means we want
        # to minimize the changes to the currently installed package. We prefer
        # any upgrade over any downgrade, but beyond that we want minimal change.
        targets = [ms.target for ms in mss if ms.target]
        if targets:
            tver = max(self.version_key(p) for p in targets)
            v1 = [p for p in pkgs if p[1] in targets]
            v2 = sorted((p for p in pkgs if p[0] >= tver and p[-1] not in targets))
            v3 = sorted((p for p in pkgs if p[0] < tver), reverse=True)
            pkgs = v1 + v2 + v3
        else:
            pkgs = sorted(pkgs, reverse=True)
        # Walk the ordered candidates, bumping the version penalty at each
        # version boundary and the build penalty at each build boundary.
        pkey = ppkg = None
        for nkey, npkg in pkgs:
            if pkey is None:
                iv = 0 if start0 else 1
                ib = 0
            elif pkey[0] != nkey[0]:
                iv += 1
                ib = 0
            elif pkey[1] != nkey[1]:
                ib += 1
            if iv:
                eqv[npkg] = iv
            if ib:
                eqb[npkg] = ib
            pkey, ppkg = nkey, npkg
    return eqv, eqb
def dependency_sort(self, must_have):
    '''Topologically sort *must_have* (a dict of name -> dist, or any
    iterable of dists) so that dependencies precede their dependents.

    Dists may arrive bare ('name-ver-build'), already carrying the
    '.tar.bz2' extension, or carrying a '[feature]' suffix (e.g.
    'name-ver-build.tar.bz2[mkl]'); all three forms are normalized to a
    valid index key before the dependency lookup, so feature-tagged
    dists no longer raise KeyError. Dists entirely unknown to the index
    are sorted without dependency information instead of aborting.
    '''
    def lookup(value):
        # Normalize the dist to its actual index key.
        fn = value
        if not fn.endswith(']') and not fn.endswith('.tar.bz2'):
            fn += '.tar.bz2'
        if fn not in self.index and fn.endswith(']'):
            # 'name-ver-build[feat]' -> 'name-ver-build.tar.bz2[feat]'
            base, _, feat = fn.rpartition('[')
            fn = base + '.tar.bz2[' + feat
        if fn not in self.index:
            # Unknown package (e.g. a mangled or removed dist): treat it
            # as having no dependencies rather than raising KeyError and
            # failing the whole operation.
            return set()
        return set(ms.name for ms in self.ms_depends(fn))

    digraph = {}
    if not isinstance(must_have, dict):
        must_have = {self.package_name(dist): dist for dist in must_have}
    for key, value in iteritems(must_have):
        digraph[key] = lookup(value)
    sorted_keys = toposort(digraph)
    must_have = must_have.copy()
    # Take all of the items in the sorted keys
    # Don't fail if the key does not exist
    result = [must_have.pop(key) for key in sorted_keys if key in must_have]
    # Take any key that were not sorted
    result.extend(must_have.values())
    return result
# name deprecated; use dependency_sort instead
def graph_sort(self, must_have):
return self.dependency_sort(must_have)
def explicit(self, specs):
    """
    Given the specifications, return:
      A. if one explicit specification (strictness=3) is given, and
         all dependencies of this package are explicit as well ->
         return the filenames of those dependencies (as well as the
         explicit specification)
      B. if not one explicit specifications are given ->
         return the filenames of those (not their dependencies)
      C. None in all other cases
    """
    specs = list(map(MatchSpec, specs))
    if len(specs) == 1:
        ms = MatchSpec(specs[0])
        fn = ms.to_filename()
        if fn is None:
            return None
        if fn not in self.index:
            return None
        res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
        res.append(fn)
    else:
        # 'conda' itself is excluded from the explicit list.
        res = [spec.to_filename() for spec in specs if str(spec) != 'conda']

    # Any non-explicit entry (None) disqualifies the whole set.
    if None in res:
        return None
    res.sort()
    dotlog.debug('explicit(%r) finished' % specs)
    return res
def sum_matches(self, fn1, fn2):
return sum(self.match(ms, fn2) for ms in self.ms_depends(fn1))
def find_substitute(self, installed, features, fn):
    """
    Find a substitute package for `fn` (given `installed` packages)
    which does *NOT* have `features`. If found, the substitute will
    have the same package name and version and its dependencies will
    match the installed packages as closely as possible.
    If no substitute is found, None is returned.
    """
    name, version, unused_build = fn.rsplit('-', 2)
    candidates = {}
    for pkg in self.get_pkgs(MatchSpec(name + ' ' + version)):
        fn1 = pkg.fn
        if self.features(fn1).intersection(features):
            # Carries one of the unwanted features: not a substitute.
            continue
        # Score by how many of the candidate's dependency specs match
        # the installed set; equal scores keep only the last candidate.
        key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
        candidates[key] = fn1

    if candidates:
        maxkey = max(candidates)
        return candidates[maxkey]
    else:
        return None
def bad_installed(self, installed, new_specs):
    '''Check whether the currently installed package set is consistent.

    Args:
        installed: iterable of installed package filenames.
        new_specs: the specs about to be installed, used to decide which
            inconsistencies actually matter.

    Returns:
        (limit, xtra): *limit* is a set of package names the solver
        should restrict itself to (None for no restriction), and *xtra*
        is the list of installed packages to preserve untouched.
    '''
    log.debug('Checking if the current environment is consistent')
    if not installed:
        return None, []
    xtra = []
    dists = {}
    specs = []
    for fn in installed:
        rec = self.index.get(fn)
        if rec is None:
            # Installed but unknown to the index: cannot reason about it.
            xtra.append(fn)
        else:
            dists[fn] = rec
            specs.append(MatchSpec(' '.join(self.package_triple(fn))))
    if xtra:
        log.debug('Packages missing from index: %s' % ', '.join(xtra))

    # SAT-check that the indexed subset of the installed packages is
    # simultaneously satisfiable.
    groups, trackers = build_groups(dists)
    C = self.gen_clauses(groups, trackers, specs)
    constraints = self.generate_spec_constraints(C, specs)
    solution = C.sat(constraints)
    limit = None
    if not solution or xtra:
        # Walk the dependency closure of the new specs; only package
        # names inside that closure need to participate in the solve.
        def get_(name, snames):
            if name not in snames:
                snames.add(name)
                for fn in self.groups.get(name, []):
                    for ms in self.ms_depends(fn):
                        get_(ms.name, snames)
        snames = set()
        for spec in new_specs:
            get_(MatchSpec(spec).name, snames)
        xtra = [x for x in xtra if x not in snames]
        if xtra or not (solution or all(s.name in snames for s in specs)):
            limit = set(s.name for s in specs if s.name in snames)
            xtra = [fn for fn in installed if self.package_name(fn) not in snames]
            log.debug(
                'Limiting solver to the following packages: %s' %
                ', '.join(limit))
    if xtra:
        log.debug('Packages to be preserved: %s' % ', '.join(xtra))
    return limit, xtra
def restore_bad(self, pkgs, preserve):
    """Re-append (in place) preserved packages whose names are absent from pkgs."""
    if preserve:
        present = {self.package_name(p) for p in pkgs}
        pkgs.extend(p for p in preserve if self.package_name(p) not in present)
def install_specs(self, specs, installed, update_deps=True):
    """Augment the requested specs with pins for already-installed packages.

    Returns (specs, preserve): the expanded spec list, plus the installed
    packages that must be preserved as-is (from bad_installed).
    """
    specs = list(map(MatchSpec, specs))
    snames = {s.name for s in specs}
    log.debug('Checking satisfiability of current install')
    limit, preserve = self.bad_installed(installed, specs)
    for pkg in installed:
        if pkg not in self.index:
            continue
        name, version, build = self.package_triple(pkg)
        if name in snames or limit is not None and name not in limit:
            continue
        # If update_deps=True, set the target package in MatchSpec so that
        # the solver can minimize the version change. If update_deps=False,
        # fix the version and build so that no change is possible.
        if update_deps:
            spec = MatchSpec(name, target=pkg)
        else:
            # BUG FIX: the template was ' % s %s %s', which produced a spec
            # string with a leading blank; MatchSpec equality and SAT
            # variable naming key on the raw spec string, so the padded
            # form never matched the canonical 'name version build' spec.
            spec = MatchSpec('%s %s %s' % (name, version, build))
        specs.append(spec)
    return specs, preserve
def install(self, specs, installed=None, update_deps=True, returnall=False):
    """Solve for `specs` on top of `installed`; return the chosen packages."""
    num_requested = len(specs)
    full_specs, preserved = self.install_specs(specs, installed or [], update_deps)
    solution = self.solve(full_specs, len0=num_requested, returnall=returnall)
    self.restore_bad(solution, preserved)
    return solution
def remove_specs(self, specs, installed):
    """Build the spec set for removing `specs` from `installed`.

    The requested names become optional negated specs; every other installed
    package is either kept as an optional targeted spec (no limit) or
    preserved untouched (when the solver is limited).
    Returns (specs, preserve).
    """
    specs = [MatchSpec(s, optional=True, negate=True) for s in specs]
    snames = {s.name for s in specs}
    limit, _ = self.bad_installed(installed, specs)
    preserve = []
    for pkg in installed:
        nm = self.package_name(pkg)
        if nm in snames:
            continue
        elif limit is None:
            # Keep the package if possible, staying close to its current build.
            specs.append(MatchSpec(self.package_name(pkg), optional=True, target=pkg))
        else:
            preserve.append(pkg)
    return specs, preserve
def remove(self, specs, installed):
    """Solve for the removal of `specs` from `installed`; return the result."""
    removal_specs, preserved = self.remove_specs(specs, installed)
    solution = self.solve(removal_specs)
    self.restore_bad(solution, preserved)
    return solution
def solve(self, specs, len0=None, returnall=False):
    """Solve the package specifications and return the chosen filenames.

    The solve proceeds as a sequence of lexicographic optimization passes
    over a single SAT instance: spec satisfaction, optional-spec count,
    requested versions/builds, feature counts, then the remaining package
    versions/builds. Raises Unsatisfiable when no solution exists.
    `specs[:len0]` are treated as the user-requested specs.
    """
    try:
        stdoutlog.info("Solving package specifications: ")
        dotlog.debug("Solving for %s" % (specs,))

        # Find the compliant packages
        specs = list(map(MatchSpec, specs))
        if len0 is None:
            len0 = len(specs)
        dists, new_specs = self.get_dists(specs)
        if not dists:
            return False if dists is None else ([[]] if returnall else [])

        # Check if satisfiable
        dotlog.debug('Checking satisfiability')
        groups, trackers = build_groups(dists)
        C = self.gen_clauses(groups, trackers, specs)
        constraints = self.generate_spec_constraints(C, specs)
        solution = C.sat(constraints, True)
        if not solution:
            # Find the largest set of specs that are satisfiable, and return
            # the list of specs that are not in that set.
            solution = [C.Not(q) for q in range(1, C.m+1)]
            spec2 = [s for s in specs if not s.optional]
            eq_removal_count = self.generate_removal_count(C, spec2)
            solution, obj1 = C.minimize(eq_removal_count, solution)
            specsol = [(s,) for s in spec2 if C.from_name(self.ms_to_v(s)) not in solution]
            raise Unsatisfiable(specsol, False)

        specs.extend(new_specs)
        # Optional packages: maximize count, then versions, then builds
        speco = [s for s in specs if s.optional and
                 any(self.find_matches_group(s, groups, trackers))]
        eq_optional_count = self.generate_removal_count(C, speco)
        solution, obj7 = C.minimize(eq_optional_count, solution)
        dotlog.debug('Package removal metric: %d' % obj7)

        # Snapshot of the clause set so it can be rewound after the
        # feature-count pass below.
        nz = len(C.clauses)
        nv = C.m

        # Requested packages: maximize versions, then builds
        eq_requested_versions, eq_requested_builds = self.generate_version_metrics(
            C, groups, (s for s in specs[:len0] if not s.optional))
        solution, obj3 = C.minimize(eq_requested_versions, solution)
        solution, obj4 = C.minimize(eq_requested_builds, solution)
        dotlog.debug('Initial package version/build metrics: %d/%d' % (obj3, obj4))

        # Minimize the number of installed track_features, maximize featured package count
        eq_feature_count = self.generate_feature_count(C, trackers)
        solution, obj1 = C.minimize(eq_feature_count, solution)
        dotlog.debug('Track feature count: %d' % obj1)

        # Now that we have the feature count, lock it in and re-optimize
        C.clauses = C.clauses[:nz]
        C.m = nv
        C.Require(C.LinearBound, eq_feature_count, obj1, obj1)
        solution = C.sat()
        eq_feature_metric, ftotal = self.generate_feature_metric(C, groups, specs)
        solution, obj2 = C.minimize(eq_feature_metric, solution)
        obj2 = ftotal - obj2
        dotlog.debug('Package feature count: %d' % obj2)

        # Re-optimize requested packages: maximize versions, then builds
        solution, obj3 = C.minimize(eq_requested_versions, solution)
        solution, obj4 = C.minimize(eq_requested_builds, solution)
        dotlog.debug('Requested package version/build metrics: %d/%d' % (obj3, obj4))

        # Required packages: maximize versions, then builds
        eq_required_versions, eq_required_builds = self.generate_version_metrics(
            C, groups, (s for s in specs[len0:] if not s.optional))
        solution, obj5 = C.minimize(eq_required_versions, solution)
        solution, obj6 = C.minimize(eq_required_builds, solution)
        dotlog.debug('Required package version/build metrics: %d/%d' % (obj5, obj6))

        # Optional packages: maximize count, then versions, then builds
        eq_optional_versions, eq_optional_builds = self.generate_version_metrics(
            C, groups, speco)
        solution, obj8 = C.minimize(eq_optional_versions, solution)
        solution, obj9 = C.minimize(eq_optional_builds, solution)
        dotlog.debug('Optional package version/build metrics: %d/%d' % (obj8, obj9))

        # All other packages: maximize versions (favoring none), then builds
        eq_remaining_versions, eq_remaining_builds = self.generate_version_metrics(
            C, groups, specs, missing=True, start0=False)
        solution, obj10 = C.minimize(eq_remaining_versions, solution)
        solution, obj11 = C.minimize(eq_remaining_builds, solution)
        dotlog.debug('Additional package version/build metrics: %d/%d' % (obj10, obj11))

        # Strip SAT bookkeeping, keeping only real package filenames.
        def clean(sol):
            return [q for q in (C.from_index(s) for s in sol)
                    if q and q[0] != '!' and '@' not in q]

        # Enumerate up to 10 alternate optimal solutions by excluding each
        # found solution and re-solving.
        dotlog.debug('Looking for alternate solutions')
        nsol = 1
        psolutions = []
        psolution = clean(solution)
        psolutions.append(psolution)
        while True:
            nclause = tuple(C.Not(C.from_name(q)) for q in psolution)
            solution = C.sat((nclause,), True)
            if solution is None:
                break
            nsol += 1
            if nsol > 10:
                dotlog.debug('Too many solutions; terminating')
                break
            psolution = clean(solution)
            psolutions.append(psolution)

        if nsol > 1:
            psols2 = list(map(set, psolutions))
            common = set.intersection(*psols2)
            diffs = [sorted(set(sol) - common) for sol in psols2]
            stdoutlog.info(
                '\nWarning: %s possible package resolutions '
                '(only showing differing packages):%s%s' %
                ('>10' if nsol > 10 else nsol,
                 dashlist(', '.join(diff) for diff in diffs),
                 '\n ... and others' if nsol > 10 else ''))

        stdoutlog.info('\n')
        return list(map(sorted, psolutions)) if returnall else sorted(psolutions[0])
    except:
        # NOTE(review): bare except is deliberate here -- it restores stdout
        # formatting and re-raises unchanged.
        stdoutlog.info('\n')
        raise
<|code_end|>
| conda/resolve.py
from __future__ import print_function, division, absolute_import
import logging
from collections import defaultdict
from itertools import chain
from conda.compat import iterkeys, itervalues, iteritems, string_types
from conda.logic import minimal_unsatisfiable_subset, Clauses
from conda.version import VersionSpec, normalized_version
from conda.console import setup_handlers
from conda import config
from conda.toposort import toposort
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def dashlist(iter):
    """Format the items as a dashed list: each item on its own ' - ' line."""
    lines = ['\n - ' + str(item) for item in iter]
    return ''.join(lines)
class Unsatisfiable(RuntimeError):
    '''An exception to report unsatisfiable dependencies.

    Args:
        bad_deps: a list of tuples of objects (likely MatchSpecs).
        chains: (optional) if True, the tuples are interpreted as chains
            of dependencies, from top level to bottom. If False, the tuples
            are interpreted as simple lists of conflicting specs.

    Returns:
        Raises an exception with a formatted message detailing the
        unsatisfiable specifications.
    '''
    def __init__(self, bad_deps, chains=True):
        bad_deps = [list(map(str, dep)) for dep in bad_deps]
        if chains:
            # Merge chains that share a common name prefix, accumulating the
            # differing version constraints per position.
            chains = {}
            for dep in sorted(bad_deps, key=len, reverse=True):
                # Split each link into (name, constraint) parts.
                dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
                key = (dep[0],) + tuple(v[0] for v in dep1)
                vals = ('',) + tuple(v[2] for v in dep1)
                found = False
                for key2, csets in iteritems(chains):
                    if key2[:len(key)] == key:
                        for cset, val in zip(csets, vals):
                            cset.add(val)
                        found = True
                if not found:
                    chains[key] = [{val} for val in vals]
            bad_deps = []
            for key, csets in iteritems(chains):
                deps = []
                for name, cset in zip(key, csets):
                    # An empty string means "no constraint" was seen at this
                    # position; collapse or widen the constraint set accordingly.
                    if '' not in cset:
                        pass
                    elif len(cset) == 1:
                        cset.clear()
                    else:
                        cset.remove('')
                        cset.add('*')
                    deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
                chains[key] = ' -> '.join(deps)
            bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
            msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
        else:
            bad_deps = [sorted(dep) for dep in bad_deps]
            bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
            msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
        msg = msg % dashlist(bad_deps)
        super(Unsatisfiable, self).__init__(msg)
class NoPackagesFound(RuntimeError):
    '''An exception to report that requested packages are missing.

    Args:
        bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
            chains, from top level to bottom.

    Returns:
        Raises an exception with a formatted message detailing the
        missing packages and/or dependencies. The terminal spec of each
        chain is also exposed via the `pkgs` attribute.
    '''
    def __init__(self, bad_deps):
        # The last element of each chain is the spec that could not be found.
        deps = set(q[-1].spec for q in bad_deps)
        if all(len(q) > 1 for q in bad_deps):
            what = "Dependencies" if len(bad_deps) > 1 else "Dependency"
        elif all(len(q) == 1 for q in bad_deps):
            what = "Packages" if len(bad_deps) > 1 else "Package"
        else:
            what = "Packages/dependencies"
        bad_deps = dashlist(' -> '.join(map(str, q)) for q in bad_deps)
        # BUG FIX: the template was ' % s missing...', which rendered with a
        # stray leading blank; '%s' is the intended conversion.
        msg = '%s missing in current %s channels: %s' % (what, config.subdir, bad_deps)
        super(NoPackagesFound, self).__init__(msg)
        self.pkgs = deps
class MatchSpec(object):
    """A parsed package specification.

    A spec string has 1-3 space-separated parts: 'name', 'name version',
    or 'name version build' (strictness 1/2/3).

    Attributes:
        spec: the original spec string.
        strictness: 1, 2, or 3.
        target: optional filename the solver should stay close to.
        optional: if True, the spec need not be satisfied.
        negate: if True, match results are inverted.
    """
    def __new__(cls, spec, target=None, optional=False, negate=False):
        # Passing an existing MatchSpec through is a no-op.
        if isinstance(spec, cls):
            return spec
        self = object.__new__(cls)
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3, repr(spec)
        self.name = parts[0]
        if self.strictness == 2:
            self.vspecs = VersionSpec(parts[1])
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])
        self.target = target
        self.optional = optional
        self.negate = negate
        return self

    def match_fast(self, version, build):
        """Match a version/build pair, honoring `negate`."""
        if self.strictness == 1:
            res = True
        elif self.strictness == 2:
            res = self.vspecs.match(version)
        else:
            res = bool((version, build) == self.ver_build)
        return res != self.negate

    def match(self, info):
        """Match a package record dict, or a '*.tar.bz2' filename string."""
        if isinstance(info, string_types):
            name, version, build = info[:-8].rsplit('-', 2)
        else:
            name = info.get('name')
            version = info.get('version')
            build = info.get('build')
        if name != self.name:
            return False
        return self.match_fast(version, build)

    def to_filename(self):
        """Return the exact package filename for a fully-pinned spec, else None."""
        if self.strictness == 3 and not self.optional and not self.negate:
            return self.name + '-%s-%s.tar.bz2' % self.ver_build
        else:
            return None

    def __eq__(self, other):
        # BUG FIX: equality previously compared only `spec` while __hash__
        # included `negate`, violating the eq/hash contract and conflating a
        # spec with its negation in dict/set lookups (e.g. memoization caches).
        return (type(other) is MatchSpec and
                (self.spec, self.negate) == (other.spec, other.negate))

    def __hash__(self):
        return hash((self.spec, self.negate))

    def __repr__(self):
        res = 'MatchSpec(' + repr(self.spec)
        if self.target:
            res += ',target=' + repr(self.target)
        if self.optional:
            res += ',optional=True'
        if self.negate:
            res += ',negate=True'
        return res + ')'

    def __str__(self):
        res = self.spec
        if self.target or self.optional:
            mods = []
            if self.target:
                mods.append('target='+str(self.target))
            if self.optional:
                mods.append('optional')
            if self.negate:
                mods.append('negate')
            res += ' (' + ', '.join(mods) + ')'
        return res
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.

    Ordering is by (normalized version, build number, build string); packages
    with different names are not comparable and raise TypeError.
    """
    def __init__(self, fn, info):
        self.fn = fn
        self.name = info.get('name')
        self.version = info.get('version')
        self.build = info.get('build')
        self.build_number = info.get('build_number')
        self.channel = info.get('channel')
        try:
            self.norm_version = normalized_version(self.version)
        except ValueError:
            # Surface which package carried the malformed version string.
            stderrlog.error("\nThe following stack trace is in reference to "
                            "package:\n\n\t%s\n\n" % fn)
            raise
        self.info = info

    def _asdict(self):
        # Flat dict view of the record, including fn and normalized version.
        result = self.info.copy()
        result['fn'] = self.fn
        result['norm_version'] = str(self.norm_version)
        return result

    def __lt__(self, other):
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        return ((self.norm_version, self.build_number, self.build) <
                (other.norm_version, other.build_number, other.build))

    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        return ((self.norm_version, self.build_number, self.build) ==
                (other.norm_version, other.build_number, other.build))

    def __ne__(self, other):
        return not self == other

    def __gt__(self, other):
        return other < self

    def __le__(self, other):
        return not (other < self)

    def __ge__(self, other):
        return not (self < other)
def build_groups(index):
    """Group the index two ways: by package name and by tracked feature.

    Returns (groups, trackers): `groups` maps a package name to the list of
    filenames carrying that name; `trackers` maps a feature name to the list
    of filenames whose records track that feature.
    """
    by_name = {}
    by_feature = {}
    for key, record in iteritems(index):
        by_name.setdefault(record['name'], []).append(key)
        for feature in record.get('track_features', '').split():
            by_feature.setdefault(feature, []).append(key)
    return by_name, by_feature
class Resolve(object):
    def __init__(self, index):
        """Prepare the solver's view of a channel index.

        Works on a copy of `index`, augmented with two kinds of synthetic
        records: one pseudo-package 'feat@' per feature (so features can be
        reasoned about like packages), and one 'fn[feat]' variant per
        with_features_depends entry. Also builds the name/feature groupings
        and the memoization caches used by find_matches and ms_depends.
        """
        self.index = index.copy()
        for fn, info in iteritems(index):
            for fstr in chain(info.get('features', '').split(),
                              info.get('track_features', '').split()):
                # Synthetic package standing in for the feature itself.
                fpkg = fstr + '@'
                if fpkg not in self.index:
                    self.index[fpkg] = {
                        'name': fpkg, 'version': '0', 'build_number': 0,
                        'build': '', 'depends': [], 'track_features': fstr}
            for fstr in iterkeys(info.get('with_features_depends', {})):
                # Feature-specific variant of the package record.
                fn2 = fn + '[' + fstr + ']'
                self.index[fn2] = info
        self.groups, self.trackers = build_groups(self.index)
        self.find_matches_ = {}  # MatchSpec -> [fn, ...] cache
        self.ms_depends_ = {}    # fn -> [MatchSpec, ...] cache
def default_filter(self, features=None, filter=None):
    """Build (or reset, in place) the baseline validity filter.

    Every synthetic feature package 'feat@' starts out invalid (False)
    unless its feature appears in `features`. Returns the filter dict.
    """
    if filter is None:
        filter = {}
    else:
        filter.clear()
    for fstr in self.trackers:
        filter[fstr + '@'] = False
    for fstr in (features or ()):
        filter[fstr + '@'] = True
    return filter
def valid(self, spec, filter):
    """Tests if a package, MatchSpec, or a list of both has satisfiable
    dependencies, assuming cyclic dependencies are always valid.

    Args:
        fn: a package key, a MatchSpec, or an iterable of these.
        filter: a dictionary of (fn,valid) pairs, used to consider a subset
            of dependencies, and to eliminate repeated searches.

    Returns:
        True if the full set of dependencies can be satisfied; False otherwise.
        If filter is supplied and update is True, it will be updated with the
        search results.
    """
    def v_(spec):
        return v_ms_(spec) if isinstance(spec, MatchSpec) else v_fn_(spec)

    def v_ms_(ms):
        # A spec is valid if it is optional or any matching build is valid.
        return ms.optional or any(v_fn_(fn) for fn in self.find_matches(ms))

    def v_fn_(fn):
        val = filter.get(fn)
        if val is None:
            # Pre-mark True before recursing so dependency cycles are
            # treated as valid instead of recursing forever.
            filter[fn] = True
            val = filter[fn] = all(v_ms_(ms) for ms in self.ms_depends(fn))
        return val

    return v_(spec)
def touch(self, spec, touched, filter):
    """Determines a conservative set of packages to be considered given a
    package, or a spec, or a list thereof. Cyclic dependencies are not
    solved, so there is no guarantee a solution exists.

    Args:
        fn: a package key or MatchSpec
        touched: a dict into which to accumulate the result. This is
            useful when processing multiple specs.
        filter: a dictionary of (fn,valid) pairs to be used when
            testing for package validity.

    This function works in two passes. First, it verifies that the package has
    satisfiable dependencies from among the filtered packages. If not, then it
    is _not_ touched, nor are its dependencies. If so, then it is marked as
    touched, and any of its valid dependencies are as well.
    """
    def t_fn_(fn):
        val = touched.get(fn)
        if val is None:
            val = touched[fn] = self.valid(fn, filter)
            if val:
                # Recurse into real (non-feature) dependencies only.
                for ms in self.ms_depends(fn):
                    if ms.name[0] != '@':
                        t_ms_(ms)

    def t_ms_(ms):
        for fn in self.find_matches(ms):
            t_fn_(fn)

    return t_ms_(spec) if isinstance(spec, MatchSpec) else t_fn_(spec)
def invalid_chains(self, spec, filter):
    """Constructs a set of 'dependency chains' for invalid specs.

    A dependency chain is a tuple of MatchSpec objects, starting with
    the requested spec, proceeding down the dependency tree, ending at
    a specification that cannot be satisfied. Uses self.valid_ as a
    filter, both to prevent chains and to allow other routines to
    prune the list of valid packages with additional criteria.

    Args:
        spec: a package key or MatchSpec
        filter: a dictionary of (fn,valid) pairs to be used when
            testing for package validity.

    Returns:
        A list of tuples, or an empty list if the MatchSpec is valid.
    """
    def chains_(spec, top=None):
        # NOTE(review): recursive calls never pass `top`, so the
        # spec.name == top short-circuit only fires if a name is None.
        if spec.name == top or self.valid(spec, filter):
            return []
        notfound = set()
        specs = self.find_matches(spec) if isinstance(spec, MatchSpec) else [spec]
        for fn in specs:
            for m2 in self.ms_depends(fn):
                notfound.update(chains_(m2))
        # If nothing deeper failed, this spec itself is the dead end.
        return [(spec,) + x for x in notfound] if notfound else [(spec,)]
    return chains_(spec)
def verify_specs(self, specs):
    """Perform a quick verification that specs and dependencies are reasonable.

    Args:
        specs: An iterable of strings or MatchSpec objects to be tested.

    Returns:
        (spec2, rems, opts, feats): mandatory specs, negated (removal)
        specs, satisfiable optional specs, and requested feature names.
        Raises NoPackagesFound if any mandatory spec cannot be satisfied.
        Note that this does not attempt to resolve circular dependencies.
    """
    bad_deps = []
    opts = []
    rems = []
    spec2 = []
    feats = set()
    for s in specs:
        ms = MatchSpec(s)
        # 'feat@' specs request a feature rather than a package.
        if ms.name[-1] == '@':
            feats.add(ms.name[:-1])
            continue
        if ms.negate:
            rems.append(MatchSpec(ms.spec))
        if not ms.optional:
            spec2.append(ms)
        elif any(self.find_matches(ms)):
            # Optional specs with no candidates are silently dropped.
            opts.append(ms)
    for ms in spec2:
        filter = self.default_filter(feats)
        if not self.valid(ms, filter):
            bad_deps.extend(self.invalid_chains(ms, filter))
    if bad_deps:
        raise NoPackagesFound(bad_deps)
    return spec2, rems, opts, feats
def get_dists(self, specs):
    """Compute the pruned package universe for a solve.

    Iteratively filters the index down to packages that can participate in
    a solution for `specs`. Individual builds whose dependencies cannot be
    found are simply excluded from consideration here rather than aborting
    the solve; an unsatisfiable chain is only recorded when *every* build
    of a mandatory package is pruned away. On an outright conflict, a
    minimal unsatisfiable subset of the specs is computed and raised as
    Unsatisfiable.

    Returns:
        (dists, new_specs): the surviving {fn: record} map and the extra
        bare-name MatchSpecs discovered as mandatory dependencies.
    """
    log.debug('Retrieving packages for: %s' % specs)

    specs, removes, optional, features = self.verify_specs(specs)
    filter = {}
    touched = {}
    snames = set()
    nspecs = set()
    unsat = []

    def filter_group(matches, chains=None):
        # If we are here, then this dependency is mandatory,
        # so add it to the master list. That way it still
        # participates in the pruning even if one of its
        # parents is pruned away
        match1 = next(ms for ms in matches)
        name = match1.name
        first = name not in snames
        group = self.groups.get(name, [])

        # Prune packages that don't match any of the patterns
        # or which have unsatisfiable dependencies. A build with a
        # missing dependency is dropped here, not fatal by itself.
        nold = 0
        bad_deps = []
        for fn in group:
            if filter.setdefault(fn, True):
                nold += 1
                sat = self.match_any(matches, fn)
                sat = sat and all(any(filter.get(f2, True) for f2 in self.find_matches(ms))
                                  for ms in self.ms_depends(fn))
                filter[fn] = sat
                if not sat:
                    bad_deps.append(fn)

        # Build dependency chains if we detect unsatisfiability
        nnew = nold - len(bad_deps)
        reduced = nnew < nold
        if reduced:
            log.debug(' % s: pruned from %d -> %d' % (name, nold, nnew))
        if nnew == 0:
            # Every build was pruned: record the failing chains.
            if name in snames:
                snames.remove(name)
            bad_deps = [fn for fn in bad_deps if self.match_any(matches, fn)]
            matches = [(ms,) for ms in matches]
            chains = [a + b for a in chains for b in matches] if chains else matches
            if bad_deps:
                dep2 = set()
                for fn in bad_deps:
                    for ms in self.ms_depends(fn):
                        if not any(filter.get(f2, True) for f2 in self.find_matches(ms)):
                            dep2.add(ms)
                chains = [a + (b,) for a in chains for b in dep2]
            unsat.extend(chains)
            return nnew
        if not reduced and not first:
            return False

        # Perform the same filtering steps on any dependencies shared across
        # *all* packages in the group. Even if just one of the packages does
        # not have a particular dependency, it must be ignored in this pass.
        if first:
            snames.add(name)
            if match1 not in specs:
                nspecs.add(MatchSpec(name))
        cdeps = defaultdict(list)
        for fn in group:
            if filter[fn]:
                for m2 in self.ms_depends(fn):
                    if m2.name[0] != '@' and not m2.optional:
                        cdeps[m2.name].append(m2)
        cdeps = {mname: set(deps) for mname, deps in iteritems(cdeps) if len(deps) == nnew}
        if cdeps:
            matches = [(ms,) for ms in matches]
            if chains:
                matches = [a + b for a in chains for b in matches]
            if sum(filter_group(deps, chains) for deps in itervalues(cdeps)):
                reduced = True
        return reduced

    # Iterate in the filtering process until no more progress is made
    def full_prune(specs, removes, optional, features):
        self.default_filter(features, filter)
        for ms in removes:
            for fn in self.find_matches(ms):
                filter[fn] = False
        feats = set(self.trackers.keys())
        snames.clear()
        specs = slist = list(specs)
        onames = set(s.name for s in specs)
        for iter in range(10):
            first = True
            # Keep pruning until a full pass makes no progress.
            while sum(filter_group([s]) for s in slist):
                slist = specs + [MatchSpec(n) for n in snames - onames]
                first = False
            if unsat:
                return False
            if first and iter:
                return True
            # Mark the reachable ("touched") packages and the features
            # they collectively track.
            touched.clear()
            for fstr in features:
                touched[fstr+'@'] = True
            for spec in chain(specs, optional):
                self.touch(spec, touched, filter)
            nfeats = set()
            for fn, val in iteritems(touched):
                if val:
                    nfeats.update(self.track_features(fn))
            if len(nfeats) >= len(feats):
                return True
            # Drop packages carrying features no longer in play, then iterate.
            pruned = False
            feats &= nfeats
            for fn, val in iteritems(touched):
                if val and self.features(fn) - feats:
                    touched[fn] = filter[fn] = False
                    filter[fn] = False
                    pruned = True
            if not pruned:
                return True

    #
    # In the case of a conflict, look for the minimum satisfiable subset
    #
    if not full_prune(specs, removes, optional, features):
        def minsat_prune(specs):
            return full_prune(specs, removes, [], features)

        save_unsat = set(s for s in unsat if s[0] in specs)
        stderrlog.info('...')
        hint = minimal_unsatisfiable_subset(specs, sat=minsat_prune, log=False)
        save_unsat.update((ms,) for ms in hint)
        raise Unsatisfiable(save_unsat)

    dists = {fn: self.index[fn] for fn, val in iteritems(touched) if val}
    return dists, list(map(MatchSpec, snames - {ms.name for ms in specs}))
def match_any(self, mss, fn):
    """True if any spec in `mss` matches the indexed package `fn`."""
    record = self.index[fn]
    name, version, build = record['name'], record['version'], record['build']
    for ms in mss:
        if ms.name == name and ms.match_fast(version, build):
            return True
    return False
def match(self, ms, fn):
    # Coerce ms to a MatchSpec and test it against the index record for fn.
    return MatchSpec(ms).match(self.index[fn])
def find_matches_group(self, ms, groups, trackers=None):
    """Yield the filenames in `groups`/`trackers` satisfying spec `ms`.

    A spec named '@feat' selects the packages tracking feature 'feat';
    any other spec is matched by name plus version/build.
    """
    ms = MatchSpec(ms)
    if ms.name[0] == '@' and trackers:
        for fn in trackers.get(ms.name[1:], []):
            yield fn
    else:
        for fn in groups.get(ms.name, []):
            rec = self.index[fn]
            if ms.match_fast(rec['version'], rec['build']):
                yield fn
def find_matches(self, ms):
    """Return the list of index filenames matching `ms` (memoized)."""
    ms = MatchSpec(ms)
    res = self.find_matches_.get(ms, None)
    if res is None:
        if ms.name[0] == '@':
            # '@feat' specs resolve through the feature trackers.
            res = self.find_matches_[ms] = self.trackers.get(ms.name[1:], [])
        else:
            res = self.find_matches_[ms] = list(self.find_matches_group(ms, self.groups))
    return res
def ms_depends(self, fn):
    """Return the dependency MatchSpecs of package `fn` (memoized).

    For 'fn[feat]' variants, the with_features_depends entries override
    the base package's dependencies of the same name. Each feature of the
    package also contributes a synthetic '@feature' dependency.
    """
    deps = self.ms_depends_.get(fn, None)
    if deps is None:
        if fn[-1] == ']':
            fn2, fstr = fn[:-1].split('[')
            fdeps = {d.name: d for d in self.ms_depends(fn2)}
            for dep in self.index[fn2]['with_features_depends'][fstr]:
                dep = MatchSpec(dep)
                fdeps[dep.name] = dep
            deps = list(fdeps.values())
        else:
            deps = [MatchSpec(d) for d in self.index[fn].get('depends', [])]
        deps.extend(MatchSpec('@'+feat) for feat in self.features(fn))
        self.ms_depends_[fn] = deps
    return deps
def version_key(self, fn, vtype=None):
    # Sort key for a package: (normalized version, build number).
    # NOTE(review): `vtype` is accepted but unused; kept for interface
    # compatibility -- confirm callers before removing.
    rec = self.index[fn]
    return (normalized_version(rec['version']), rec['build_number'])
def features(self, fn):
    """Return the set of feature names attached to package fn."""
    raw = self.index[fn].get('features', '')
    return set(raw.split())
def track_features(self, fn):
    """Return the set of feature names that package fn tracks (provides)."""
    raw = self.index[fn].get('track_features', '')
    return set(raw.split())
def package_triple(self, fn):
    """Return (name, version, build) for a package filename.

    Looks the record up in the index; when the filename is unknown, falls
    back to parsing the filename itself. CONSISTENCY FIX: the fallback
    path previously returned a list while the indexed path returned a
    tuple; both now return a tuple.
    """
    if not fn.endswith('.tar.bz2'):
        return self.package_triple(fn + '.tar.bz2')
    rec = self.index.get(fn, None)
    if rec is None:
        return tuple(fn[:-8].rsplit('-', 2))
    return (rec['name'], rec['version'], rec['build'])
def package_name(self, fn):
    """Return just the package-name portion of filename fn."""
    name, _, _ = self.package_triple(fn)
    return name
def get_pkgs(self, ms, emptyok=False):
    """Return sortable Package objects for all builds matching `ms`.

    Raises NoPackagesFound when nothing matches, unless `emptyok`.
    """
    ms = MatchSpec(ms)
    pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
    if not pkgs and not emptyok:
        raise NoPackagesFound([(ms,)])
    return pkgs
@staticmethod
def ms_to_v(ms):
    """SAT variable name for a MatchSpec: '@s@<spec>', '!'-suffixed when negated."""
    suffix = '!' if ms.negate else ''
    return '@s@' + ms.spec + suffix
@staticmethod
def feat_to_v(feat):
    """SAT variable name for the synthetic feature package of `feat`."""
    return '@s@@' + feat
def gen_clauses(self, groups, trackers, specs):
    """Encode the package universe and specs as a SAT instance (Clauses)."""
    C = Clauses()

    # Creates a variable that represents the proposition:
    #     Does the package set include a package that matches MatchSpec "ms"?
    def push_MatchSpec(ms):
        name = self.ms_to_v(ms)
        m = C.from_name(name)
        if m is None:
            libs = [fn for fn in self.find_matches_group(ms, groups, trackers)]
            # If the MatchSpec is optional, then there may be cases where we want
            # to assert that it is *not* True. This requires polarity=None.
            m = C.Any(libs, polarity=None if ms.optional else True, name=name)
        return m

    # Creates a variable that represents the proposition:
    #     Does the package set include package "fn"?
    for group in itervalues(groups):
        for fn in group:
            C.new_var(fn)
        # Install no more than one version of each package
        C.Require(C.AtMostOne, group)

    # Create a variable that represents the proposition:
    #     Is the feature "name" active in this package set?
    # We mark this as "optional" below because sometimes we need to be able to
    # assert the proposition is False during the feature minimization pass.
    for name in iterkeys(trackers):
        ms = MatchSpec('@' + name)
        ms.optional = True
        push_MatchSpec(ms)

    # Create a variable that represents the proposition:
    #     Is the MatchSpec "ms" satisfied by the current package set?
    for ms in specs:
        push_MatchSpec(ms)

    # Create propositions that assert:
    #     If package "fn" is installed, its dependencies must be satisfied
    for group in itervalues(groups):
        for fn in group:
            for ms in self.ms_depends(fn):
                if not ms.optional:
                    C.Require(C.Or, C.Not(fn), push_MatchSpec(ms))
    return C
def generate_spec_constraints(self, C, specs):
    """One unit clause per mandatory spec: its SAT variable must hold."""
    required = [ms for ms in specs if not ms.optional]
    return [(self.ms_to_v(ms),) for ms in required]
def generate_feature_count(self, C, trackers):
    """Objective coefficients charging one unit per active tracked feature."""
    counts = {}
    for name in iterkeys(trackers):
        counts[self.feat_to_v(name)] = 1
    return counts
def generate_feature_metric(self, C, groups, specs):
    """Objective penalizing builds with fewer features than the most-featured
    build of the same name; returns (coefficients, total of the maxima)."""
    eq = {}
    total = 0
    for name, group in iteritems(groups):
        counts = [len(self.features(fn)) for fn in group]
        top = max(counts)
        for fn, fc in zip(group, counts):
            if fc < top:
                eq[fn] = top - fc
        total += top
    return eq, total
def generate_removal_count(self, C, specs):
    """Objective charging one unit for each spec left unsatisfied (negated var)."""
    return {'!' + self.ms_to_v(ms): 1 for ms in specs}
def generate_version_metrics(self, C, groups, specs,
                             missing=False, start0=True):
    """Build version/build-number penalty coefficients for the optimizer.

    Returns (eqv, eqb): per-package penalties that grow as the version
    (eqv) or build (eqb) falls further down the preferred ordering. With
    missing=False only groups named by `specs` are scored; missing=True
    scores the others. start0=False charges even the top choice one unit
    (used to favor leaving unrequested packages out entirely).
    """
    eqv = {}
    eqb = {}
    sdict = {}
    for s in specs:
        s = MatchSpec(s)  # needed for testing
        sdict.setdefault(s.name, []).append(s)
    for name, pkgs in iteritems(groups):
        mss = sdict.get(name, [])
        bmss = bool(mss)
        if bmss == missing:
            continue
        pkgs = [(self.version_key(p), p) for p in pkgs]
        # If the "target" field in the MatchSpec is supplied, that means we want
        # to minimize the changes to the currently installed package. We prefer
        # any upgrade over any downgrade, but beyond that we want minimal change.
        targets = [ms.target for ms in mss if ms.target]
        if targets:
            tver = max(self.version_key(p) for p in targets)
            v1 = [p for p in pkgs if p[1] in targets]
            v2 = sorted((p for p in pkgs if p[0] >= tver and p[-1] not in targets))
            v3 = sorted((p for p in pkgs if p[0] < tver), reverse=True)
            pkgs = v1 + v2 + v3
        else:
            pkgs = sorted(pkgs, reverse=True)
        # Walk the preference order, bumping the version penalty at each
        # version change and the build penalty at each build change.
        pkey = ppkg = None
        for nkey, npkg in pkgs:
            if pkey is None:
                iv = 0 if start0 else 1
                ib = 0
            elif pkey[0] != nkey[0]:
                iv += 1
                ib = 0
            elif pkey[1] != nkey[1]:
                ib += 1
            if iv:
                eqv[npkg] = iv
            if ib:
                eqb[npkg] = ib
            pkey, ppkg = nkey, npkg
    return eqv, eqb
def dependency_sort(self, must_have):
    """Topologically sort packages so dependencies precede dependents.

    Accepts either a {name: dist} dict or an iterable of dists; returns
    the dists in installation order, with any unsortable entries appended.
    """
    def lookup(value):
        # Names of the package's dependencies.
        return set(ms.name for ms in self.ms_depends(value + '.tar.bz2'))

    digraph = {}
    if not isinstance(must_have, dict):
        must_have = {self.package_name(dist): dist for dist in must_have}
    for key, value in iteritems(must_have):
        depends = lookup(value)
        digraph[key] = depends
    sorted_keys = toposort(digraph)
    must_have = must_have.copy()
    # Take all of the items in the sorted keys
    # Don't fail if the key does not exist
    result = [must_have.pop(key) for key in sorted_keys if key in must_have]
    # Take any keys that were not sorted
    result.extend(must_have.values())
    return result
# name deprecated; use dependency_sort instead
def graph_sort(self, must_have):
    """Deprecated alias for dependency_sort()."""
    return self.dependency_sort(must_have)
def explicit(self, specs):
    """
    Given the specifications, return:
      A. if one explicit specification (strictness=3) is given, and
         all dependencies of this package are explicit as well ->
         return the filenames of those dependencies (as well as the
         explicit specification)
      B. if not one explicit specifications are given ->
         return the filenames of those (not their dependencies)
      C. None in all other cases
    """
    specs = list(map(MatchSpec, specs))
    if len(specs) == 1:
        ms = MatchSpec(specs[0])
        fn = ms.to_filename()
        if fn is None:
            return None
        if fn not in self.index:
            return None
        res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
        res.append(fn)
    else:
        res = [spec.to_filename() for spec in specs if str(spec) != 'conda']

    # Any non-explicit entry (to_filename() -> None) disqualifies the set.
    if None in res:
        return None
    res.sort()
    dotlog.debug('explicit(%r) finished' % specs)
    return res
def sum_matches(self, fn1, fn2):
    """Count how many of fn1's dependency specs are satisfied by package fn2."""
    return sum(1 for ms in self.ms_depends(fn1) if self.match(ms, fn2))
def find_substitute(self, installed, features, fn):
    """
    Find a substitute package for `fn` (given `installed` packages)
    which does *NOT* have `features`. If found, the substitute will
    have the same package name and version and its dependencies will
    match the installed packages as closely as possible.
    If no substitute is found, None is returned.
    """
    name, version, unused_build = fn.rsplit('-', 2)
    candidates = {}
    # Score every same-name/same-version build that avoids the unwanted
    # features by how many of its dependencies the installed set satisfies.
    for pkg in self.get_pkgs(MatchSpec(name + ' ' + version)):
        fn1 = pkg.fn
        if self.features(fn1).intersection(features):
            continue
        key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
        # NOTE(review): candidates are keyed by score, so a later build with
        # an equal score silently replaces an earlier one.
        candidates[key] = fn1
    if candidates:
        # Return the best-scoring substitute.
        maxkey = max(candidates)
        return candidates[maxkey]
    else:
        return None
def bad_installed(self, installed, new_specs):
    """Check the installed set for consistency against the index.

    Returns (limit, xtra): `limit` is either None (no restriction) or the
    set of package names the solver should confine itself to; `xtra` is the
    list of installed filenames to preserve untouched (missing from the
    index, or outside the limited name set).
    """
    log.debug('Checking if the current environment is consistent')
    if not installed:
        return None, []
    xtra = []    # installed packages with no index record
    dists = {}   # indexed records for the installed packages
    specs = []   # exact name/version/build pins for the installed packages
    for fn in installed:
        rec = self.index.get(fn)
        if rec is None:
            xtra.append(fn)
        else:
            dists[fn] = rec
            specs.append(MatchSpec(' '.join(self.package_triple(fn))))
    if xtra:
        log.debug('Packages missing from index: %s' % ', '.join(xtra))
    # SAT-check that the pinned install is mutually consistent.
    groups, trackers = build_groups(dists)
    C = self.gen_clauses(groups, trackers, specs)
    constraints = self.generate_spec_constraints(C, specs)
    solution = C.sat(constraints)
    limit = None
    if not solution or xtra:
        # Gather the transitive dependency-name closure of the new specs.
        def get_(name, snames):
            if name not in snames:
                snames.add(name)
                for fn in self.groups.get(name, []):
                    for ms in self.ms_depends(fn):
                        get_(ms.name, snames)
        snames = set()
        for spec in new_specs:
            get_(MatchSpec(spec).name, snames)
        # Only packages touched by the new specs need to participate.
        xtra = [x for x in xtra if x not in snames]
        if xtra or not (solution or all(s.name in snames for s in specs)):
            limit = set(s.name for s in specs if s.name in snames)
            xtra = [fn for fn in installed if self.package_name(fn) not in snames]
            log.debug(
                'Limiting solver to the following packages: %s' %
                ', '.join(limit))
    if xtra:
        log.debug('Packages to be preserved: %s' % ', '.join(xtra))
    return limit, xtra
def restore_bad(self, pkgs, preserve):
    """Re-append (in place) preserved packages whose names are absent from pkgs."""
    if preserve:
        present = {self.package_name(p) for p in pkgs}
        pkgs.extend(p for p in preserve if self.package_name(p) not in present)
def install_specs(self, specs, installed, update_deps=True):
    """Augment the requested `specs` with specs pinning (or targeting) the
    already-installed packages, and return ``(specs, preserve)`` where
    `preserve` is the list of installed filenames to keep untouched.
    """
    specs = list(map(MatchSpec, specs))
    snames = {s.name for s in specs}
    log.debug('Checking satisfiability of current install')
    limit, preserve = self.bad_installed(installed, specs)
    for pkg in installed:
        if pkg not in self.index:
            continue
        name, version, build = self.package_triple(pkg)
        # Skip packages explicitly requested, or outside the solver limit.
        if name in snames or limit is not None and name not in limit:
            continue
        # If update_deps=True, set the target package in MatchSpec so that
        # the solver can minimize the version change. If update_deps=False,
        # fix the version and build so that no change is possible.
        if update_deps:
            spec = MatchSpec(name, target=pkg)
        else:
            # NOTE(review): the format string contains a stray space
            # ('% s'), yielding a leading space in the spec string; this
            # appears harmless for %s but was likely meant as '%s %s %s'
            # -- confirm against MatchSpec's parser.
            spec = MatchSpec(' % s %s %s' % (name, version, build))
        specs.append(spec)
    return specs, preserve
def install(self, specs, installed=None, update_deps=True, returnall=False):
    """Solve for installing `specs` on top of `installed`, restoring any
    packages that had to be preserved outside the solve."""
    requested = len(specs)
    full_specs, preserve = self.install_specs(specs, installed or [], update_deps)
    result = self.solve(full_specs, len0=requested, returnall=returnall)
    self.restore_bad(result, preserve)
    return result
def remove_specs(self, specs, installed):
    """Build negated specs for removing `specs` from `installed`.

    Returns ``(specs, preserve)``: the negated/optional spec list for the
    solver and the installed filenames to preserve unchanged when the
    solver had to be limited.
    """
    negated = [MatchSpec(s, optional=True, negate=True) for s in specs]
    targeted = {ms.name for ms in negated}
    limit, _ = self.bad_installed(installed, negated)
    preserve = []
    for pkg in installed:
        name = self.package_name(pkg)
        if name in targeted:
            continue
        if limit is None:
            # Keep the remaining installed packages as optional targets so
            # the solver changes them as little as possible.
            negated.append(MatchSpec(name, optional=True, target=pkg))
        else:
            preserve.append(pkg)
    return negated, preserve
def remove(self, specs, installed):
    """Solve for removing `specs` from `installed`, restoring preserved
    packages afterwards."""
    negated, preserve = self.remove_specs(specs, installed)
    result = self.solve(negated)
    self.restore_bad(result, preserve)
    return result
def solve(self, specs, len0=None, returnall=False):
    """Resolve `specs` to a sorted list of package filenames.

    The first `len0` specs are the user-requested ones; the rest were
    added by install_specs/remove_specs.  Optimization proceeds through a
    fixed sequence of objectives (removals, requested versions/builds,
    feature counts, required/optional/remaining versions), each locked in
    before the next.  If `returnall` is true, up to 10 alternate optimal
    solutions are returned as a list of lists.  Raises Unsatisfiable when
    the non-optional specs cannot be jointly satisfied.
    """
    try:
        stdoutlog.info("Solving package specifications: ")
        dotlog.debug("Solving for %s" % (specs,))

        # Find the compliant packages
        specs = list(map(MatchSpec, specs))
        if len0 is None:
            len0 = len(specs)
        dists, new_specs = self.get_dists(specs)
        if not dists:
            return False if dists is None else ([[]] if returnall else [])

        # Check if satisfiable
        dotlog.debug('Checking satisfiability')
        groups, trackers = build_groups(dists)
        C = self.gen_clauses(groups, trackers, specs)
        constraints = self.generate_spec_constraints(C, specs)
        solution = C.sat(constraints, True)
        if not solution:
            # Find the largest set of specs that are satisfiable, and return
            # the list of specs that are not in that set.
            solution = [C.Not(q) for q in range(1, C.m+1)]
            spec2 = [s for s in specs if not s.optional]
            eq_removal_count = self.generate_removal_count(C, spec2)
            solution, obj1 = C.minimize(eq_removal_count, solution)
            specsol = [(s,) for s in spec2 if C.from_name(self.ms_to_v(s)) not in solution]
            raise Unsatisfiable(specsol, False)

        specs.extend(new_specs)
        # Optional packages: maximize count, then versions, then builds
        speco = [s for s in specs if s.optional and
                 any(self.find_matches_group(s, groups, trackers))]
        eq_optional_count = self.generate_removal_count(C, speco)
        solution, obj7 = C.minimize(eq_optional_count, solution)
        dotlog.debug('Package removal metric: %d' % obj7)

        # Remember the clause/variable counts so the feature-count bound can
        # be re-imposed on a clean formula below.
        nz = len(C.clauses)
        nv = C.m

        # Requested packages: maximize versions, then builds
        eq_requested_versions, eq_requested_builds = self.generate_version_metrics(
            C, groups, (s for s in specs[:len0] if not s.optional))
        solution, obj3 = C.minimize(eq_requested_versions, solution)
        solution, obj4 = C.minimize(eq_requested_builds, solution)
        dotlog.debug('Initial package version/build metrics: %d/%d' % (obj3, obj4))

        # Minimize the number of installed track_features, maximize featured package count
        eq_feature_count = self.generate_feature_count(C, trackers)
        solution, obj1 = C.minimize(eq_feature_count, solution)
        dotlog.debug('Track feature count: %d' % obj1)

        # Now that we have the feature count, lock it in and re-optimize
        C.clauses = C.clauses[:nz]
        C.m = nv
        C.Require(C.LinearBound, eq_feature_count, obj1, obj1)
        solution = C.sat()
        eq_feature_metric, ftotal = self.generate_feature_metric(C, groups, specs)
        solution, obj2 = C.minimize(eq_feature_metric, solution)
        obj2 = ftotal - obj2
        dotlog.debug('Package feature count: %d' % obj2)

        # Re-optimize requested packages: maximize versions, then builds
        solution, obj3 = C.minimize(eq_requested_versions, solution)
        solution, obj4 = C.minimize(eq_requested_builds, solution)
        dotlog.debug('Requested package version/build metrics: %d/%d' % (obj3, obj4))

        # Required packages: maximize versions, then builds
        eq_required_versions, eq_required_builds = self.generate_version_metrics(
            C, groups, (s for s in specs[len0:] if not s.optional))
        solution, obj5 = C.minimize(eq_required_versions, solution)
        solution, obj6 = C.minimize(eq_required_builds, solution)
        dotlog.debug('Required package version/build metrics: %d/%d' % (obj5, obj6))

        # Optional packages: maximize count, then versions, then builds
        eq_optional_versions, eq_optional_builds = self.generate_version_metrics(
            C, groups, speco)
        solution, obj8 = C.minimize(eq_optional_versions, solution)
        solution, obj9 = C.minimize(eq_optional_builds, solution)
        dotlog.debug('Optional package version/build metrics: %d/%d' % (obj8, obj9))

        # All other packages: maximize versions (favoring none), then builds
        eq_remaining_versions, eq_remaining_builds = self.generate_version_metrics(
            C, groups, specs, missing=True, start0=False)
        solution, obj10 = C.minimize(eq_remaining_versions, solution)
        solution, obj11 = C.minimize(eq_remaining_builds, solution)
        dotlog.debug('Additional package version/build metrics: %d/%d' % (obj10, obj11))

        def clean(sol):
            # Map SAT indices back to names, dropping negations ('!') and
            # internal bookkeeping variables ('@').
            return [q for q in (C.from_index(s) for s in sol)
                    if q and q[0] != '!' and '@' not in q]

        dotlog.debug('Looking for alternate solutions')
        nsol = 1
        psolutions = []
        psolution = clean(solution)
        psolutions.append(psolution)
        while True:
            # Forbid the previous solution and search again.
            nclause = tuple(C.Not(C.from_name(q)) for q in psolution)
            solution = C.sat((nclause,), True)
            if solution is None:
                break
            nsol += 1
            if nsol > 10:
                dotlog.debug('Too many solutions; terminating')
                break
            psolution = clean(solution)
            psolutions.append(psolution)

        if nsol > 1:
            psols2 = list(map(set, psolutions))
            common = set.intersection(*psols2)
            diffs = [sorted(set(sol) - common) for sol in psols2]
            stdoutlog.info(
                '\nWarning: %s possible package resolutions '
                '(only showing differing packages):%s%s' %
                ('>10' if nsol > 10 else nsol,
                 dashlist(', '.join(diff) for diff in diffs),
                 '\n  ... and others' if nsol > 10 else ''))

        def stripfeat(sol):
            # Remove the '[featurename]' suffix from a package key.
            return sol.split('[')[0]

        stdoutlog.info('\n')
        if returnall:
            return [sorted(map(stripfeat, psol)) for psol in psolutions]
        else:
            return sorted(map(stripfeat, psolutions[0]))
    except:
        stdoutlog.info('\n')
        raise
| conda/resolve.py
--- a/conda/resolve.py
+++ b/conda/resolve.py
@@ -1014,8 +1014,13 @@ def clean(sol):
dashlist(', '.join(diff) for diff in diffs),
'\n ... and others' if nsol > 10 else ''))
+ def stripfeat(sol):
+ return sol.split('[')[0]
stdoutlog.info('\n')
- return list(map(sorted, psolutions)) if returnall else sorted(psolutions[0])
+ if returnall:
+ return [sorted(map(stripfeat, psol)) for psol in psolutions]
+ else:
+ return sorted(map(stripfeat, psolutions[0]))
except:
stdoutlog.info('\n')
raise |
activate on master is broken in OSX
@msarahan I'm on master, and I get:
```
$ source activate koo
path:usage: conda [-h] [-V] [--debug] command ...
conda: error: argument command: invalid choice: '..changeps1' (choose from 'info', 'help', 'list', 'search', 'create', 'install', 'update', 'upgrade', 'remove', 'uninstall', 'run', 'config', 'init', 'clean', 'package', 'bundle')
prepending /Users/ilan/python/envs/koo and /Users/ilan/python/envs/koo/cmd and /Users/ilan/python/envs/koo/bin to PATH
-bash: awk: command not found
-bash: dirname: command not found
Traceback (most recent call last):
File "/Users/ilan/python/bin/conda", line 4, in <module>
from conda.cli.main import main
File "/Users/ilan/conda/conda/__init__.py", line 19, in <module>
__version__ = get_version(__file__, __name__)
File "/Users/ilan/python/lib/python2.7/site-packages/auxlib/packaging.py", line 93, in get_version
if is_git_repo(here):
File "/Users/ilan/python/lib/python2.7/site-packages/auxlib/packaging.py", line 70, in is_git_repo
return call(('git', 'rev-parse'), cwd=path) == 0
File "/Users/ilan/python/lib/python2.7/subprocess.py", line 522, in call
return Popen(*popenargs, **kwargs).wait()
File "/Users/ilan/python/lib/python2.7/subprocess.py", line 710, in __init__
errread, errwrite)
File "/Users/ilan/python/lib/python2.7/subprocess.py", line 1335, in _execute_child
raise child_exception
```
Is it true that the new activate scripts depend on `awk` and `dirname`?
| conda/cli/activate.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, join, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import translate_stream, unix_path_to_win, win_path_to_unix, win_path_to_cygwin, find_parent_shell
on_win = sys.platform == "win32"
def help():
    """Print usage for the activate/deactivate helper and exit.

    sys.argv[1] is the internal subcommand ('..activate', '..checkenv',
    or '..deactivate'); the message shown depends on it and on the
    detected parent shell.
    """
    # sys.argv[1] will be ..checkenv in activate if an environment is already
    # activated
    # get grandparent process name to see which shell we're using
    win_process = find_parent_shell()
    if sys.argv[1] in ('..activate', '..checkenv'):
        if on_win and win_process in ["cmd.exe", "powershell.exe"]:
            # BUGFIX: 'Library\bin' in a non-raw string embedded a literal
            # backspace character (\b escape); escape the backslash.
            sys.exit("""Usage: activate ENV
adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
        else:
            sys.exit("""Usage: source activate ENV
adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
    else:  # ..deactivate
        if on_win and win_process in ["cmd.exe", "powershell.exe"]:
            sys.exit("""Usage: deactivate
Removes the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.""")
        else:
            sys.exit("""Usage: source deactivate
removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
def prefix_from_arg(arg):
    """Resolve an environment argument (a path containing os.sep, or a bare
    environment name) to an absolute prefix path; exit on failure."""
    if os.sep in arg:
        candidate = abspath(arg.strip("\""))
        if not isdir(candidate):
            sys.exit('Error: could not find environment: %s' % arg)
        return candidate
    prefix = find_prefix_name(arg)
    if prefix is None:
        sys.exit('Error: could not find environment: %s' % arg)
    return prefix
def binpath_from_arg(arg):
    """Return the list of directories to prepend to PATH for the
    environment named or located by `arg` (platform-dependent layout)."""
    prefix = prefix_from_arg(arg)
    if on_win:
        return [prefix.rstrip("\\"),
                join(prefix, 'cmd'),
                join(prefix, 'Scripts'),
                join(prefix, 'Library', 'bin')]
    return [prefix.rstrip("/"),
            join(prefix, 'cmd'),
            join(prefix, 'bin')]
def pathlist_to_str(paths, escape_backslashes=True):
    """Join bin paths for user-friendly display, escaping single
    backslashes on Windows so they survive console printing."""
    joined = ' and '.join(paths)
    if on_win and escape_backslashes:
        # escape for printing to console - ends up as single \
        return re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', joined)
    return joined.replace("\\\\", "\\")
def main():
    """Implement the ..activate/..deactivate/..checkenv/..setps1 internal
    commands used by the activate/deactivate shell scripts.

    The final print() is the communication channel: the wrapping shell
    script captures stdout and assigns it to PATH (or the prompt).
    Informational text goes to stderr only.
    """
    import conda.config
    import conda.install

    if '-h' in sys.argv or '--help' in sys.argv:
        help()
    path = os.getenv("PATH")
    # This one is because we force Library/bin to be on PATH on windows. Strip it off here.
    if on_win:
        path = path.replace(join(sys.prefix, "Library", "bin")+os.pathsep, "", 1)
    parent_shell = find_parent_shell(path=True)
    if sys.argv[1] == '..activate':
        if len(sys.argv) == 2 or sys.argv[2].lower() == "root":
            binpath = binpath_from_arg("root")
            rootpath = None
        elif len(sys.argv) == 3:
            base_path = sys.argv[2]
            binpath = binpath_from_arg(base_path)
            rootpath = os.pathsep.join(binpath_from_arg("root"))
        else:
            sys.exit("Error: did not expect more than one argument")
        sys.stderr.write("prepending %s to PATH\n" % pathlist_to_str(binpath))
        path = os.pathsep.join([os.pathsep.join(binpath), path])
        if any([shell in parent_shell for shell in ["cmd.exe", "powershell.exe"]]):
            path = translate_stream(path, unix_path_to_win)
            # Clear the root path if it is present
            if rootpath:
                path = path.replace(translate_stream(rootpath, unix_path_to_win), "")
        elif 'cygwin' in parent_shell:
            # this should be harmless to unix paths, but converts win paths to unix for bash on win (msys, cygwin)
            path = translate_stream(path, win_path_to_cygwin)
            # Clear the root path if it is present
            if rootpath:
                path = path.replace(translate_stream(rootpath, win_path_to_cygwin), "")
        else:
            path = translate_stream(path, win_path_to_unix)
            # Clear the root path if it is present
            if rootpath:
                path = path.replace(translate_stream(rootpath, win_path_to_unix), "")
    elif sys.argv[1] == '..deactivate':
        path = os.getenv("CONDA_PATH_BACKUP", "")
        # BUGFIX: removed stray debug writes of "path:" + path to stderr
        # here; they leaked into user-visible output during deactivation.
        if path:
            sys.stderr.write("Restoring PATH to deactivated state\n")
        else:
            path = os.getenv("PATH")  # effectively a no-op; just set PATH to what it already is
    elif sys.argv[1] == '..checkenv':
        if len(sys.argv) < 3:
            sys.exit("Error: no environment provided.")
        if len(sys.argv) > 3:
            sys.exit("Error: did not expect more than one argument.")
        if sys.argv[2] == 'root':
            # no need to check root env and try to install a symlink there
            sys.exit(0)
        binpath = binpath_from_arg(sys.argv[2])  # this should throw an error and exit if the env or path can't be found.
        # Make sure an env always has the conda symlink
        try:
            conda.install.symlink_conda(binpath[0], conda.config.root_dir, find_parent_shell())
        except (IOError, OSError) as e:
            if e.errno == errno.EPERM or e.errno == errno.EACCES:
                sys.exit("Cannot activate environment {}, do not have write access to write conda symlink".format(sys.argv[2]))
            raise
        sys.exit(0)
    elif sys.argv[1] == '..setps1':
        # path is a bit of a misnomer here. It is the prompt setting. However, it is returned
        # below by printing. That is why it is named "path"
        path = sys.argv[3]
        if not path:
            if on_win:
                path = os.getenv("PROMPT", "$P$G")
            else:
                # zsh uses prompt. If it exists, prefer it.
                path = os.getenv("PROMPT")
                # fall back to bash default
                if not path:
                    path = os.getenv("PS1")
        # strip off previous prefix, if any:
        path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
        env_path = sys.argv[2]
        if conda.config.changeps1 and env_path:
            path = "(({})) {}".format(os.path.split(env_path)[-1], path)
    else:
        # This means there is a bug in main.py
        raise ValueError("unexpected command")
    # This print is actually what sets the PATH or PROMPT variable. The shell script gets this value, and finishes the job.
    print(path)
# Script entry point: dispatch to the internal-command handler.
if __name__ == '__main__':
    main()
<|code_end|>
conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        """No-op context manager used when conda.lock is unavailable
        (i.e. when this module runs standalone inside the installer)."""
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
try:
    from conda.utils import win_path_to_unix
except ImportError:
    # Standalone fallback so the installer can run without the conda package.
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive.  If you need that, set root_prefix to "/cygdrive"
        """
        import re
        # Matches drive-letter Windows paths, e.g. C:\foo\bar or C:/foo
        path_re = '[a-zA-Z]:[/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;/\\\\]*'
        converted_paths = [root_prefix + "/" + _path.replace("\\", "/").replace(":", "")
                           for _path in re.findall(path_re, path)]
        return ":".join(converted_paths)
# Platform flag; controls the Windows-only ctypes bindings below.
on_win = bool(sys.platform == "win32")

if on_win:
    import ctypes
    from ctypes import wintypes

    # kernel32 hard-link primitive (always available on supported Windows).
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # Symlinks unsupported (e.g. Windows XP); callers must handle None.
        CreateSymbolicLink = None
def win_hard_link(src, dst):
    "Equivalent to os.link, using the win32 CreateHardLink call."
    # CreateHardLink returns 0/False on failure.
    if not CreateHardLink(dst, src, None):
        raise OSError('win32 hard link failed')
def win_soft_link(src, dst):
    "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
    if CreateSymbolicLink is None:
        # e.g. Windows XP: the API is not exported by kernel32.
        raise OSError('win32 soft link not supported')
    # Third argument: 1 for directory links, 0 for file links.
    if not CreateSymbolicLink(dst, src, isdir(src)):
        raise OSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
    """Special function for Windows XP where the `CreateSymbolicLink`
    function is not available.

    Simply creates a `.bat` file at `dst` which calls `src` together with
    all command line arguments.

    Works of course only with callable files, e.g. `.bat` or `.exe` files.
    """
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as exc:  # Python >2.5
        # Ignore "already exists"; re-raise anything else.
        if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
            pass
        else:
            raise
    if 'cmd.exe' in shell.lower():
        # bat file redirect
        with open(dst+'.bat', 'w') as f:
            f.write('@echo off\n"%s" %%*\n' % src)
    elif 'powershell' in shell.lower():
        # TODO: probably need one here for powershell at some point
        pass
    else:
        # This one is for bash/cygwin/msys
        if src.endswith("conda"):
            src = src + ".exe"
        path_prefix = ""
        if 'cygwin' in shell.lower():
            path_prefix = '/cygdrive'
        src = win_path_to_unix(src, path_prefix)
        dst = win_path_to_unix(dst, path_prefix)
        # NOTE(review): shells out to bash to create the symlink; assumes
        # bash is on PATH for msys/cygwin users -- confirm.
        subprocess.check_call(["bash", "-l", "-c",
                               'ln -sf "%s" "%s"' % (src, dst)])
# Module loggers; stdoutlog is configured elsewhere to mirror console output.
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')

class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None

# Silence "no handlers" warnings when this module is used standalone.
log.addHandler(NullHandler())
# Link strategies used by _link(); link_name_map gives their display names.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` using the requested link strategy
    (LINK_HARD, LINK_SOFT or LINK_COPY), dispatching to the Windows
    primitives when needed."""
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
    elif linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror callback: clear the read-only bit and retry.
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback: log the failure instead of raising."""
    _messages = {
        errno.EACCES: "Cannot remove, permission denied: {0}",
        errno.ENOTEMPTY: "Cannot remove, not empty: {0}",
    }
    template = _messages.get(exc_info[1].errno,
                             "Cannot remove, unknown reason: {0}")
    log.warn(template.format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        # Retry loop: Windows in particular can fail transiently when files
        # are in use; each failed attempt falls through to stronger measures.
        for i in range(max_retries):
            try:
                shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    try:
                        # Retry clearing read-only attributes first.
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    # Fall back to the cmd.exe 'rd' builtin.
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                    if trash:
                        # Last resort: move the directory into the trash.
                        try:
                            move_path_to_trash(path)
                            if not isdir(path):
                                return
                        except OSError as e2:
                            raise
                            msg += "Retry with onerror failed (%s)\n" % e2
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:
        # directory might not exist or not be empty -- both are fine
        pass
def yield_lines(path):
    """Yield the stripped, non-empty, non-comment lines of the file at
    `path`."""
    with open(path) as fh:
        for raw in fh:
            text = raw.strip()
            if text and not text.startswith('#'):
                yield text
# Placeholder path baked into relocatable packages; replaced with the real
# prefix at link time (see update_prefix).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                # New-style entry: "<placeholder> <mode> <filename>"
                placeholder, mode, fname = [part.strip('"\'') for part in
                                            shlex.split(line, posix=False)]
                mapping[fname] = (placeholder, mode)
            except ValueError:
                # Old-style entry: the whole line is the filename.
                mapping[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement prefix is longer than
    the placeholder, so NUL padding cannot preserve the data length."""
    pass
def binary_replace(data, a, b):
    """
    Replace every occurrence of the placeholder `a` with `b` inside `data`,
    padding each NUL-terminated run with null bytes so the total length of
    `data` is unchanged.  All input arguments are expected to be bytes
    objects.  Raises PaddingError if `b` is longer than `a`.
    """
    import re

    def _substitute(match):
        count = match.group().count(a)
        fill = (len(a) - len(b)) * count
        if fill < 0:
            raise PaddingError(a, b, fill)
        return match.group().replace(a, b) + b'\0' * fill

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_substitute, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """Rewrite `path` in place, replacing `placeholder` with `new_prefix`.

    mode 'text' does a plain substitution; mode 'binary' uses
    binary_replace() to keep the file length constant.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        sys.exit("Invalid mode:" % mode)

    if new_data == data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache.
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(new_data)
    # Restore the original permission bits.
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package name portion of a canonical dist string such as
    'numpy-1.6.2-py26_0'."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # Start from the package's own info/index.json, then layer extra_info
    # on top, and persist the result as <env>/conda-meta/<dist>.json.
    with open(join(info_dir, 'index.json')) as fh:
        meta = json.load(fh)
    meta.update(extra_info)
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fh:
        json.dump(meta, fh, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/')
                  and f.lower().endswith('.json')]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # Underscore-prefixed env names are conventionally private/internal.
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except:
        # menuinst is optional; warn and continue rather than fail the link.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # Menu failures must never abort package installation.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # Scripts are named .<pkgname>-<action>.(bat|sh) in Scripts/ or bin/.
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # No script shipped for this action: trivially successful.
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # Expose package/environment context to the script via env vars.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
                str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """Return the most recently recorded download URL for `dist` from
    pkgs_dir/urls.txt, or None when absent or unreadable."""
    suffix = '/%s.tar.bz2' % dist
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fh:
            recorded = fh.read().split()
    except IOError:
        return None
    # Newest entries are appended last, so scan in reverse.
    for url in reversed(recorded):
        if url.endswith(suffix):
            return url
    return None
def read_icondata(source_dir):
    """Return the base64-encoded contents of info/icon.png under
    `source_dir`, or None when the icon is missing."""
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fh:
            raw = fh.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Return the union of filenames listed in info/no_link and
    info/no_softlink (files that must be copied rather than linked)."""
    names = set()
    for fname in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, fname)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Link the root conda entry points (conda, activate, deactivate) into
    `prefix` so the environment can invoke them; `shell` selects the
    Windows redirect flavor."""
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Create links in `prefix` for each root script, using `symlink_fn`
    (os.symlink on unix, a .bat redirect on Windows)."""
    scripts = {where: ["conda"],
               'cmd': ["activate", "deactivate"],
               }
    for where, files in scripts.items():
        prefix_where = join(prefix, where)
        if not isdir(prefix_where):
            os.makedirs(prefix_where)
        for f in files:
            root_file = join(root_dir, where, f)
            prefix_file = join(prefix_where, f)
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                os.remove(prefix_file)
            # if they're in use, they won't be killed.  Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================

def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard links work between the package cache and
    `prefix` (e.g. same filesystem) by linking one small file; always
    cleans up after itself."""
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        rm_rf(dst)
        # Remove the prefix again if the probe had to create it.
        rm_empty_dir(prefix)
# ------- package cache ----- fetched

def fetched(pkgs_dir):
    """Return the set of canonical dist names whose tarballs are present
    in the package cache directory."""
    if not isdir(pkgs_dir):
        return set()
    return {fname[:-8] for fname in os.listdir(pkgs_dir)
            if fname.endswith('.tar.bz2')}
def is_fetched(pkgs_dir, dist):
    # True iff the tarball for `dist` is present in the package cache.
    return isfile(join(pkgs_dir, dist + '.tar.bz2'))
def rm_fetched(pkgs_dir, dist):
    # Remove the downloaded tarball for `dist` under the cache lock.
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
# ------- package cache ----- extracted

def extracted(pkgs_dir):
    """
    return the (set of canonical names) of all extracted packages
    """
    if not isdir(pkgs_dir):
        return set()
    names = set()
    for dn in os.listdir(pkgs_dir):
        # A package counts as extracted only when both marker files exist.
        info = join(pkgs_dir, dn, 'info')
        if isfile(join(info, 'files')) and isfile(join(info, 'index.json')):
            names.add(dn)
    return names
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage.  We assume
    that the compressed packages is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files.  However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """True iff `dist` has been fully extracted under `pkgs_dir` (both
    info/files and info/index.json exist)."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    # Remove the extracted directory for `dist` under the cache lock.
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages

def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    records = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fname in os.listdir(meta_dir):
            if not fname.endswith('.json'):
                continue
            try:
                with open(join(meta_dir, fname)) as fh:
                    records[fname[:-5]] = json.load(fh)
            except IOError:
                # Unreadable record: skip rather than fail the whole listing.
                pass
    return records
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    return {fname[:-5] for fname in os.listdir(meta_dir)
            if fname.endswith('.json')}
# FIXME Functions that begin with `is_` should return True/False

def is_linked(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    try:
        with open(join(prefix, 'conda-meta', dist + '.json')) as fh:
            return json.load(fh)
    except IOError:
        return None
def delete_trash(prefix=None):
    """Best-effort purge of every pkgs_dir's .trash directory.

    `prefix` is accepted for API compatibility but unused; the trash
    directories are per-pkgs_dir, not per-environment.
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            # Non-fatal: trash may be locked by another process.
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    # Thin compatibility shim; new code should call move_path_to_trash.
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda import config
    # Try each pkgs_dir until one accepts the file; return True on success.
    for pkg_dir in config.pkgs_dirs:
        import tempfile
        trash_dir = join(pkg_dir, '.trash')

        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                continue

        # Unique subdirectory per call, mirroring the path's location
        # relative to the root so restores are possible.
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))

        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue

        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            return True

    log.debug("Could not move %s to trash" % path)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Return the install metadata for a linked package, or None
    (currently an alias of is_linked)."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Steps: run the pre-link script, place every file from the package
    into `prefix` (hard-link/soft-link/copy per `linktype`), rewrite
    embedded prefixes, create menu entries, run the post-link script and
    finally write the conda-meta/<dist>.json record.  `index` optionally
    supplies repodata to seed that record.
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        # on Windows an in-use file cannot be unlinked,
                        # but it can usually be moved out of the way
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # files that embed the prefix, opt out of linking, or are
            # themselves symlinks must be real copies
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # the special '_cache' package carries no metadata
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # a post-link script may have written an alternate file list
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes menu entries and every file the
    metadata lists, deletes the conda-meta record and finally prunes any
    directories left empty.
    '''
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    # in-use files on Windows can be moved even when they
                    # cannot be deleted
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        os.unlink(meta_path)
        dst_dirs2 = set()
        for path in dst_dirs1:
            # collect every ancestor directory up to (but excluding) prefix
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # longest paths first, so children are removed before parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """
    Print the contents of `prefix`/.messages.txt (if any) to stdout, then
    delete the file.  Package post-link scripts write to this file to
    surface messages to the user.
    """
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # no messages were written -- nothing to show
        pass
    finally:
        rm_rf(msg_file)
def duplicates_to_remove(linked_dists, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains.  `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in linked_dists:
        # canonical names look like 'numpy-1.6.2-py26_0'; everything
        # before the version and build parts is the package name
        by_name[dist.rsplit('-', 2)[0]].add(dist)
    removals = set()
    for group in by_name.values():
        if len(group) == 1:
            # only one distribution of this name -- nothing to remove
            continue
        if group & keep_dists:
            # some members must be kept; drop everything that is not kept
            removals.update(group - keep_dists)
        else:
            # otherwise keep only the highest-sorting member
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """
    Minimal CLI used by the self-extracting installer: link all extracted
    (or explicitly listed) packages into --prefix, then drop duplicate
    distributions so each package name is linked exactly once.
    """
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted(pkgs_dir))
    # probe once whether hard links work between the cache and the prefix
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(pkgs_dir, prefix, dist, linktype)
    messages(prefix)
    # rename (or remove) the metadata of older duplicate distributions so
    # that only the requested ones remain "linked"
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
<|code_end|>
conda/utils.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir
import os
import re
import tempfile
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
def can_open(file):
    """
    Return True if the given ``file`` can be opened for writing
    """
    try:
        with open(file, "ab"):
            pass
        return True
    except IOError:
        stderrlog.info("Unable to open %s\n" % file)
        return False
def can_open_all(files):
    """
    Return True if all of the provided ``files`` can be opened
    """
    # all() short-circuits on the first file that cannot be opened,
    # exactly like the original explicit loop
    return all(can_open(f) for f in files)
def can_open_all_files_in_prefix(prefix, files):
    """
    Returns True if all ``files`` at a given ``prefix`` can be opened
    """
    full_paths = (os.path.join(prefix, f) for f in files)
    return can_open_all(full_paths)
def try_write(dir_path):
    """
    Return True when a temporary file can be created and written inside
    the existing directory ``dir_path`` (i.e. it is writable).
    """
    assert isdir(dir_path)
    try:
        with tempfile.TemporaryFile(prefix='.conda-try-write',
                                    dir=dir_path) as probe:
            probe.write(b'This is a test file.\n')
        return True
    except (IOError, OSError):
        return False
def hashsum_file(path, mode='md5'):
    """
    Return the hex digest of the file at ``path`` using the hashlib
    algorithm named by ``mode`` (default: 'md5').
    """
    digest = hashlib.new(mode)
    with open(path, 'rb') as stream:
        # hash in 256KB chunks so large files never load fully into memory
        for chunk in iter(lambda: stream.read(262144), b''):
            digest.update(chunk)
    return digest.hexdigest()
def md5_file(path):
    # Convenience wrapper: MD5 hex digest of the file at `path`.
    return hashsum_file(path, 'md5')
def url_path(path):
    """
    Turn a local filesystem path into a file:// URL.  On Windows the
    drive colon becomes '|' and backslashes become forward slashes.
    """
    full = abspath(path)
    if sys.platform == 'win32':
        full = '/' + full.replace(':', '|').replace('\\', '/')
    return 'file://%s' % full
def win_path_to_unix(path, root_prefix=""):
    """Convert a path or ;-separated string of paths into a unix representation
    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    # matches a drive-letter path (e.g. C:\foo\bar); the lookbehind/lookahead
    # keep it from re-matching text that is already part of a converted path
    path_re = '(?<![:/])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'
    # drop the drive colon, flip backslashes to slashes, and turn the ';'
    # list separator into the unix ':' separator
    translation = lambda found_path: root_prefix + "/" + found_path.groups()[0].replace("\\", "/")\
        .replace(":", "").replace(";", ":")
    return re.sub(path_re, translation, path)
def unix_path_to_win(path, root_prefix=""):
    """Convert a path or :-separated string of paths into a Windows representation
    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    if len(path) > 1 and (";" in path or (path[1] == ":" and path.count(":") == 1)):
        # already a windows path
        return path.replace("/", "\\")
    """Convert a path or :-separated string of paths into a Windows representation"""
    # matches '<root_prefix>/c/...' style unix paths; the first path
    # component is the drive letter
    path_re = root_prefix +'(/[a-zA-Z]\/(?:[^:*?"<>|]+\/)*[^:*?"<>|;]*)'
    # rebuild as '<drive>:' plus the remainder with backslashes
    translation = lambda found_path: found_path.group(0)[len(root_prefix)+1] + ":" + \
        found_path.group(0)[len(root_prefix)+2:].replace("/", "\\")
    translation = re.sub(path_re, translation, path)
    # a ':' that separates two converted paths becomes the ';' list separator
    translation = re.sub(":([a-zA-Z]):", lambda match: ";" + match.group(0)[1] + ":", translation)
    return translation
# curry cygwin functions
def win_path_to_cygwin(path):
    # win_path_to_unix with the /cygdrive prefix that Cygwin expects
    return win_path_to_unix(path, "/cygdrive")
def cygwin_path_to_win(path):
    # unix_path_to_win with the /cygdrive prefix that Cygwin uses
    return unix_path_to_win(path, "/cygdrive")
def translate_stream(stream, translator):
    """Apply ``translator`` to each line of ``stream`` and re-join them."""
    translated = [translator(line) for line in stream.split("\n")]
    return "\n".join(translated)
def human_bytes(n):
    """
    Return the number of bytes n in more human readable form.
    """
    if n < 1024:
        return '%d B' % n
    kib = n / 1024
    if kib < 1024:
        return '%d KB' % round(kib)
    mib = kib / 1024
    if mib < 1024:
        return '%.1f MB' % mib
    return '%.2f GB' % (mib / 1024)
# `Hashable` lives in `collections.abc`; the alias in the `collections`
# namespace was removed in Python 3.10, which made the original
# `collections.Hashable` lookup raise AttributeError on every call.
try:
    from collections.abc import Hashable as _Hashable
except ImportError:  # Python 2
    from collections import Hashable as _Hashable


class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).

    Positional list arguments are converted to tuples for the cache key;
    any other unhashable argument disables caching for that call.
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kw):
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # lists are unhashable; key on an equivalent tuple
                newargs.append(tuple(arg))
            elif not isinstance(arg, _Hashable):
                # uncacheable. a list, for instance.
                # better to not cache than blow up.
                return self.func(*args, **kw)
            else:
                newargs.append(arg)
        newargs = tuple(newargs)
        key = (newargs, frozenset(kw.items()))
        if key in self.cache:
            return self.cache[key]
        else:
            value = self.func(*args, **kw)
            self.cache[key] = value
            return value
# For instance methods only
class memoize(object):  # 577452
    """
    Memoization decorator for instance methods (ASPN recipe 577452).
    Results are cached on the instance itself, so cached values live and
    die together with the object they belong to.
    """
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype=None):
        if obj is None:
            # accessed on the class rather than an instance
            return self.func
        return partial(self, obj)

    def __call__(self, *args, **kw):
        obj = args[0]
        try:
            cache = obj.__cache
        except AttributeError:
            cache = obj.__cache = {}
        key = (self.func, args[1:], frozenset(kw.items()))
        if key not in cache:
            cache[key] = self.func(*args, **kw)
        return cache[key]
def find_parent_shell(path=False):
    """return process name or path of parent. Default is to return only name of process."""
    try:
        import psutil
    except ImportError:
        sys.exit("No psutil available.\n"
                 "To proceed, please conda install psutil")
    process = psutil.Process()
    # walk upwards past any intermediate conda processes so we reach the
    # shell that actually invoked conda
    while "conda" in process.parent().name():
        process = process.parent()
    if path:
        return process.parent().exe()
    return process.parent().name()
@memoized
def get_yaml():
    """
    Import and return a YAML module, preferring ``raml`` and falling back
    to ``yaml``; exits with instructions when neither is installed.
    """
    try:
        import raml as yaml
    except ImportError:
        try:
            import yaml
        except ImportError:
            sys.exit("No yaml library available.\n"
                     "To proceed, please conda install raml")
    return yaml
<|code_end|>
| conda/cli/activate.py
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, join, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import translate_stream, unix_path_to_win, win_path_to_unix, win_path_to_cygwin, find_parent_shell
on_win = sys.platform == "win32"
def help():
    """
    Print the usage message for activate/deactivate and exit.  Which
    message is shown depends on the subcommand in sys.argv[1] and on the
    shell we were invoked from (cmd.exe/powershell vs. a POSIX shell).
    """
    # sys.argv[1] will be ..checkenv in activate if an environment is already
    # activated
    # get grandparent process name to see which shell we're using
    win_process = find_parent_shell()
    if sys.argv[1] in ('..activate', '..checkenv'):
        if on_win and win_process in ["cmd.exe", "powershell.exe"]:
            # NOTE(review): '\b' in 'Library\bin' below is a backspace
            # escape in this literal -- probably intended as '\\b'; left
            # unchanged here.
            sys.exit("""Usage: activate ENV
adds the 'Scripts' and 'Library\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
        else:
            sys.exit("""Usage: source activate ENV
adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
    else:  # ..deactivate
        if on_win and win_process in ["cmd.exe", "powershell.exe"]:
            sys.exit("""Usage: deactivate
Removes the 'Scripts' and 'Library\bin' directory of the environment ENV to the front of PATH.""")
        else:
            sys.exit("""Usage: source deactivate
removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
def prefix_from_arg(arg):
    """
    Resolve an environment argument -- either a filesystem path or a bare
    environment name -- to an environment prefix.  Exits with an error
    message when no matching environment exists.
    """
    if os.sep not in arg:
        # a bare name: look it up among the known environments
        prefix = find_prefix_name(arg)
        if prefix is None:
            sys.exit('Error: could not find environment: %s' % arg)
        return prefix
    candidate = abspath(arg.strip("\""))
    if not isdir(candidate):
        sys.exit('Error: could not find environment: %s' % arg)
    return candidate
def binpath_from_arg(arg):
    """
    Return the list of directories to prepend to PATH when activating the
    environment named (or located) by ``arg``.
    """
    prefix = prefix_from_arg(arg)
    if on_win:
        head = prefix.rstrip("\\")
        subdirs = ['cmd', 'Scripts', join('Library', 'bin')]
    else:
        head = prefix.rstrip("/")
        subdirs = ['cmd', 'bin']
    return [head] + [join(prefix, sub) for sub in subdirs]
def pathlist_to_str(paths, escape_backslashes=True):
    """
    Format a path list, e.g., of bin paths to be added or removed,
    for user-friendly output.
    """
    joined = ' and '.join(paths)
    if on_win and escape_backslashes:
        # escape for printing to console - ends up as single \
        return re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', joined)
    return joined.replace("\\\\", "\\")
def main():
    """
    Dispatcher behind the activate/deactivate shell wrappers.  Reads a
    subcommand from sys.argv[1] ('..activate', '..deactivate',
    '..checkenv' or '..setps1') and prints the resulting PATH (or prompt)
    string; the calling shell script applies whatever is printed.
    """
    import conda.config
    import conda.install
    if '-h' in sys.argv or '--help' in sys.argv:
        help()
    path = os.getenv("PATH")
    # This one is because we force Library/bin to be on PATH on windows. Strip it off here.
    if on_win:
        path = path.replace(join(sys.prefix, "Library", "bin")+os.pathsep, "", 1)
    parent_shell = find_parent_shell(path=True)
    if sys.argv[1] == '..activate':
        if len(sys.argv) == 2 or sys.argv[2].lower() == "root":
            binpath = binpath_from_arg("root")
            rootpath = None
        elif len(sys.argv) == 3:
            base_path = sys.argv[2]
            binpath = binpath_from_arg(base_path)
            rootpath = os.pathsep.join(binpath_from_arg("root"))
        else:
            sys.exit("Error: did not expect more than one argument")
        sys.stderr.write("prepending %s to PATH\n" % pathlist_to_str(binpath))
        path = os.pathsep.join([os.pathsep.join(binpath), path])
        # translate the assembled PATH into whatever convention the
        # invoking shell expects (win, cygwin or unix)
        if any([shell in parent_shell for shell in ["cmd.exe", "powershell.exe"]]):
            path = translate_stream(path, unix_path_to_win)
            # Clear the root path if it is present
            if rootpath:
                path = path.replace(translate_stream(rootpath, unix_path_to_win), "")
        elif 'cygwin' in parent_shell:
            # this should be harmless to unix paths, but converts win paths to unix for bash on win (msys, cygwin)
            path = translate_stream(path, win_path_to_cygwin)
            # Clear the root path if it is present
            if rootpath:
                path = path.replace(translate_stream(rootpath, win_path_to_cygwin), "")
        else:
            if sys.platform == 'win32':
                path = translate_stream(path, win_path_to_unix)
                if rootpath:
                    rootpath = translate_stream(rootpath, win_path_to_unix)
            # Clear the root path if it is present
            if rootpath:
                path = path.replace(rootpath, "")
    elif sys.argv[1] == '..deactivate':
        path = os.getenv("CONDA_PATH_BACKUP", "")
        sys.stderr.write("path:")
        sys.stderr.write(path)
        if path:
            sys.stderr.write("Restoring PATH to deactivated state\n")
        else:
            path = os.getenv("PATH")  # effectively a no-op; just set PATH to what it already is
    elif sys.argv[1] == '..checkenv':
        if len(sys.argv) < 3:
            sys.exit("Error: no environment provided.")
        if len(sys.argv) > 3:
            sys.exit("Error: did not expect more than one argument.")
        if sys.argv[2] == 'root':
            # no need to check root env and try to install a symlink there
            sys.exit(0)
        binpath = binpath_from_arg(sys.argv[2])  # this should throw an error and exit if the env or path can't be found.
        # Make sure an env always has the conda symlink
        try:
            conda.install.symlink_conda(binpath[0], conda.config.root_dir, find_parent_shell())
        except (IOError, OSError) as e:
            if e.errno == errno.EPERM or e.errno == errno.EACCES:
                sys.exit("Cannot activate environment {}, do not have write access to write conda symlink".format(sys.argv[2]))
            raise
        sys.exit(0)
    elif sys.argv[1] == '..setps1':
        # path is a bit of a misnomer here. It is the prompt setting. However, it is returned
        # below by printing. That is why it is named "path"
        path = sys.argv[3]
        if not path:
            if on_win:
                path = os.getenv("PROMPT", "$P$G")
            else:
                # zsh uses prompt. If it exists, prefer it.
                path = os.getenv("PROMPT")
                # fall back to bash default
                if not path:
                    path = os.getenv("PS1")
        # strip off previous prefix, if any:
        path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
        env_path = sys.argv[2]
        if conda.config.changeps1 and env_path:
            path = "(({})) {}".format(os.path.split(env_path)[-1], path)
    else:
        # This means there is a bug in main.py
        raise ValueError("unexpected command")
    # This print is actually what sets the PATH or PROMPT variable. The shell script gets this value, and finishes the job.
    print(path)

if __name__ == '__main__':
    main()
conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in providing the context-manager interface of
        # conda.lock.Locked without doing any actual locking.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
try:
    from conda.utils import win_path_to_unix
except ImportError:
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        # BUG FIX: `re` must be imported locally -- this module does not
        # import `re` at the top level (it must stay standalone for the
        # installer), so the fallback previously raised NameError.
        import re
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'
        translation = lambda found_path: root_prefix + "/" + found_path.groups()[0].replace("\\", "/")\
            .replace(":", "")
        translation = re.sub(path_re, translation, path)
        # a ';' separator between two converted paths becomes unix ':'
        translation = translation.replace(";/", ":/")
        return translation
on_win = bool(sys.platform == "win32")
if on_win:
    # Windows-only link helpers: ctypes bindings for the kernel32 link
    # APIs plus a .bat-redirect fallback for systems (e.g. XP) that lack
    # CreateSymbolicLink.
    import ctypes
    from ctypes import wintypes
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # API not present on this Windows version
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.
        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.
        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            if src.endswith("conda"):
                src = src + ".exe"
            path_prefix = ""
            if 'cygwin' in shell.lower():
                path_prefix = '/cygdrive'
            src = win_path_to_unix(src, path_prefix)
            dst = win_path_to_unix(dst, path_prefix)
            subprocess.check_call(["bash", "-l", "-c",
                                   'ln -sf "%s" "%s"' % (src, dst)])
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """
    A logging handler that silently discards every record.  Backported
    from the Python 2.7 standard library so importing this module never
    triggers 'No handlers could be found for logger ...' warnings
    (http://bugs.python.org/issue16539).
    """
    def createLock(self):
        # no I/O ever happens, so no lock is needed
        self.lock = None

    def emit(self, record):
        pass

    def handle(self, record):
        pass
log.addHandler(NullHandler())
# Integer codes for the ways a file can be materialized from the package
# cache into an environment, plus their human-readable names (recorded
# in conda-meta metadata).
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """
    Materialize `dst` from `src` as a hard link (LINK_HARD), a symlink
    (LINK_SOFT) or a plain copy (LINK_COPY).
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
        return
    if linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
        return
    if linktype == LINK_COPY:
        # preserve relative symlinks as symlinks instead of copying targets
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """
    shutil.rmtree onerror callback that downgrades failures to warnings
    instead of raising.
    """
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    # Windows escalation ladder: retry clearing read-only
                    # bits, then shell out to `rd`, then try the trash.
                    try:
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                    if trash:
                        try:
                            move_path_to_trash(path)
                            if not isdir(path):
                                return
                        except OSError as e2:
                            raise
                            # NOTE(review): unreachable -- the bare `raise`
                            # above aborts before this message is recorded
                            msg += "Retry with onerror failed (%s)\n" % e2
                # back off linearly between attempts
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:
        # missing, not a directory, or not empty -- all fine
        pass
def yield_lines(path):
    """
    Generate the meaningful lines of the text file at `path`: each line
    is stripped, and blank lines and '#' comment lines are skipped.
    """
    for raw in open(path):
        text = raw.strip()
        if text and not text.startswith('#'):
            yield text
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    Parse an info/has_prefix file into a dict mapping each filename to a
    (placeholder, mode) tuple.  Lines that do not split into exactly
    three shlex tokens are old-style entries: the whole line is the
    filename, with the default placeholder and 'text' mode.  A missing
    file yields an empty dict.
    """
    mapping = {}
    try:
        lines = list(yield_lines(path))
    except IOError:
        return mapping
    for line in lines:
        try:
            placeholder, mode, fname = [tok.strip('"\'') for tok in
                                        shlex.split(line, posix=False)]
        except ValueError:
            mapping[line] = (prefix_placeholder, 'text')
        else:
            mapping[fname] = (placeholder, mode)
    return mapping
class PaddingError(Exception):
    # Raised by binary_replace() when the replacement prefix is longer
    # than the placeholder, so the file length cannot be preserved.
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    import re

    def _pad(match):
        count = match.group().count(a)
        padding = (len(a) - len(b)) * count
        if padding < 0:
            # replacement longer than placeholder: length cannot be kept
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    # a placeholder run is `a` followed by anything up to the next NUL
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """
    Rewrite the file at `path`, replacing every occurrence of
    `placeholder` with `new_prefix`.

    mode='text' does a plain substitution; mode='binary' uses
    binary_replace() so the file length is preserved (NUL padding).  Any
    other mode aborts.  The file is removed and rewritten rather than
    edited in place so a hard-linked package-cache copy is not modified;
    its permission bits are restored afterwards.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # BUG FIX: was `"Invalid mode:" % mode`, a malformed % expression
        # that raised TypeError instead of printing the message.
        sys.exit("Invalid mode: %r" % mode)
    if new_data == data:
        return
    st = os.lstat(path)
    os.remove(path)  # Remove file before rewriting to avoid destroying hard-linked cache.
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    # 'numpy-1.6.2-py26_0' -> 'numpy': drop the version and build parts
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # start from the package's own info/index.json ...
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # ... overlay the caller-supplied fields ...
    meta.update(extra_info)
    # ... and write the merged result to <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.

    Menu creation is strictly best-effort: a missing menuinst or a
    failing install must never abort package linking, hence the broad
    exception handlers below.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/')
                  and f.lower().endswith('.json')]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed envs are conventionally private/internal
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    A missing script counts as success.  The script receives ROOT_PREFIX,
    PREFIX, PKG_NAME/PKG_VERSION/PKG_BUILDNUM (and SOURCE_DIR for
    pre-link) in its environment.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # nothing to run -- treated as success
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no command interpreter configured; cannot run .bat scripts
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # BUG FIX: work on a copy -- the original assigned `env = os.environ`
    # and therefore permanently leaked PKG_*/PREFIX variables into this
    # process's own environment on every call.
    env = os.environ.copy()
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = \
        str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """
    Return the URL the package `dist` was downloaded from, by scanning
    `pkgs_dir`/urls.txt from the most recent entry backwards; None when
    the URL is unknown or urls.txt is missing.
    """
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            urls = fi.read().split()
    except IOError:
        return None
    suffix = '/%s.tar.bz2' % dist
    for url in reversed(urls):
        if url.endswith(suffix):
            return url
    return None
def read_icondata(source_dir):
    """
    Return the package icon (info/icon.png) base64-encoded as text, or
    None when the package ships no icon.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            raw = fi.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """
    Return the set of filenames listed in info/no_link and
    info/no_softlink; either file may be absent.
    """
    names = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, fn)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """
    Make the conda entry point (and activate/deactivate scripts)
    reachable from the environment `prefix` by linking to the root
    installation in `root_dir`.  On Windows the "links" are .bat
    redirect files tailored to `shell`.
    """
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """
    Link the conda entry point (under `where`) and the activate /
    deactivate scripts (under 'cmd') from `root_dir` into `prefix`.
    `symlink_fn(src, dst)` performs the actual link creation.
    """
    scripts = {where: ["conda"],
               'cmd': ["activate", "deactivate"],
               }
    for subdir, names in scripts.items():
        target_dir = join(prefix, subdir)
        if not isdir(target_dir):
            os.makedirs(target_dir)
        for name in names:
            root_file = join(root_dir, subdir, name)
            prefix_file = join(target_dir, name)
            # drop any stale link first; a link that is in use cannot be
            # removed, in which case the existing one is kept
            if os.path.lexists(prefix_file):
                os.remove(prefix_file)
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
src = join(pkgs_dir, dist, 'info', 'index.json')
dst = join(prefix, '.tmp-%s' % dist)
assert isfile(src), src
assert not isfile(dst), dst
try:
if not isdir(prefix):
os.makedirs(prefix)
_link(src, dst, LINK_HARD)
return True
except OSError:
return False
finally:
rm_rf(dst)
rm_empty_dir(prefix)
# ------- package cache ----- fetched
def fetched(pkgs_dir):
if not isdir(pkgs_dir):
return set()
return set(fn[:-8] for fn in os.listdir(pkgs_dir)
if fn.endswith('.tar.bz2'))
def is_fetched(pkgs_dir, dist):
    # True when the tarball for `dist` is present in the package cache
    return isfile(join(pkgs_dir, '%s.tar.bz2' % dist))
def rm_fetched(pkgs_dir, dist):
    """Delete the cached tarball for `dist` from `pkgs_dir`."""
    with Locked(pkgs_dir):
        rm_rf(join(pkgs_dir, dist + '.tar.bz2'))
# ------- package cache ----- extracted
def extracted(pkgs_dir):
"""
return the (set of canonical names) of all extracted packages
"""
if not isdir(pkgs_dir):
return set()
return set(dn for dn in os.listdir(pkgs_dir)
if (isfile(join(pkgs_dir, dn, 'info', 'files')) and
isfile(join(pkgs_dir, dn, 'info', 'index.json'))))
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed packages is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files.  However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True when *dist* is fully extracted (both metadata files exist)."""
    info_dir = join(pkgs_dir, dist, 'info')
    return (isfile(join(info_dir, 'files')) and
            isfile(join(info_dir, 'index.json')))
def rm_extracted(pkgs_dir, dist):
    """Delete the extracted directory for *dist* from the package cache."""
    with Locked(pkgs_dir):
        rm_rf(join(pkgs_dir, dist))
# ------- linkage of packages
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.

    Maps canonical dist name -> parsed conda-meta/<dist>.json contents;
    unreadable entries are skipped.
    """
    res = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fn in os.listdir(meta_dir):
            if fn.endswith('.json'):
                try:
                    # BUG FIX: use a context manager so the file handle is
                    # closed even when json.load raises (was json.load(open(...)))
                    with open(join(meta_dir, fn)) as fin:
                        res[fn[:-5]] = json.load(fin)
                except IOError:
                    pass
    return res
def linked(prefix):
    """
    Return the (set of canonical names) of linked packages in prefix.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    return {fn[:-5] for fn in os.listdir(meta_dir) if fn.endswith('.json')}
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """Return the install metadata dict for *dist* linked in *prefix*,
    or None when the package is not linked there."""
    meta_path = join(prefix, 'conda-meta', '%s.json' % dist)
    try:
        with open(meta_path) as meta_file:
            return json.load(meta_file)
    except IOError:
        return None
def delete_trash(prefix=None):
    """Best-effort removal of the .trash directory in every package cache.

    *prefix* is accepted for API compatibility but not used.  Failures are
    logged and ignored, since trashed files may still be in use.
    """
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """Deprecated wrapper around `move_path_to_trash`.

    Moves file *f* (relative to *prefix*) to the trash.  The *tempdir*
    parameter is deprecated and ignored.
    """
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        import tempfile
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create a trash dir in this cache; try the next one
                continue
        # a unique subdirectory avoids collisions with earlier trashed copies
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the path's parent layout relative to the conda root so the
        # basename cannot collide inside the unique directory
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            return True
    # every package cache failed; caller must deal with the file in place
    log.debug("Could not move %s to trash" % path)
    return False
# FIXME This should contain the implementation that loads meta, not is_linked()
def load_meta(prefix, dist):
    """Return the install metadata for *dist* in *prefix* (None if absent)."""
    return is_linked(prefix, dist)
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # files containing the build-time placeholder prefix; must be patched
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    # files that must never be hard/soft linked (always copied)
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        # in-use files cannot be unlinked on Windows; park
                        # them in the trash instead
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-patched, no-link and symlinked files need real copies
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # the _cache pseudo-package carries no metadata to record
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url
        # assemble the conda-meta record: repodata entry + install-time info
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            # a post-link script may leave an alternative files list behind
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    '''
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            # remember parent dirs so empty ones can be pruned afterwards
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    # in-use files cannot be unlinked on Windows; trash them
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        os.unlink(meta_path)
        # expand dst_dirs1 with every ancestor up to (not including) prefix
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # prune deepest-first so emptied parents become removable too
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Emit the contents of `.messages.txt` in *prefix* to stdout, then
    delete the file (it is also deleted when unreadable)."""
    msg_path = join(prefix, '.messages.txt')
    try:
        try:
            with open(msg_path) as fh:
                sys.stdout.write(fh.read())
        except IOError:
            # no messages file -- nothing to show
            pass
    finally:
        rm_rf(msg_path)
def duplicates_to_remove(linked_dists, keep_dists):
    """Return the sorted list of distributions to remove so that only one
    distribution per package name remains.

    `keep_dists` is an iterable of distributions which must not be removed.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in linked_dists:
        by_name[name_dist(dist)].add(dist)
    res = set()
    for dists in by_name.values():
        # `dists` is the group of packages with the same name
        if len(dists) == 1:
            # only one package for this name -- nothing to remove
            continue
        if dists & keep_dists:
            # drop every group member that is not protected
            res.update(dists - keep_dists)
        else:
            # no protected member: keep only the highest-sorting dist
            res.update(sorted(dists)[:-1])
    return sorted(res)
# =========================== end API functions ==========================
def main():
    """CLI entry point used by the (self-extracting) installer to link all
    extracted packages into the installation prefix."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted(pkgs_dir))
    # probe once whether hard links work between the cache and the prefix
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(pkgs_dir, prefix, dist, linktype)
    messages(prefix)
    # keep a single distribution per package name; back up the metadata of
    # each duplicate being dropped before falling back to deletion
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)


if __name__ == '__main__':
    main()
conda/utils.py
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir
import os
import re
import tempfile
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
def can_open(file):
    """
    Return True if the given ``file`` can be opened for writing
    """
    try:
        with open(file, "ab"):
            pass
        return True
    except IOError:
        stderrlog.info("Unable to open %s\n" % file)
        return False
def can_open_all(files):
    """
    Return True if all of the provided ``files`` can be opened
    """
    # all() short-circuits on the first failure, like the original loop
    return all(can_open(f) for f in files)
def can_open_all_files_in_prefix(prefix, files):
    """
    Returns True if all ``files`` at a given ``prefix`` can be opened
    """
    full_paths = (os.path.join(prefix, f) for f in files)
    return can_open_all(full_paths)
def try_write(dir_path):
    """Return True when a temporary file can be created and written inside
    the existing directory *dir_path*."""
    assert isdir(dir_path)
    probe = b'This is a test file.\n'
    try:
        with tempfile.TemporaryFile(prefix='.conda-try-write',
                                    dir=dir_path) as probe_file:
            probe_file.write(probe)
        return True
    except (IOError, OSError):
        return False
def hashsum_file(path, mode='md5'):
    """Return the hex digest of the file at *path* using algorithm *mode*."""
    digest = hashlib.new(mode)
    with open(path, 'rb') as fi:
        # read 256KB chunks to bound memory use on large files
        for chunk in iter(lambda: fi.read(262144), b''):
            digest.update(chunk)
    return digest.hexdigest()
def md5_file(path):
    """Return the MD5 hex digest of the file at *path*."""
    return hashsum_file(path, mode='md5')
def url_path(path):
    """Convert a local filesystem path into a ``file://`` URL."""
    path = abspath(path)
    if sys.platform == 'win32':
        # windows drive colon becomes '|', backslashes become slashes
        path = '/' + path.replace(':', '|').replace('\\', '/')
    return 'file://' + path
def win_path_to_unix(path, root_prefix=""):
    """Convert a path or ;-separated string of paths into a unix representation

    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    # drive-letter path ("C:\..." or "C:/..."); component chars exclude the
    # cmd-special characters, and the look-around keeps a match from eating
    # into a neighbouring path in a ;-separated list
    path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'
    # per-match rewrite: forward slashes, drop drive colon, prepend prefix
    translation = lambda found_path: root_prefix + "/" + found_path.groups()[0].replace("\\", "/")\
        .replace(":", "")
    # NOTE: `translation` is rebound from the callback to the result string
    translation = re.sub(path_re, translation, path)
    # ";/" remains between converted paths; switch to the unix ":" separator
    translation = translation.replace(";/", ":/")
    return translation
def unix_path_to_win(path, root_prefix=""):
    """Convert a path or :-separated string of paths into a Windows representation

    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    if len(path) > 1 and (";" in path or (path[1] == ":" and path.count(":") == 1)):
        # already a windows path
        return path.replace("/", "\\")
    """Convert a path or :-separated string of paths into a Windows representation"""
    # unix-style path under root_prefix: "/c/dir/file"
    path_re = root_prefix +'(/[a-zA-Z]\/(?:[^:*?"<>|]+\/)*[^:*?"<>|;]*)'
    # rebuild each match as "c:\..." from the drive letter and the tail
    translation = lambda found_path: found_path.group(0)[len(root_prefix)+1] + ":" + \
        found_path.group(0)[len(root_prefix)+2:].replace("/", "\\")
    translation = re.sub(path_re, translation, path)
    # NOTE(review): this pass rewrites ":"-separators between converted
    # paths into ";".  With the optional leading ":?" the pattern also
    # appears to match the very first drive ("c:\"), where group(0)[1] is
    # ":" -- confirm single-path conversions against the test suite.
    translation = re.sub(":?([a-zA-Z]):\\\\", lambda match: ";" + match.group(0)[1] + ":\\", translation)
    return translation
# curry cygwin functions
# (PEP 8 E731: named defs instead of lambda assignments -- same callables,
# but with proper names in tracebacks and docstrings)
def win_path_to_cygwin(path):
    """Convert a windows path to a unix path under /cygdrive."""
    return win_path_to_unix(path, "/cygdrive")


def cygwin_path_to_win(path):
    """Convert a /cygdrive unix path back to a windows path."""
    return unix_path_to_win(path, "/cygdrive")
def translate_stream(stream, translator):
    """Apply *translator* to each line of *stream*, rejoining with newlines."""
    lines = stream.split("\n")
    return "\n".join(translator(line) for line in lines)
def human_bytes(n):
    """
    Return the number of bytes n in more human readable form.
    """
    if n < 1024:
        return '%d B' % n
    kb = n / 1024
    if kb < 1024:
        return '%d KB' % round(kb)
    mb = kb / 1024
    if mb < 1024:
        return '%.1f MB' % mb
    return '%.2f GB' % (mb / 1024)
class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).

    List arguments are keyed by an equivalent tuple; any other unhashable
    argument disables caching for that call.
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kw):
        hashable_args = []
        for arg in args:
            if isinstance(arg, list):
                hashable_args.append(tuple(arg))
            elif not isinstance(arg, collections.Hashable):
                # uncacheable argument: call through without caching
                return self.func(*args, **kw)
            else:
                hashable_args.append(arg)
        key = (tuple(hashable_args), frozenset(kw.items()))
        try:
            return self.cache[key]
        except KeyError:
            value = self.func(*args, **kw)
            self.cache[key] = value
            return value
# For instance methods only
class memoize(object): # 577452
    """Memoizing descriptor for instance methods (ActiveState recipe 577452).

    The cache lives on the instance itself (``obj.__cache`` here is
    name-mangled to ``obj._memoize__cache``), so cached values die with
    the instance rather than keeping it alive.
    """
    def __init__(self, func):
        self.func = func
    def __get__(self, obj, objtype=None):
        # accessed on the class -> return the raw function
        if obj is None:
            return self.func
        # accessed on an instance -> bind the instance as first argument
        return partial(self, obj)
    def __call__(self, *args, **kw):
        obj = args[0]
        try:
            cache = obj.__cache
        except AttributeError:
            cache = obj.__cache = {}
        # key on the function too: several methods share one cache dict
        key = (self.func, args[1:], frozenset(kw.items()))
        try:
            res = cache[key]
        except KeyError:
            res = cache[key] = self.func(*args, **kw)
        return res
def find_parent_shell(path=False):
    """return process name or path of parent.  Default is to return only name of process."""
    try:
        import psutil
    except ImportError:
        sys.exit("No psutil available.\n"
                 "To proceed, please conda install psutil")
    process = psutil.Process()
    # walk up past any intermediate conda processes to the launching shell
    while "conda" in process.parent().name():
        process = process.parent()
    if path:
        return process.parent().exe()
    return process.parent().name()
@memoized
def get_yaml():
    """Return the best available yaml module, preferring `raml`.

    Exits with an instructive message when no yaml library is installed.
    Memoized, so the import cost is paid once per process.
    """
    try:
        import raml as yaml
    except ImportError:
        try:
            import yaml
        except ImportError:
            sys.exit("No yaml library available.\n"
                     "To proceed, please conda install raml")
    return yaml
| conda/cli/activate.py
--- a/conda/cli/activate.py
+++ b/conda/cli/activate.py
@@ -124,10 +124,13 @@ def main():
if rootpath:
path = path.replace(translate_stream(rootpath, win_path_to_cygwin), "")
else:
- path = translate_stream(path, win_path_to_unix)
+ if sys.platform == 'win32':
+ path = translate_stream(path, win_path_to_unix)
+ if rootpath:
+ rootpath = translate_stream(rootpath, win_path_to_unix)
# Clear the root path if it is present
if rootpath:
- path = path.replace(translate_stream(rootpath, win_path_to_unix), "")
+ path = path.replace(rootpath, "")
elif sys.argv[1] == '..deactivate':
conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -66,11 +66,12 @@ def win_path_to_unix(path, root_prefix=""):
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
- import re
- path_re = '[a-zA-Z]:[/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;/\\\\]*'
- converted_paths = [root_prefix + "/" + _path.replace("\\", "/").replace(":", "")
- for _path in re.findall(path_re, path)]
- return ":".join(converted_paths)
+ path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'
+ translation = lambda found_path: root_prefix + "/" + found_path.groups()[0].replace("\\", "/")\
+ .replace(":", "")
+ translation = re.sub(path_re, translation, path)
+ translation = translation.replace(";/", ":/")
+ return translation
on_win = bool(sys.platform == "win32")
conda/utils.py
--- a/conda/utils.py
+++ b/conda/utils.py
@@ -81,10 +81,12 @@ def win_path_to_unix(path, root_prefix=""):
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
- path_re = '(?<![:/])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'
+ path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'
translation = lambda found_path: root_prefix + "/" + found_path.groups()[0].replace("\\", "/")\
- .replace(":", "").replace(";", ":")
- return re.sub(path_re, translation, path)
+ .replace(":", "")
+ translation = re.sub(path_re, translation, path)
+ translation = translation.replace(";/", ":/")
+ return translation
def unix_path_to_win(path, root_prefix=""):
@@ -100,7 +102,7 @@ def unix_path_to_win(path, root_prefix=""):
translation = lambda found_path: found_path.group(0)[len(root_prefix)+1] + ":" + \
found_path.group(0)[len(root_prefix)+2:].replace("/", "\\")
translation = re.sub(path_re, translation, path)
- translation = re.sub(":([a-zA-Z]):", lambda match: ";" + match.group(0)[1] + ":", translation)
+ translation = re.sub(":?([a-zA-Z]):\\\\", lambda match: ";" + match.group(0)[1] + ":\\", translation)
return translation
|
BeeGFS hard-links
[BeeGFS](http://www.beegfs.com), a parallel cluster file system, [does not support](https://groups.google.com/forum/#!topic/fhgfs-user/cTJcqGZceVA) `hard-links` between files in different directories.
Depending on the [configuration](https://groups.google.com/forum/#!topic/fhgfs-user/pvQSo0QWicw), either an error is issued, or a `symbolic-link` is created.
If a `symbolic-link` is created instead of a `hard-link`, it can cause problems, for example:
```
pkgs/bin/mpicc
pkgs/lib/libopen-pal.so
envs/bin/mpicc
envs/lib/libopen-pal.so
```
Here, when `envs/bin/mpicc` is executed, it is actually `pkgs/bin/mpicc` that runs. The library `$PREFIX/../lib/libopen-pal.so` that actually gets loaded is therefore `pkgs/lib/libopen-pal.so`, which differs from `envs/lib/libopen-pal.so` (the copy in which conda has fixed the hard-coded prefix path). As a final consequence, `mpicc` fails to find its configuration file.
#805 is another (closed) issue related to `BeeGFS`.
Would we need a new conda option to turn hard-links into copies?
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
import re
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op stand-in providing the context-manager interface of
        # conda.lock.Locked without any actual locking
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
try:
    from conda.utils import win_path_to_unix
except ImportError:
    # standalone fallback copy of conda.utils.win_path_to_unix for the
    # self-extracting installer
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa
        def translation(found_path):
            # drop the drive colon, flip backslashes, prepend the prefix
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
on_win = bool(sys.platform == "win32")
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            # CreateSymbolicLinkW is unavailable (e.g. Windows XP)
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/bin/sh \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2    # copy R bits to X (292 == 0o444)
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None

# silence "no handlers could be found" warnings for this module's logger
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* from *src* using the requested link type.

    linktype is one of LINK_HARD, LINK_SOFT or LINK_COPY; Windows uses
    the win32 implementations.  Raises for an unknown linktype.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """`shutil.rmtree` onerror hook: log why *path* could not be removed."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        for i in range(max_retries):
            try:
                shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                return
            except OSError as e:
                msg = "Unable to delete %s\n%s\n" % (path, e)
                if on_win:
                    # removal commonly fails on read-only or in-use files on
                    # Windows; escalate through several fallbacks
                    try:
                        shutil.rmtree(path, onerror=_remove_readonly)
                        return
                    except OSError as e1:
                        msg += "Retry with onerror failed (%s)\n" % e1
                    p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                    (stdout, stderr) = p.communicate()
                    if p.returncode != 0:
                        msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                    if trash:
                        # last resort: move the tree aside into the trash.
                        # A failure here is unrecoverable, so re-raise.
                        # (BUG FIX: removed an unreachable `msg += ...`
                        # statement that followed the bare `raise`.)
                        try:
                            move_path_to_trash(path)
                            if not isdir(path):
                                return
                        except OSError:
                            raise
                log.debug(msg + "Retrying after %s seconds..." % i)
                time.sleep(i)
        # Final time. pass exceptions to caller.
        shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
    """Delete *path* when it is an empty directory; otherwise do nothing."""
    if not isdir(path):
        return
    try:
        os.rmdir(path)
    except OSError:
        # non-empty (or removed concurrently) -- leave it alone
        pass
def yield_lines(path):
    """Yield the stripped, non-empty, non-comment lines of the file at *path*.

    Lines that are blank or start with '#' are skipped.
    """
    # BUG FIX: open via a context manager so the handle is closed when the
    # generator is exhausted or collected (the original never closed it)
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # new-style line: "<placeholder> <mode> <filename>", possibly
                # quoted; shlex keeps quoted paths together
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # old-style line: bare filename -> default placeholder, text mode
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # no has_prefix file: nothing needs prefix replacement
        pass
    return res
class PaddingError(Exception):
    # Raised by binary_replace() when the replacement is longer than the
    # placeholder, so the result cannot be null-padded back to size.
    pass
def binary_replace(data, a, b):
    """Replace every null-terminated occurrence of *a* in *data* with *b*,
    null-padding each replaced region so the total length is unchanged.

    All arguments are bytes objects.  Raises PaddingError when *b* is
    longer than *a* (nothing left to pad with).
    """
    shrink = len(a) - len(b)

    def _pad_replace(match):
        region = match.group()
        padding = shrink * region.count(a)
        if padding < 0:
            raise PaddingError(a, b, padding)
        return region.replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_replace, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
                  mode='text'):
    """Rewrite occurrences of *placeholder* in the file at *path* with
    *new_prefix*.

    mode 'text' does a plain substring replacement; mode 'binary' uses
    binary_replace() so the file length stays unchanged.  The file is
    removed and rewritten (preserving its permission bits) so that
    hard-linked cache copies are not clobbered.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        data = fi.read()
    if mode == 'text':
        new_data = data.replace(placeholder.encode('utf-8'),
                                new_prefix.encode('utf-8'))
    elif mode == 'binary':
        new_data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        # BUG FIX: was `sys.exit("Invalid mode:" % mode)`, which raises
        # TypeError (no conversion specifier) instead of exiting cleanly
        sys.exit("Invalid mode: %r" % mode)

    if new_data == data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package name of a canonical dist, e.g.
    'numpy-1.6.2-py26_0' -> 'numpy'."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # start from the package's own info/index.json ...
    with open(join(info_dir, 'index.json')) as index_file:
        meta = json.load(index_file)
    # ... and layer the install-time information on top
    meta.update(extra_info)
    # persist as <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    out_path = join(meta_dir, dist + '.json')
    with open(out_path, 'w') as out_file:
        json.dump(meta, out_file, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional; skip menus rather than failing the install
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # one bad menu entry must not abort linking the package
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # no script shipped for this action: treat as success
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    env = os.environ
    # NOTE(review): `env` aliases os.environ, so the assignments below also
    # mutate this process's environment as a side effect -- confirm whether
    # that is intended before changing this to os.environ.copy().
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(pkgs_dir, dist):
    """Return the URL *dist* was downloaded from, looked up in the package
    cache's urls.txt, or None when unknown.

    The most recently appended matching URL wins.
    """
    try:
        # BUG FIX: context manager closes the handle (was open(...).read())
        with open(join(pkgs_dir, 'urls.txt')) as fh:
            urls = fh.read().split()
        for url in urls[::-1]:
            if url.endswith('/%s.tar.bz2' % dist):
                return url
    except IOError:
        pass
    return None
def read_icondata(source_dir):
import base64
try:
data = open(join(source_dir, 'info', 'icon.png'), 'rb').read()
return base64.b64encode(data).decode('utf-8')
except IOError:
pass
return None
def read_no_link(info_dir):
    """Return the union of filenames listed in info/no_link and
    info/no_softlink (files that must be copied, never linked)."""
    res = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            res |= set(yield_lines(join(info_dir, fn)))
        except IOError:
            # optional files; absence simply means no restrictions
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
# do not symlink root env - this clobbers activate incorrectly.
if normpath(prefix) == normpath(sys.prefix):
return
if on_win:
where = 'Scripts'
symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
else:
where = 'bin'
symlink_fn = os.symlink
if not isdir(join(prefix, where)):
os.makedirs(join(prefix, where))
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from *root_dir* into *prefix*,
    replacing any stale links first."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        # try to kill stale links if they exist
        if os.path.lexists(prefix_file):
            os.remove(prefix_file)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(prefix_file):
            symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Return True when a hard link can be created from the package cache
    into *prefix*, False otherwise (e.g. across filesystems).

    Probes by hard-linking the package's info/index.json to a temporary
    file inside *prefix*; the probe file (and *prefix*, if left empty)
    is always removed afterwards.
    """
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        return True
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- fetched
def is_fetched(pkgs_dir, dist):
    """True when the tarball for *dist* is present in *pkgs_dir*."""
    return isfile(join(pkgs_dir, '%s.tar.bz2' % dist))
def rm_fetched(pkgs_dir, dist):
    """Remove the downloaded tarball for *dist* from the package cache."""
    tarball = join(pkgs_dir, dist + '.tar.bz2')
    with Locked(pkgs_dir):
        rm_rf(tarball)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
"""
return the (set of canonical names) of all extracted packages
"""
if not isdir(pkgs_dir):
return set()
return set(dn for dn in os.listdir(pkgs_dir)
if (isfile(join(pkgs_dir, dn, 'info', 'files')) and
isfile(join(pkgs_dir, dn, 'info', 'index.json'))))
def extract(pkgs_dir, dist):
"""
Extract a package, i.e. make a package available for linkage. We assume
that the compressed packages is located in the packages directory.
"""
with Locked(pkgs_dir):
path = join(pkgs_dir, dist)
t = tarfile.open(path + '.tar.bz2')
t.extractall(path=path)
t.close()
if sys.platform.startswith('linux') and os.getuid() == 0:
# When extracting as root, tarfile will by restore ownership
# of extracted files. However, we want root to be the owner
# (our implementation of --no-same-owner).
for root, dirs, files in os.walk(path):
for fn in files:
p = join(root, fn)
os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
    """Return True if `dist` has been fully unpacked into the cache."""
    info = join(pkgs_dir, dist, 'info')
    return isfile(join(info, 'files')) and isfile(join(info, 'index.json'))
def rm_extracted(pkgs_dir, dist):
    # Delete the unpacked directory for `dist`, holding the
    # package-cache lock so concurrent conda processes do not race.
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
# ------- linkage of packages
def linked_data(prefix):
    """
    Return a dict mapping the canonical names of the packages linked in
    `prefix` to their parsed conda-meta JSON records.
    """
    records = {}
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return records
    for fn in os.listdir(meta_dir):
        if not fn.endswith('.json'):
            continue
        try:
            with open(join(meta_dir, fn)) as fin:
                records[fn[:-5]] = json.load(fin)
        except IOError:
            # unreadable records are silently skipped, as before
            pass
    return records
def linked(prefix):
    """
    Return the set of canonical names of the packages linked in `prefix`.
    """
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        return set()
    names = set()
    for fn in os.listdir(meta_dir):
        if fn.endswith('.json'):
            names.add(fn[:-5])
    return names
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
    """
    Return the install meta-data (a dict) for the linked package `dist`
    in `prefix`, or None if the package is not linked there.
    """
    try:
        with open(join(prefix, 'conda-meta', dist + '.json')) as fi:
            return json.load(fi)
    except IOError:
        return None
def delete_trash(prefix=None):
    # Best-effort removal of the .trash directory in every package
    # cache.  `prefix` is accepted for API compatibility but unused.
    from conda import config
    for pkg_dir in config.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # trash=False: never try to move the trash into itself
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            # deletion can fail (e.g. files still in use on Windows);
            # log and continue with the next cache directory
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move the file `f` (a path relative to `prefix`) to the trash.

    `tempdir` is a deprecated parameter and is ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
"""
Move a path to the trash
"""
# Try deleting the trash every time we use it.
delete_trash()
from conda import config
for pkg_dir in config.pkgs_dirs:
import tempfile
trash_dir = join(pkg_dir, '.trash')
try:
os.makedirs(trash_dir)
except OSError as e1:
if e1.errno != errno.EEXIST:
continue
trash_dir = tempfile.mkdtemp(dir=trash_dir)
trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))
try:
os.makedirs(trash_dir)
except OSError as e2:
if e2.errno != errno.EEXIST:
continue
try:
shutil.move(path, trash_dir)
except OSError as e:
log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
else:
return True
log.debug("Could not move %s to trash" % path)
return False
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
'''
Set up a package in a specified (environment) prefix. We assume that
the package has been extracted (using extract() above).
'''
index = index or {}
log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
(pkgs_dir, prefix, dist, linktype))
source_dir = join(pkgs_dir, dist)
if not run_script(source_dir, dist, 'pre-link', prefix):
sys.exit('Error: pre-link failed: %s' % dist)
info_dir = join(source_dir, 'info')
files = list(yield_lines(join(info_dir, 'files')))
has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
no_link = read_no_link(info_dir)
with Locked(prefix), Locked(pkgs_dir):
for f in files:
src = join(source_dir, f)
dst = join(prefix, f)
dst_dir = dirname(dst)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if os.path.exists(dst):
log.warn("file already exists: %r" % dst)
try:
os.unlink(dst)
except OSError:
log.error('failed to unlink: %r' % dst)
if on_win:
try:
move_path_to_trash(dst)
except ImportError:
# This shouldn't be an issue in the installer anyway
pass
lt = linktype
if f in has_prefix_files or f in no_link or islink(src):
lt = LINK_COPY
try:
_link(src, dst, lt)
except OSError as e:
log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
(src, dst, lt, e))
if name_dist(dist) == '_cache':
return
for f in sorted(has_prefix_files):
placeholder, mode = has_prefix_files[f]
try:
update_prefix(join(prefix, f), prefix, placeholder, mode)
except PaddingError:
sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
(placeholder, dist))
mk_menus(prefix, files, remove=False)
if not run_script(prefix, dist, 'post-link'):
sys.exit("Error: post-link failed for: %s" % dist)
# Make sure the script stays standalone for the installer
try:
from conda.config import remove_binstar_tokens
except ImportError:
# There won't be any binstar tokens in the installer anyway
def remove_binstar_tokens(url):
return url
meta_dict = index.get(dist + '.tar.bz2', {})
meta_dict['url'] = read_url(pkgs_dir, dist)
if meta_dict['url']:
meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
try:
alt_files_path = join(prefix, 'conda-meta', dist + '.files')
meta_dict['files'] = list(yield_lines(alt_files_path))
os.unlink(alt_files_path)
except IOError:
meta_dict['files'] = files
meta_dict['link'] = {'source': source_dir,
'type': link_name_map.get(linktype)}
if 'channel' in meta_dict:
meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
if 'icon' in meta_dict:
meta_dict['icondata'] = read_icondata(source_dir)
create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
'''
Remove a package from the specified environment, it is an error if the
package does not exist in the prefix.
'''
with Locked(prefix):
run_script(prefix, dist, 'pre-unlink')
meta_path = join(prefix, 'conda-meta', dist + '.json')
with open(meta_path) as fi:
meta = json.load(fi)
mk_menus(prefix, meta['files'], remove=True)
dst_dirs1 = set()
for f in meta['files']:
dst = join(prefix, f)
dst_dirs1.add(dirname(dst))
try:
os.unlink(dst)
except OSError: # file might not exist
log.debug("could not remove file: '%s'" % dst)
if on_win and os.path.exists(join(prefix, f)):
try:
log.debug("moving to trash")
move_path_to_trash(dst)
except ImportError:
# This shouldn't be an issue in the installer anyway
# but it can potentially happen with importing conda.config
log.debug("cannot import conda.config; probably not an issue")
# remove the meta-file last
os.unlink(meta_path)
dst_dirs2 = set()
for path in dst_dirs1:
while len(path) > len(prefix):
dst_dirs2.add(path)
path = dirname(path)
# in case there is nothing left
dst_dirs2.add(join(prefix, 'conda-meta'))
dst_dirs2.add(prefix)
for path in sorted(dst_dirs2, key=len, reverse=True):
rm_empty_dir(path)
def messages(prefix):
    # Print the contents of <prefix>/.messages.txt (accumulated by
    # package post-link scripts) to stdout, then delete it either way.
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # no messages were produced
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(linked_dists, keep_dists):
    """
    Return the sorted list of distributions to remove so that at most
    one distribution per package name remains.  Distributions in the
    iterable `keep_dists` are never removed.
    """
    from collections import defaultdict
    protected = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in linked_dists:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # `group` holds all distributions sharing one package name
        if len(group) == 1:
            # a single distribution for this name: nothing to remove
            continue
        if group & protected:
            # keep exactly the protected ones, drop everything else
            removals.update(group - protected)
        else:
            # nothing protected: keep only the highest-sorting one
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """
    Minimal CLI used by the self-extracting installer: link every
    distribution listed in --file (or every extracted package in the
    cache) into the prefix, then prune duplicate records.
    """
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted(pkgs_dir))
    if not idists:
        # Nothing to link; bail out early instead of raising an
        # IndexError on idists[0] below.
        print("No distributions to link.")
        return
    # probe link support with the first distribution, then reuse the
    # chosen link type for all of them
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(pkgs_dir, prefix, dist, linktype)
    messages(prefix)
    # keep only one record per package name; prefer the ones just linked
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
''' This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
'''
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
import re
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
from conda.lock import Locked
except ImportError:
# Make sure this still works as a standalone script for the Anaconda
# installer.
class Locked(object):
def __init__(self, *args, **kwargs):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
try:
from conda.utils import win_path_to_unix
except ImportError:
def win_path_to_unix(path, root_prefix=""):
"""Convert a path or ;-separated string of paths into a unix representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
def translation(found_path):
found = found_path.group(1).replace("\\", "/").replace(":", "")
return root_prefix + "/" + found
return re.sub(path_re, translation, path).replace(";/", ":/")
on_win = bool(sys.platform == "win32")
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
def win_hard_link(src, dst):
"Equivalent to os.link, using the win32 CreateHardLink call."
if not CreateHardLink(dst, src, None):
raise OSError('win32 hard link failed')
def win_soft_link(src, dst):
"Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
if CreateSymbolicLink is None:
raise OSError('win32 soft link not supported')
if not CreateSymbolicLink(dst, src, isdir(src)):
raise OSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
"""Special function for Windows XP where the `CreateSymbolicLink`
function is not available.
Simply creates a `.bat` file at `dst` which calls `src` together with
all command line arguments.
Works of course only with callable files, e.g. `.bat` or `.exe` files.
"""
from conda.utils import shells
try:
os.makedirs(os.path.dirname(dst))
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
pass
else:
raise
if 'cmd.exe' in shell.lower():
# bat file redirect
with open(dst+'.bat', 'w') as f:
f.write('@echo off\n"%s" %%*\n' % src)
elif 'powershell' in shell.lower():
# TODO: probably need one here for powershell at some point
pass
else:
# This one is for bash/cygwin/msys
with open(dst, "w") as f:
f.write("#!/bin/sh \n")
if src.endswith("conda"):
f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
else:
f.write('source %s "$@"' % shells[shell]['path_to'](src))
# Make the new file executable
# http://stackoverflow.com/a/30463972/1170370
mode = os.stat(dst).st_mode
mode |= (mode & 292) >> 2 # copy R bits to X
os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
""" Copied from Python 2.7 to avoid getting
`No handlers could be found for logger "patch"`
http://bugs.python.org/issue16539
"""
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
if linktype == LINK_HARD:
if on_win:
win_hard_link(src, dst)
else:
os.link(src, dst)
elif linktype == LINK_SOFT:
if on_win:
win_soft_link(src, dst)
else:
os.symlink(src, dst)
elif linktype == LINK_COPY:
# copy relative symlinks as symlinks
if not on_win and islink(src) and not os.readlink(src).startswith('/'):
os.symlink(os.readlink(src), dst)
else:
shutil.copy2(src, dst)
else:
raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror callback: clear the read-only bit and retry
    # the failed operation (needed for read-only files on Windows).
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    # shutil.rmtree onerror callback: downgrade removal failures to
    # warnings instead of aborting the whole tree removal.
    if exc_info[1].errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif exc_info[1].errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
"""
Completely delete path
max_retries is the number of times to retry on failure. The default is
5. This only applies to deleting a directory.
If removing path fails and trash is True, files will be moved to the trash directory.
"""
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
try:
os.unlink(path)
except (OSError, IOError):
log.warn("Cannot remove, permission denied: {0}".format(path))
elif isdir(path):
for i in range(max_retries):
try:
shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
return
except OSError as e:
msg = "Unable to delete %s\n%s\n" % (path, e)
if on_win:
try:
shutil.rmtree(path, onerror=_remove_readonly)
return
except OSError as e1:
msg += "Retry with onerror failed (%s)\n" % e1
p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode != 0:
msg += '%s\n%s\n' % (stdout, stderr)
else:
if not isdir(path):
return
if trash:
try:
move_path_to_trash(path)
if not isdir(path):
return
except OSError as e2:
raise
msg += "Retry with onerror failed (%s)\n" % e2
log.debug(msg + "Retrying after %s seconds..." % i)
time.sleep(i)
# Final time. pass exceptions to caller.
shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
def rm_empty_dir(path):
"""
Remove the directory `path` if it is a directory and empty.
If the directory does not exist or is not empty, do nothing.
"""
try:
os.rmdir(path)
except OSError: # directory might not exist or not be empty
pass
def yield_lines(path):
    """
    Yield the stripped, non-empty, non-comment lines of the text file
    at `path`.

    The file handle is now closed deterministically when the generator
    is exhausted or closed; the original left it to garbage collection.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if line and not line.startswith('#'):
                yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # new-style line: "<placeholder> <mode> <filename>",
                # possibly quoted (posix=False keeps backslashes intact)
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # old-style line: just a filename; assume the default
                # placeholder and text mode
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # no has_prefix file: nothing needs prefix replacement
        pass
    return res
class PaddingError(Exception):
    """Raised when a replacement string is longer than the placeholder
    it must fit into (see binary_replace/update_prefix)."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a`
    is replaced with `b` and the remaining string is padded with null
    characters so the overall length is unchanged.  All input arguments
    are expected to be bytes objects.
    """
    pad_per_hit = len(a) - len(b)

    def _substitute(match):
        hits = match.group().count(a)
        padding = pad_per_hit * hits
        if padding < 0:
            # the replacement is longer than the placeholder; padding
            # cannot keep the data the same length
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_substitute, data)
    assert len(result) == len(data)
    return result
def update_prefix(path, new_prefix, placeholder=prefix_placeholder,
mode='text'):
if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
# original prefix uses unix-style path separators
# replace with unix-style path separators
new_prefix = new_prefix.replace('\\', '/')
path = os.path.realpath(path)
with open(path, 'rb') as fi:
data = fi.read()
if mode == 'text':
new_data = data.replace(placeholder.encode('utf-8'),
new_prefix.encode('utf-8'))
elif mode == 'binary':
new_data = binary_replace(data, placeholder.encode('utf-8'),
new_prefix.encode('utf-8'))
else:
sys.exit("Invalid mode:" % mode)
if new_data == data:
return
st = os.lstat(path)
# Remove file before rewriting to avoid destroying hard-linked cache
os.remove(path)
with open(path, 'wb') as fo:
fo.write(new_data)
os.chmod(path, stat.S_IMODE(st.st_mode))
def name_dist(dist):
    """Return the package-name part of a canonical dist string,
    e.g. 'numpy-1.6.2-py26_0' -> 'numpy'."""
    parts = dist.rsplit('-', 2)
    return parts[0]
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Write the conda-meta record for package `dist` into `prefix`.

    The record is the package's own info/index.json merged with
    `extra_info` (installation-specific data), stored as
    <prefix>/conda-meta/<dist>.json.
    """
    # start from the package's own metadata ...
    with open(join(info_dir, 'index.json')) as fi:
        record = json.load(fi)
    # ... and overlay the installation-specific information
    record.update(extra_info)
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist + '.json'), 'w') as fo:
        json.dump(record, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
"""
Create cross-platform menu items (e.g. Windows Start Menu)
Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
``remove=True`` will remove the menu items.
"""
menu_files = [f for f in files
if (f.lower().startswith('menu/') and
f.lower().endswith('.json'))]
if not menu_files:
return
elif basename(abspath(prefix)).startswith('_'):
logging.warn("Environment name starts with underscore '_'. "
"Skipping menu installation.")
return
try:
import menuinst
except:
logging.warn("Menuinst could not be imported:")
logging.warn(traceback.format_exc())
return
for f in menu_files:
try:
menuinst.install(join(prefix, f), remove, prefix)
except:
stdoutlog.error("menuinst Exception:")
stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
"""
call the post-link (or pre-unlink) script, and return True on success,
False on failure
"""
path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
name_dist(dist),
action,
'bat' if on_win else 'sh'))
if not isfile(path):
return True
if on_win:
try:
args = [os.environ['COMSPEC'], '/c', path]
except KeyError:
return False
else:
shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
args = [shell_path, path]
env = os.environ
env['ROOT_PREFIX'] = sys.prefix
env['PREFIX'] = str(env_prefix or prefix)
env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
if action == 'pre-link':
env['SOURCE_DIR'] = str(prefix)
try:
subprocess.check_call(args, env=env)
except subprocess.CalledProcessError:
return False
return True
def read_url(pkgs_dir, dist):
    """
    Return the URL from which `dist` was fetched, by scanning the
    package cache's urls.txt (most recent matching entry wins), or
    None when the file is missing or has no matching entry.

    The file handle is now closed deterministically via `with`; the
    original left it to garbage collection.
    """
    suffix = '/%s.tar.bz2' % dist
    try:
        with open(join(pkgs_dir, 'urls.txt')) as fi:
            urls = fi.read().split()
    except IOError:
        return None
    # iterate newest-first so the most recent download wins
    for url in reversed(urls):
        if url.endswith(suffix):
            return url
    return None
def read_icondata(source_dir):
    """
    Return the package icon (info/icon.png) as a base64-encoded string,
    or None when the package does not ship an icon.

    The file handle is now closed deterministically via `with`; the
    original left it to garbage collection.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    # Return the set of file names that must never be linked and must
    # be copied instead: the union of the optional info/no_link and
    # info/no_softlink files.
    res = set()
    for fn in 'no_link', 'no_softlink':
        try:
            res.update(set(yield_lines(join(info_dir, fn))))
        except IOError:
            # both files are optional
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
# do not symlink root env - this clobbers activate incorrectly.
if normpath(prefix) == normpath(sys.prefix):
return
if on_win:
where = 'Scripts'
symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
else:
where = 'bin'
symlink_fn = os.symlink
if not isdir(join(prefix, where)):
os.makedirs(join(prefix, where))
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
scripts = ["conda", "activate", "deactivate"]
prefix_where = join(prefix, where)
if not isdir(prefix_where):
os.makedirs(prefix_where)
for f in scripts:
root_file = join(root_dir, where, f)
prefix_file = join(prefix_where, f)
# try to kill stale links if they exist
if os.path.lexists(prefix_file):
os.remove(prefix_file)
# if they're in use, they won't be killed. Skip making new symlink.
if not os.path.lexists(prefix_file):
symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
src = join(pkgs_dir, dist, 'info', 'index.json')
dst = join(prefix, '.tmp-%s' % dist)
assert isfile(src), src
assert not isfile(dst), dst
try:
if not isdir(prefix):
os.makedirs(prefix)
_link(src, dst, LINK_HARD)
# Some file systems (at least BeeGFS) do not support hard-links
# between files in different directories. Depending on the
# file system configuration, a symbolic link may be created
# instead. If a symbolic link is created instead of a hard link,
# return False.
return not os.path.islink(dst)
except OSError:
return False
finally:
rm_rf(dst)
rm_empty_dir(prefix)
# ------- package cache ----- fetched
def is_fetched(pkgs_dir, dist):
return isfile(join(pkgs_dir, dist + '.tar.bz2'))
def rm_fetched(pkgs_dir, dist):
with Locked(pkgs_dir):
path = join(pkgs_dir, dist + '.tar.bz2')
rm_rf(path)
# ------- package cache ----- extracted
def extracted(pkgs_dir):
"""
return the (set of canonical names) of all extracted packages
"""
if not isdir(pkgs_dir):
return set()
return set(dn for dn in os.listdir(pkgs_dir)
if (isfile(join(pkgs_dir, dn, 'info', 'files')) and
isfile(join(pkgs_dir, dn, 'info', 'index.json'))))
def extract(pkgs_dir, dist):
"""
Extract a package, i.e. make a package available for linkage. We assume
that the compressed packages is located in the packages directory.
"""
with Locked(pkgs_dir):
path = join(pkgs_dir, dist)
t = tarfile.open(path + '.tar.bz2')
t.extractall(path=path)
t.close()
if sys.platform.startswith('linux') and os.getuid() == 0:
# When extracting as root, tarfile will by restore ownership
# of extracted files. However, we want root to be the owner
# (our implementation of --no-same-owner).
for root, dirs, files in os.walk(path):
for fn in files:
p = join(root, fn)
os.lchown(p, 0, 0)
def is_extracted(pkgs_dir, dist):
return (isfile(join(pkgs_dir, dist, 'info', 'files')) and
isfile(join(pkgs_dir, dist, 'info', 'index.json')))
def rm_extracted(pkgs_dir, dist):
with Locked(pkgs_dir):
path = join(pkgs_dir, dist)
rm_rf(path)
# ------- linkage of packages
def linked_data(prefix):
"""
Return a dictionary of the linked packages in prefix.
"""
res = {}
meta_dir = join(prefix, 'conda-meta')
if isdir(meta_dir):
for fn in os.listdir(meta_dir):
if fn.endswith('.json'):
try:
with open(join(meta_dir, fn)) as fin:
res[fn[:-5]] = json.load(fin)
except IOError:
pass
return res
def linked(prefix):
"""
Return the (set of canonical names) of linked packages in prefix.
"""
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
return set()
return set(fn[:-5] for fn in os.listdir(meta_dir) if fn.endswith('.json'))
# FIXME Functions that begin with `is_` should return True/False
def is_linked(prefix, dist):
"""
Return the install meta-data for a linked package in a prefix, or None
if the package is not linked in the prefix.
"""
meta_path = join(prefix, 'conda-meta', dist + '.json')
try:
with open(meta_path) as fi:
return json.load(fi)
except IOError:
return None
def delete_trash(prefix=None):
from conda import config
for pkg_dir in config.pkgs_dirs:
trash_dir = join(pkg_dir, '.trash')
try:
log.debug("Trying to delete the trash dir %s" % trash_dir)
rm_rf(trash_dir, max_retries=1, trash=False)
except OSError as e:
log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
"""
Move a file f from prefix to the trash
tempdir is a deprecated parameter, and will be ignored.
This function is deprecated in favor of `move_path_to_trash`.
"""
return move_path_to_trash(join(prefix, f))
def move_path_to_trash(path):
"""
Move a path to the trash
"""
# Try deleting the trash every time we use it.
delete_trash()
from conda import config
for pkg_dir in config.pkgs_dirs:
import tempfile
trash_dir = join(pkg_dir, '.trash')
try:
os.makedirs(trash_dir)
except OSError as e1:
if e1.errno != errno.EEXIST:
continue
trash_dir = tempfile.mkdtemp(dir=trash_dir)
trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir))
try:
os.makedirs(trash_dir)
except OSError as e2:
if e2.errno != errno.EEXIST:
continue
try:
shutil.move(path, trash_dir)
except OSError as e:
log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
else:
return True
log.debug("Could not move %s to trash" % path)
return False
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
'''
Set up a package in a specified (environment) prefix. We assume that
the package has been extracted (using extract() above).
'''
index = index or {}
log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
(pkgs_dir, prefix, dist, linktype))
source_dir = join(pkgs_dir, dist)
if not run_script(source_dir, dist, 'pre-link', prefix):
sys.exit('Error: pre-link failed: %s' % dist)
info_dir = join(source_dir, 'info')
files = list(yield_lines(join(info_dir, 'files')))
has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
no_link = read_no_link(info_dir)
with Locked(prefix), Locked(pkgs_dir):
for f in files:
src = join(source_dir, f)
dst = join(prefix, f)
dst_dir = dirname(dst)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if os.path.exists(dst):
log.warn("file already exists: %r" % dst)
try:
os.unlink(dst)
except OSError:
log.error('failed to unlink: %r' % dst)
if on_win:
try:
move_path_to_trash(dst)
except ImportError:
# This shouldn't be an issue in the installer anyway
pass
lt = linktype
if f in has_prefix_files or f in no_link or islink(src):
lt = LINK_COPY
try:
_link(src, dst, lt)
except OSError as e:
log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
(src, dst, lt, e))
if name_dist(dist) == '_cache':
return
for f in sorted(has_prefix_files):
placeholder, mode = has_prefix_files[f]
try:
update_prefix(join(prefix, f), prefix, placeholder, mode)
except PaddingError:
sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
(placeholder, dist))
mk_menus(prefix, files, remove=False)
if not run_script(prefix, dist, 'post-link'):
sys.exit("Error: post-link failed for: %s" % dist)
# Make sure the script stays standalone for the installer
try:
from conda.config import remove_binstar_tokens
except ImportError:
# There won't be any binstar tokens in the installer anyway
def remove_binstar_tokens(url):
return url
meta_dict = index.get(dist + '.tar.bz2', {})
meta_dict['url'] = read_url(pkgs_dir, dist)
if meta_dict['url']:
meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
try:
alt_files_path = join(prefix, 'conda-meta', dist + '.files')
meta_dict['files'] = list(yield_lines(alt_files_path))
os.unlink(alt_files_path)
except IOError:
meta_dict['files'] = files
meta_dict['link'] = {'source': source_dir,
'type': link_name_map.get(linktype)}
if 'channel' in meta_dict:
meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
if 'icon' in meta_dict:
meta_dict['icondata'] = read_icondata(source_dir)
create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
'''
Remove a package from the specified environment, it is an error if the
package does not exist in the prefix.
'''
with Locked(prefix):
run_script(prefix, dist, 'pre-unlink')
meta_path = join(prefix, 'conda-meta', dist + '.json')
with open(meta_path) as fi:
meta = json.load(fi)
mk_menus(prefix, meta['files'], remove=True)
dst_dirs1 = set()
for f in meta['files']:
dst = join(prefix, f)
dst_dirs1.add(dirname(dst))
try:
os.unlink(dst)
except OSError: # file might not exist
log.debug("could not remove file: '%s'" % dst)
if on_win and os.path.exists(join(prefix, f)):
try:
log.debug("moving to trash")
move_path_to_trash(dst)
except ImportError:
# This shouldn't be an issue in the installer anyway
# but it can potentially happen with importing conda.config
log.debug("cannot import conda.config; probably not an issue")
# remove the meta-file last
os.unlink(meta_path)
dst_dirs2 = set()
for path in dst_dirs1:
while len(path) > len(prefix):
dst_dirs2.add(path)
path = dirname(path)
# in case there is nothing left
dst_dirs2.add(join(prefix, 'conda-meta'))
dst_dirs2.add(prefix)
for path in sorted(dst_dirs2, key=len, reverse=True):
rm_empty_dir(path)
def messages(prefix):
path = join(prefix, '.messages.txt')
try:
with open(path) as fi:
sys.stdout.write(fi.read())
except IOError:
pass
finally:
rm_rf(path)
def duplicates_to_remove(linked_dists, keep_dists):
"""
Returns the (sorted) list of distributions to be removed, such that
only one distribution (for each name) remains. `keep_dists` is an
interable of distributions (which are not allowed to be removed).
"""
from collections import defaultdict
keep_dists = set(keep_dists)
ldists = defaultdict(set) # map names to set of distributions
for dist in linked_dists:
name = name_dist(dist)
ldists[name].add(dist)
res = set()
for dists in ldists.values():
# `dists` is the group of packages with the same name
if len(dists) == 1:
# if there is only one package, nothing has to be removed
continue
if dists & keep_dists:
# if the group has packages which are have to be kept, we just
# take the set of packages which are in group but not in the
# ones which have to be kept
res.update(dists - keep_dists)
else:
# otherwise, we take lowest (n-1) (sorted) packages
res.update(sorted(dists)[:-1])
return sorted(res)
# =========================== end API functions ==========================
def main():
    """Command-line entry point for the installer's link tool.

    Links every distribution (those listed in --file, or else every package
    already extracted into <prefix>/pkgs) into the target prefix, prints any
    post-link messages, and unlinks metadata of older duplicate packages.
    """
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")

    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted(pkgs_dir))

    # Probe with the first distribution whether hard-linking works between
    # the package cache and the prefix; fall back to copying when it doesn't.
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(pkgs_dir, prefix, dist, linktype)

    # Show (and then remove) any .messages.txt left behind by post-link steps.
    messages(prefix)

    # If several versions of one package ended up linked, unlink the metadata
    # of the ones we did not just install.
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)


if __name__ == '__main__':
    main()
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -515,7 +515,12 @@ def try_hard_link(pkgs_dir, prefix, dist):
if not isdir(prefix):
os.makedirs(prefix)
_link(src, dst, LINK_HARD)
- return True
+ # Some file systems (at least BeeGFS) do not support hard-links
+ # between files in different directories. Depending on the
+ # file system configuration, a symbolic link may be created
+ # instead. If a symbolic link is created instead of a hard link,
+ # return False.
+ return not os.path.islink(dst)
except OSError:
return False
finally: |
conda info --json and package lookup
If you set the `--json` flag for `conda info` when searching for packages, you sometimes get nothing:
``` bash
$ conda info numpy=1.11.0=py35_0
Fetching package metadata: ....
numpy 1.11.0 py35_0
-------------------
file name : numpy-1.11.0-py35_0.tar.bz2
name : numpy
version : 1.11.0
build number: 0
build string: py35_0
channel : defaults
size : 6.1 MB
date : 2016-03-28
license : BSD
md5 : 1900998c19c5e310687013f95374bba2
installed environments:
dependencies:
mkl 11.3.1
python 3.5*
$ conda info --json numpy=1.11.0=py35_0
{}
```
Things work fine for `conda info --json numpy`, so it's something with the spec format.
conda info:
```
platform : linux-64
conda version : 4.0.5
conda-build version : not installed
python version : 2.7.11.final.0
requests version : 2.9.1
root environment : /opt/conda (writable)
default environment : /opt/conda
envs directories : /opt/conda/envs
package cache : /opt/conda/pkgs
channel URLs : https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : None
is foreign system : False
```
| conda/cli/main_info.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import json
import os
import re
import sys
from collections import defaultdict, OrderedDict
from itertools import chain
from os import listdir
from os.path import exists, expanduser, join
from conda.cli import common
from conda.compat import iteritems
help = "Display information about current conda install."
example = """
Examples:
conda info -a
"""
def configure_parser(sub_parsers):
    """Attach the ``conda info`` sub-command and all of its CLI options."""
    p = sub_parsers.add_parser(
        'info',
        description=help,
        help=help,
        epilog=example,
    )
    common.add_parser_json(p)
    p.add_argument(
        '-a', "--all",
        action="store_true",
        help="Show all information, (environments, license, and system "
             "information.")
    p.add_argument(
        '-e', "--envs",
        action="store_true",
        help="List all known conda environments.",
    )
    p.add_argument(
        '-l', "--license",
        action="store_true",
        help="Display information about the local conda licenses list.",
    )
    p.add_argument(
        '-s', "--system",
        action="store_true",
        help="List environment variables.",
    )
    p.add_argument(
        'packages',
        action="store",
        nargs='*',
        help="Display information about packages.",
    )
    p.add_argument(
        '--root',
        action='store_true',
        help='Display root environment path.',
    )
    p.add_argument(
        '--unsafe-channels',
        action='store_true',
        help='Display list of channels with tokens exposed.',
    )
    # dispatch to execute() when this sub-command is selected
    p.set_defaults(func=execute)
def show_pkg_info(name):
    """Print every available version/build of package *name* from the index."""
    from conda.api import get_index
    from conda.resolve import Resolve

    resolver = Resolve(get_index())
    print(name)
    if name not in resolver.groups:
        print('    not available')
        return
    for pkg in sorted(resolver.get_pkgs(name)):
        features = common.disp_features(resolver.features(pkg.fn))
        print('    %-15s %15s %s' % (pkg.version, pkg.build, features))
# TODO
python_re = re.compile(r'python\d\.\d')


def get_user_site():
    """Return the user's site-packages directories, if any.

    On POSIX systems this scans ``~/.local/lib`` for ``pythonX.Y`` entries;
    on Windows it lists ``%APPDATA%\\Python``.  Returns an empty list when
    no user site directories exist.
    """
    site_dirs = []
    if sys.platform != 'win32':
        if exists(expanduser('~/.local/lib')):
            for path in listdir(expanduser('~/.local/lib/')):
                if python_re.match(path):
                    site_dirs.append("~/.local/lib/%s" % path)
    else:
        if 'APPDATA' not in os.environ:
            return site_dirs
        APPDATA = os.environ['APPDATA']
        if exists(join(APPDATA, 'Python')):
            # Use the same capitalization as the exists() check above;
            # the previous 'PYTHON' spelling only worked because Windows
            # file systems are case-insensitive.
            site_dirs = [join(APPDATA, 'Python', i) for i in
                         listdir(join(APPDATA, 'Python'))]
    return site_dirs
def pretty_package(pkg):
    """Print a human-readable summary of one package record, including deps."""
    import conda.config as config
    from conda.utils import human_bytes

    info = pkg.info
    # Fixed, ordered header fields first ...
    d = OrderedDict()
    d['file name'] = pkg.fn
    d['name'] = pkg.name
    d['version'] = pkg.version
    d['build number'] = pkg.build_number
    d['build string'] = pkg.build
    d['channel'] = config.canonical_channel_name(pkg.channel)
    d['size'] = human_bytes(info['size'])
    # ... then the remaining metadata keys (sorted), skipping duplicates.
    already_shown = {'build', 'depends', 'requires', 'channel', 'name',
                     'version', 'build_number', 'size'}
    for key in sorted(info):
        if key not in already_shown:
            d[key] = info[key]

    print()
    header = "%s %s %s" % (d['name'], d['version'], d['build string'])
    print(header)
    print('-' * len(header))
    for key, value in d.items():
        print("%-12s: %s" % (key, value))
    print('dependencies:')
    for dep in info['depends']:
        print('    %s' % dep)
def execute(args, parser):
    """Implement ``conda info``: print package, environment, or system details.

    Dispatches on the parsed CLI flags: --root, positional package specs,
    --unsafe-channels, and the --envs/--system/--license option group.
    Honors --json for machine-readable output throughout.
    """
    import os
    from os.path import dirname

    import conda
    import conda.config as config
    from conda.resolve import Resolve
    from conda.cli.main_init import is_initialized
    from conda.api import get_index

    if args.root:
        if args.json:
            common.stdout_json({'root_prefix': config.root_dir})
        else:
            print(config.root_dir)
        return

    if args.packages:
        # Resolve full specs (e.g. "numpy=1.11.0=py35_0") against the channel
        # index for BOTH output modes.  The previous --json path used
        # get_package_versions(), which only understood bare names and made
        # ``conda info --json <spec>`` print ``{}``.
        index = get_index()
        r = Resolve(index)
        if args.json:
            common.stdout_json({
                package: [p._asdict()
                          for p in sorted(r.get_pkgs(common.arg2spec(package)))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                versions = r.get_pkgs(common.arg2spec(package))
                for pkg in sorted(versions):
                    pretty_package(pkg)
        return

    options = 'envs', 'system', 'license'

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    # this is a hack associated with channel weight until we get the package cache reworked
    # in a future release
    # for now, just ordering the channels for display in a semi-plausible way
    d = defaultdict(list)
    any(d[v[1]].append(k) for k, v in iteritems(config.get_channel_urls()))
    channels = list(chain.from_iterable(d[q] for q in sorted(d, reverse=True)))

    info_dict = dict(
        platform=config.subdir,
        conda_version=conda.__version__,
        conda_build_version=conda_build_version,
        root_prefix=config.root_dir,
        root_writable=config.root_writable,
        pkgs_dirs=config.pkgs_dirs,
        envs_dirs=config.envs_dirs,
        default_prefix=config.default_prefix,
        channels=channels,
        rc_path=config.rc_path,
        user_rc_path=config.user_rc_path,
        sys_rc_path=config.sys_rc_path,
        is_foreign=bool(config.foreign),
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.unsafe_channels:
        # expose raw channel URLs (tokens included) and stop
        if not args.json:
            print("\n".join(info_dict["channels"]))
        else:
            print(json.dumps({"channels": info_dict["channels"]}))
        return 0
    else:
        info_dict['channels'] = [config.hide_binstar_tokens(c) for c in
                                 info_dict['channels']]

    if args.all or args.json:
        for option in options:
            setattr(args, option, True)

    if args.all or all(not getattr(args, opt) for opt in options):
        # pre-render multi-value fields for the aligned text report
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else
                               'read only')
        print("""\
Current conda install:

             platform : %(platform)s
        conda version : %(conda_version)s
  conda-build version : %(conda_build_version)s
       python version : %(python_version)s
     requests version : %(requests_version)s
     root environment : %(root_prefix)s  (%(_rtwro)s)
  default environment : %(default_prefix)s
     envs directories : %(_envs_dirs)s
        package cache : %(_pkgs_dirs)s
         channel URLs : %(_channels)s
          config file : %(rc_path)s
    is foreign system : %(is_foreign)s
""" % info_dict)
        if not is_initialized():
            print("""\
# NOTE:
#     root directory '%s' is uninitialized""" % config.root_dir)

    if args.envs:
        common.handle_envs_list(info_dict['envs'], not args.json)

    if args.system and not args.json:
        from conda.cli.find_commands import find_commands, find_executable
        print("sys.version: %s..." % (sys.version[:40]))
        print("sys.prefix: %s" % sys.prefix)
        print("sys.executable: %s" % sys.executable)
        print("conda location: %s" % dirname(conda.__file__))
        for cmd in sorted(set(find_commands() + ['build'])):
            print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
        print("user site dirs: ", end='')
        site_dirs = get_user_site()
        if site_dirs:
            print(site_dirs[0])
        else:
            print()
        for site_dir in site_dirs[1:]:
            print('                %s' % site_dir)
        print()

        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']
        if config.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif config.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        for ev in sorted(evars):
            print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
        print()

    if args.license and not args.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if args.json:
        common.stdout_json(info_dict)
<|code_end|>
| conda/cli/main_info.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import json
import os
import re
import sys
from collections import defaultdict, OrderedDict
from itertools import chain
from os import listdir
from os.path import exists, expanduser, join
from conda.cli import common
from conda.compat import iteritems
help = "Display information about current conda install."
example = """
Examples:
conda info -a
"""
def configure_parser(sub_parsers):
    """Attach the ``conda info`` sub-command and all of its CLI options."""
    p = sub_parsers.add_parser(
        'info',
        description=help,
        help=help,
        epilog=example,
    )
    common.add_parser_json(p)
    p.add_argument(
        '-a', "--all",
        action="store_true",
        help="Show all information, (environments, license, and system "
             "information.")
    p.add_argument(
        '-e', "--envs",
        action="store_true",
        help="List all known conda environments.",
    )
    p.add_argument(
        '-l', "--license",
        action="store_true",
        help="Display information about the local conda licenses list.",
    )
    p.add_argument(
        '-s', "--system",
        action="store_true",
        help="List environment variables.",
    )
    p.add_argument(
        'packages',
        action="store",
        nargs='*',
        help="Display information about packages.",
    )
    p.add_argument(
        '--root',
        action='store_true',
        help='Display root environment path.',
    )
    p.add_argument(
        '--unsafe-channels',
        action='store_true',
        help='Display list of channels with tokens exposed.',
    )
    # dispatch to execute() when this sub-command is selected
    p.set_defaults(func=execute)
def show_pkg_info(name):
    """Print every available version/build of package *name* from the index."""
    from conda.api import get_index
    from conda.resolve import Resolve

    index = get_index()
    r = Resolve(index)
    print(name)
    if name in r.groups:
        for pkg in sorted(r.get_pkgs(name)):
            # version, build string, and any tracked features per line
            print('    %-15s %15s %s' % (
                pkg.version,
                pkg.build,
                common.disp_features(r.features(pkg.fn))))
    else:
        print('    not available')
# TODO
python_re = re.compile(r'python\d\.\d')


def get_user_site():
    """Return the user's site-packages directories, if any.

    On POSIX systems this scans ``~/.local/lib`` for ``pythonX.Y`` entries;
    on Windows it lists ``%APPDATA%\\Python``.  Returns an empty list when
    no user site directories exist.
    """
    site_dirs = []
    if sys.platform != 'win32':
        if exists(expanduser('~/.local/lib')):
            for path in listdir(expanduser('~/.local/lib/')):
                if python_re.match(path):
                    site_dirs.append("~/.local/lib/%s" % path)
    else:
        if 'APPDATA' not in os.environ:
            return site_dirs
        APPDATA = os.environ['APPDATA']
        if exists(join(APPDATA, 'Python')):
            # Use the same capitalization as the exists() check above;
            # the previous 'PYTHON' spelling only worked because Windows
            # file systems are case-insensitive.
            site_dirs = [join(APPDATA, 'Python', i) for i in
                         listdir(join(APPDATA, 'Python'))]
    return site_dirs
def pretty_package(pkg):
    """Print a human-readable summary of one package record, including deps."""
    import conda.config as config
    from conda.utils import human_bytes

    # fixed, ordered header fields first ...
    d = OrderedDict([
        ('file name', pkg.fn),
        ('name', pkg.name),
        ('version', pkg.version),
        ('build number', pkg.build_number),
        ('build string', pkg.build),
        ('channel', config.canonical_channel_name(pkg.channel)),
        ('size', human_bytes(pkg.info['size'])),
    ])
    # ... then the remaining metadata keys (sorted), skipping ones shown above
    rest = pkg.info
    for key in sorted(rest):
        if key in {'build', 'depends', 'requires', 'channel', 'name',
                   'version', 'build_number', 'size'}:
            continue
        d[key] = rest[key]

    print()
    header = "%s %s %s" % (d['name'], d['version'], d['build string'])
    print(header)
    print('-'*len(header))
    for key in d:
        print("%-12s: %s" % (key, d[key]))
    print('dependencies:')
    for dep in pkg.info['depends']:
        print('    %s' % dep)
def execute(args, parser):
    """Implement ``conda info``: print package, environment, or system details.

    Dispatches on the parsed CLI flags: --root, positional package specs,
    --unsafe-channels, and the --envs/--system/--license option group.
    Honors --json for machine-readable output throughout.
    """
    import os
    from os.path import dirname

    import conda
    import conda.config as config
    from conda.resolve import Resolve
    from conda.cli.main_init import is_initialized
    from conda.api import get_index

    if args.root:
        if args.json:
            common.stdout_json({'root_prefix': config.root_dir})
        else:
            print(config.root_dir)
        return

    if args.packages:
        # Resolve full specs (e.g. "numpy=1.11.0=py35_0") against the channel
        # index for both output modes.
        index = get_index()
        r = Resolve(index)
        if args.json:
            common.stdout_json({
                package: [p._asdict()
                          for p in sorted(r.get_pkgs(common.arg2spec(package)))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                versions = r.get_pkgs(common.arg2spec(package))
                for pkg in sorted(versions):
                    pretty_package(pkg)
        return

    options = 'envs', 'system', 'license'

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    # this is a hack associated with channel weight until we get the package cache reworked
    # in a future release
    # for now, just ordering the channels for display in a semi-plausible way
    d = defaultdict(list)
    any(d[v[1]].append(k) for k, v in iteritems(config.get_channel_urls()))
    channels = list(chain.from_iterable(d[q] for q in sorted(d, reverse=True)))

    info_dict = dict(
        platform=config.subdir,
        conda_version=conda.__version__,
        conda_build_version=conda_build_version,
        root_prefix=config.root_dir,
        root_writable=config.root_writable,
        pkgs_dirs=config.pkgs_dirs,
        envs_dirs=config.envs_dirs,
        default_prefix=config.default_prefix,
        channels=channels,
        rc_path=config.rc_path,
        user_rc_path=config.user_rc_path,
        sys_rc_path=config.sys_rc_path,
        is_foreign=bool(config.foreign),
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.unsafe_channels:
        # expose raw channel URLs (tokens included) and stop
        if not args.json:
            print("\n".join(info_dict["channels"]))
        else:
            print(json.dumps({"channels": info_dict["channels"]}))
        return 0
    else:
        info_dict['channels'] = [config.hide_binstar_tokens(c) for c in
                                 info_dict['channels']]

    if args.all or args.json:
        for option in options:
            setattr(args, option, True)

    if args.all or all(not getattr(args, opt) for opt in options):
        # pre-render multi-value fields for the aligned text report
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 24 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else
                               'read only')
        print("""\
Current conda install:

             platform : %(platform)s
        conda version : %(conda_version)s
  conda-build version : %(conda_build_version)s
       python version : %(python_version)s
     requests version : %(requests_version)s
     root environment : %(root_prefix)s  (%(_rtwro)s)
  default environment : %(default_prefix)s
     envs directories : %(_envs_dirs)s
        package cache : %(_pkgs_dirs)s
         channel URLs : %(_channels)s
          config file : %(rc_path)s
    is foreign system : %(is_foreign)s
""" % info_dict)
        if not is_initialized():
            print("""\
# NOTE:
#     root directory '%s' is uninitialized""" % config.root_dir)

    if args.envs:
        common.handle_envs_list(info_dict['envs'], not args.json)

    if args.system and not args.json:
        from conda.cli.find_commands import find_commands, find_executable
        print("sys.version: %s..." % (sys.version[:40]))
        print("sys.prefix: %s" % sys.prefix)
        print("sys.executable: %s" % sys.executable)
        print("conda location: %s" % dirname(conda.__file__))
        for cmd in sorted(set(find_commands() + ['build'])):
            print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
        print("user site dirs: ", end='')
        site_dirs = get_user_site()
        if site_dirs:
            print(site_dirs[0])
        else:
            print()
        for site_dir in site_dirs[1:]:
            print('                %s' % site_dir)
        print()

        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']
        if config.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif config.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')
        for ev in sorted(evars):
            print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
        print()

    if args.license and not args.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if args.json:
        common.stdout_json(info_dict)
| conda/cli/main_info.py
--- a/conda/cli/main_info.py
+++ b/conda/cli/main_info.py
@@ -148,7 +148,7 @@ def execute(args, parser):
import conda.config as config
from conda.resolve import Resolve
from conda.cli.main_init import is_initialized
- from conda.api import get_index, get_package_versions
+ from conda.api import get_index
if args.root:
if args.json:
@@ -158,21 +158,19 @@ def execute(args, parser):
return
if args.packages:
- if args.json:
- results = defaultdict(list)
- for arg in args.packages:
- for pkg in get_package_versions(arg):
- results[arg].append(pkg._asdict())
- common.stdout_json(results)
- return
index = get_index()
r = Resolve(index)
- specs = map(common.arg2spec, args.packages)
-
- for spec in specs:
- versions = r.get_pkgs(spec)
- for pkg in sorted(versions):
- pretty_package(pkg)
+ if args.json:
+ common.stdout_json({
+ package: [p._asdict()
+ for p in sorted(r.get_pkgs(common.arg2spec(package)))]
+ for package in args.packages
+ })
+ else:
+ for package in args.packages:
+ versions = r.get_pkgs(common.arg2spec(package))
+ for pkg in sorted(versions):
+ pretty_package(pkg)
return
options = 'envs', 'system', 'license' |
When no condarc is found "no path!" is printed
Why was https://github.com/conda/conda/blob/master/conda/config.py#L115 added?
Is this a debug statement which never got removed?
| conda/config.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from conda.compat import urlparse
from conda.utils import try_write, memoized, yaml_load
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))
# ----- operating system and architecture -----
_sys_map = {'linux2': 'linux', 'linux': 'linux',
'darwin': 'osx', 'win32': 'win', 'openbsd5': 'openbsd'}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if force_32bit:
bits = 32
if platform == 'linux' and machine() in non_x86_linux_machines:
arch_name = machine()
subdir = 'linux-%s' % arch_name
else:
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
'root_dir',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
def get_rc_path():
    """Locate the .condarc to use, or None.

    $CONDARC wins (a value of a single space explicitly disables rc files);
    otherwise the first of ~/.condarc and <sys.prefix>/.condarc that exists.
    """
    env_path = os.getenv('CONDARC')
    if env_path == ' ':
        return None
    if env_path:
        return env_path
    for candidate in (user_rc_path, sys_rc_path):
        if isfile(candidate):
            return candidate
    return None
rc_path = get_rc_path()
def load_condarc(path):
    """Parse the YAML configuration file at *path*.

    Returns an empty dict when *path* is falsy, missing, or parses to
    nothing, so callers can treat "no condarc" and "empty condarc"
    uniformly.
    """
    if not path or not isfile(path):
        # A missing rc file is perfectly normal -- return quietly instead
        # of emitting the stray "no path!" debug message.
        return {}
    with open(path) as f:
        return yaml_load(f) or {}
rc = load_condarc(rc_path)
sys_rc = load_condarc(sys_rc_path) if isfile(sys_rc_path) else {}
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
rc.get('root_dir', sys.prefix))))
root_writable = try_write(root_dir)
root_env_name = 'root'
def _default_envs_dirs():
    """Default search path for environment directories."""
    # Fall back to per-user locations when the root prefix is read-only;
    # '~/envs' is kept for backwards compatibility.
    if root_writable:
        return [join(root_dir, 'envs')]
    return ['~/.conda/envs', '~/envs', join(root_dir, 'envs')]
def _pathsep_env(name):
x = os.getenv(name)
if x is None:
return []
res = []
for path in x.split(os.pathsep):
if path == 'DEFAULTS':
for p in rc.get('envs_dirs') or _default_envs_dirs():
res.append(p)
else:
res.append(path)
return res
envs_dirs = [abspath(expanduser(path)) for path in (
_pathsep_env('CONDA_ENVS_PATH') or
rc.get('envs_dirs') or
_default_envs_dirs()
)]
def pkgs_dir_from_envs_dir(envs_dir):
    """Map an envs directory to the package cache directory it uses."""
    root_envs = abspath(join(root_dir, 'envs'))
    if abspath(envs_dir) == root_envs:
        # the root envs dir shares the package cache inside the root prefix
        return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
    return join(envs_dir, '.pkgs')
pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]
# ----- default environment prefix -----
_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, root_env_name):
default_prefix = root_dir
elif os.sep in _default_env:
default_prefix = abspath(_default_env)
else:
for envs_dir in envs_dirs:
default_prefix = join(envs_dir, _default_env)
if isdir(default_prefix):
break
else:
default_prefix = join(envs_dirs[0], _default_env)
# ----- channels -----
# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
    """Return the 'local' channel (conda-build's build root) as a list.

    The discovered URL is cached in the module-level ``local_channel`` list;
    pass ``clear_cache=False`` to skip invalidating the fetched-index cache.
    Returns an empty list when conda-build is not installed.
    """
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    if clear_cache:
        from conda.fetch import fetch_index
        fetch_index.cache = {}
    if local_channel:
        # already discovered on an earlier call
        return local_channel
    from os.path import exists
    from conda.utils import url_path
    try:
        from conda_build.config import croot
        if exists(croot):
            local_channel.append(url_path(croot))
    except ImportError:
        # conda-build not installed -> there is no local channel
        pass
    return local_channel
def get_default_urls():
    """Default channel URLs, honouring a system-wide 'default_channels' override."""
    if isfile(sys_rc_path):
        system_rc = load_condarc(sys_rc_path)
        if 'default_channels' in system_rc:
            return system_rc['default_channels']
    # hard-wired Continuum repositories
    return ['https://repo.continuum.io/pkgs/free',
            'https://repo.continuum.io/pkgs/pro']
def get_rc_urls():
    """Channel URLs listed in the active .condarc ('system' is reserved)."""
    channels = rc.get('channels')
    if channels is None:
        return []
    if 'system' in channels:
        raise RuntimeError("system cannot be used in .condarc")
    return channels
def is_url(url):
    """True when *url* carries an explicit scheme (http://, file://, ...)."""
    parsed = urlparse.urlparse(url)
    return parsed.scheme != ""
@memoized
def binstar_channel_alias(channel_alias):
    """Return *channel_alias*, rewritten to the binstar/anaconda conda domain
    (with the user's auth token appended) when token adding is enabled.

    Falls back to the given alias when binstar_client is unavailable.
    """
    if rc.get('add_anaconda_token',
              rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
        try:
            from binstar_client.utils import get_binstar
            bs = get_binstar()
            # packages are served from the "conda" subdomain of the API host
            channel_alias = bs.domain.replace("api", "conda")
            if not channel_alias.endswith('/'):
                channel_alias += '/'
            if bs.token:
                # embed the auth token into the URL path
                channel_alias += 't/%s/' % bs.token
        except ImportError:
            log.debug("Could not import binstar")
            pass
        except Exception as e:
            stderrlog.info("Warning: could not import binstar_client (%s)" % e)
    return channel_alias
channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
channel_alias = sys_rc['channel_alias']
_binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))
def hide_binstar_tokens(url):
    # Mask any embedded auth token ("t/<token>/") in *url* for safe display.
    return BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)


def remove_binstar_tokens(url):
    # Strip any embedded auth token ("t/<token>/") from *url* entirely.
    return BINSTAR_TOKEN_PAT.sub(r'\1', url)
channel_alias = remove_binstar_tokens(channel_alias.rstrip('/') + '/')
def normalize_urls(urls, platform=None, offline_only=False):
    """Expand a list of channel specs into an OrderedDict that maps
    platform-specific repository URLs to (canonical name, priority) pairs.

    Handles the special channel names 'system', 'defaults', and 'local',
    prefixes bare channel names with the channel alias, and adds both the
    platform and 'noarch' subdirectories for every channel.
    """
    platform = platform or subdir
    defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls())
    alias = binstar_channel_alias(channel_alias)

    def normalize_(url):
        url = url.rstrip('/')
        if is_url(url):
            url_s = canonical_channel_name(url, True)
        else:
            # bare channel name -> prefix with the channel alias
            url_s = url
            url = alias + url
        return url_s, url

    newurls = OrderedDict()
    priority = 0
    while urls:
        url = urls[0]
        urls = urls[1:]
        if url == "system" and rc_path:
            # splice the .condarc channels in at this position
            urls = get_rc_urls() + urls
            continue
        elif url in ("defaults", "system"):
            t_urls = defaults
        elif url == "local":
            t_urls = get_local_urls()
        else:
            t_urls = [url]
        priority += 1
        for url0 in t_urls:
            url_s, url0 = normalize_(url0)
            if offline_only and not url0.startswith('file:'):
                # in offline mode only local file:// channels are usable
                continue
            for plat in (platform, 'noarch'):
                newurls.setdefault('%s/%s/' % (url0, plat), (url_s, priority))
    return newurls
offline = bool(rc.get('offline', False))
def get_channel_urls(platform=None, offline=False):
    """Return the normalized mapping of channel URLs for this install."""
    if os.getenv('CIO_TEST'):
        # internal Continuum test harness hook
        import cio_test
        base_urls = cio_test.base_urls
    else:
        base_urls = ['system'] if 'channels' in rc else ['defaults']
    return normalize_urls(base_urls, platform, offline)
def canonical_channel_name(channel, hide=True, no_unknown=False):
    """Map a channel URL to its short display name ('defaults', 'local',
    'filer', an anaconda.org channel name, or the URL itself when
    unrecognized)."""
    if channel is None:
        return 'defaults' if no_unknown else '<unknown>'
    channel = remove_binstar_tokens(channel).rstrip('/')
    if any(channel.startswith(i) for i in get_default_urls()):
        return 'defaults'
    elif any(channel.startswith(i) for i in get_local_urls(clear_cache=False)):
        return 'local'
    elif channel.startswith('http://filer/'):
        return 'filer'
    elif channel.startswith(channel_alias):
        # an anaconda.org channel -> keep just the user/organization part
        return channel.split(channel_alias, 1)[1]
    elif channel.startswith('http:/'):
        # retry as https, since the alias and default URLs are https
        channel2 = 'https' + channel[4:]
        channel3 = canonical_channel_name(channel2, hide, no_unknown)
        return channel3 if channel3 != channel2 else channel
    else:
        return channel
def url_channel(url):
    """Split a package URL into (channel URL, canonical channel name)."""
    if url is None:
        return None, '<unknown>'
    # drop the trailing "<platform>/<filename>" components
    channel = url.rsplit('/', 2)[0]
    return channel, canonical_channel_name(channel)
# ----- allowed channels -----
def get_allowed_channels():
    """Channels permitted by the system .condarc, or None when unrestricted."""
    if not isfile(sys_rc_path):
        return None
    if sys_rc.get('allow_other_channels', True):
        return None
    base = ['system'] if 'channels' in sys_rc else ['default']
    return normalize_urls(base)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """Proxy configuration mapping from .condarc (exits when malformed)."""
    servers = rc.get('proxy_servers') or {}
    if not isinstance(servers, dict):
        sys.exit("Error: proxy_servers setting not a mapping")
    return servers
# ----- foreign -----
try:
with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
foreign = fi.read().split()
except IOError:
foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']
# ----- misc -----
add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
always_yes = bool(rc.get('always_yes', False))
always_copy = bool(rc.get('always_copy', False))
changeps1 = bool(rc.get('changeps1', True))
use_pip = bool(rc.get('use_pip', True))
binstar_upload = rc.get('anaconda_upload',
rc.get('binstar_upload', None)) # None means ask
allow_softlinks = bool(rc.get('allow_softlinks', True))
self_update = bool(rc.get('self_update', True))
# show channel URLs when displaying what is going to be downloaded
show_channel_urls = rc.get('show_channel_urls', None) # None means letting conda decide
# set packages disallowed to be installed
disallow = set(rc.get('disallow', []))
# packages which are added to a newly created environment by default
create_default_packages = list(rc.get('create_default_packages', []))
update_dependencies = bool(rc.get('update_dependencies', True))
channel_priority = bool(rc.get('channel_priority', True))
# ssl_verify can be a boolean value or a filename string
ssl_verify = rc.get('ssl_verify', True)
try:
track_features = set(rc['track_features'])
except KeyError:
track_features = None
<|code_end|>
| conda/config.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from conda.compat import urlparse
from conda.utils import try_write, memoized, yaml_load
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))
# ----- operating system and architecture -----
_sys_map = {'linux2': 'linux', 'linux': 'linux',
'darwin': 'osx', 'win32': 'win', 'openbsd5': 'openbsd'}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if force_32bit:
bits = 32
if platform == 'linux' and machine() in non_x86_linux_machines:
arch_name = machine()
subdir = 'linux-%s' % arch_name
else:
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
'root_dir',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
def get_rc_path():
    """Return the path of the .condarc file to use, or None.

    Resolution order: the CONDARC environment variable (a value of a
    single space means "no rc file"), then the user rc path, then the
    system rc path; None when nothing is found.
    """
    env_value = os.getenv('CONDARC')
    if env_value == ' ':
        return None
    if env_value:
        return env_value
    candidates = (user_rc_path, sys_rc_path)
    return next((candidate for candidate in candidates if isfile(candidate)),
                None)
# resolve the rc path once at import time; None means no rc file in use
rc_path = get_rc_path()
def load_condarc(path):
    """Parse the YAML condarc file at *path* into a dict.

    Returns an empty dict when *path* is falsy, when the file does not
    exist, or when the file parses to a false value (e.g. empty file).
    """
    if path and isfile(path):
        with open(path) as handle:
            return yaml_load(handle) or {}
    return {}
# parsed contents of the active rc file and of the system rc file
rc = load_condarc(rc_path)
sys_rc = load_condarc(sys_rc_path) if isfile(sys_rc_path) else {}

# ----- local directories -----

# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
                                        rc.get('root_dir', sys.prefix))))
root_writable = try_write(root_dir)
root_env_name = 'root'
def _default_envs_dirs():
    """Default search path for environment directories.

    When the root prefix is not writable, per-user locations come first
    (~/envs is kept for backwards compatibility).
    """
    dirs = [join(root_dir, 'envs')]
    if root_writable:
        return dirs
    # ~/envs for backwards compatibility
    return ['~/.conda/envs', '~/envs'] + dirs
def _pathsep_env(name):
x = os.getenv(name)
if x is None:
return []
res = []
for path in x.split(os.pathsep):
if path == 'DEFAULTS':
for p in rc.get('envs_dirs') or _default_envs_dirs():
res.append(p)
else:
res.append(path)
return res
# final list of environment directories, in priority order; sources, in
# order of precedence: CONDA_ENVS_PATH, the 'envs_dirs' rc setting, then
# the built-in defaults
envs_dirs = [abspath(expanduser(path)) for path in (
    _pathsep_env('CONDA_ENVS_PATH') or
    rc.get('envs_dirs') or
    _default_envs_dirs()
)]
def pkgs_dir_from_envs_dir(envs_dir):
    """Return the package cache directory paired with *envs_dir*.

    The root envs directory shares the root package cache ('pkgs32' when
    CONDA_FORCE_32BIT is in effect); any other envs directory gets a
    private '.pkgs' cache alongside it.
    """
    if abspath(envs_dir) != abspath(join(root_dir, 'envs')):
        return join(envs_dir, '.pkgs')
    return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
# one package cache per envs directory, same order
pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]

# ----- default environment prefix -----

_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, root_env_name):
    default_prefix = root_dir
elif os.sep in _default_env:
    # CONDA_DEFAULT_ENV holds a path, not an environment name
    default_prefix = abspath(_default_env)
else:
    # search the envs directories for a matching name
    for envs_dir in envs_dirs:
        default_prefix = join(envs_dir, _default_env)
        if isdir(default_prefix):
            break
    else:
        # not found anywhere: assume it would live in the first envs dir
        default_prefix = join(envs_dirs[0], _default_env)

# ----- channels -----

# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
    """Return (and cache in ``local_channel``) local build channel URLs.

    NOTE(review): the fetch_index cache is cleared *before* the cached
    ``local_channel`` early return, so even a cache hit still forces the
    repodata index to be refetched on the next fetch.
    """
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    if clear_cache:
        from conda.fetch import fetch_index
        fetch_index.cache = {}
    if local_channel:
        return local_channel
    from os.path import exists
    from conda.utils import url_path
    try:
        from conda_build.config import croot
        if exists(croot):
            local_channel.append(url_path(croot))
    except ImportError:
        # conda-build not installed -> no local build channel
        pass
    return local_channel
def get_default_urls():
    """Return the default channel URLs.

    A 'default_channels' entry in the system .condarc overrides the
    built-in Continuum repositories.
    """
    builtin = ['https://repo.continuum.io/pkgs/free',
               'https://repo.continuum.io/pkgs/pro']
    if not isfile(sys_rc_path):
        return builtin
    return load_condarc(sys_rc_path).get('default_channels', builtin)
def get_rc_urls():
    """Return the 'channels' list from the active rc file ([] if unset).

    The reserved name 'system' may not appear in a .condarc channel list.
    """
    channels = rc.get('channels')
    if channels is None:
        return []
    if 'system' in channels:
        raise RuntimeError("system cannot be used in .condarc")
    return channels
def is_url(url):
    """True when *url* carries an explicit scheme (http, ftp, file, ...)."""
    return bool(urlparse.urlparse(url).scheme)
@memoized
def binstar_channel_alias(channel_alias):
    """Possibly replace *channel_alias* with the binstar/anaconda domain.

    When token forwarding is enabled via the rc ('add_anaconda_token' /
    'add_binstar_token'), asks the binstar client for its domain and,
    when a token is available, appends a 't/<token>/' component so
    authenticated channels can be reached.  Falls back to the given
    alias when the client is unavailable or errors out.
    """
    if rc.get('add_anaconda_token',
              rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
        try:
            from binstar_client.utils import get_binstar
            bs = get_binstar()
            # the API domain maps onto the conda channel domain
            channel_alias = bs.domain.replace("api", "conda")
            if not channel_alias.endswith('/'):
                channel_alias += '/'
            if bs.token:
                channel_alias += 't/%s/' % bs.token
        except ImportError:
            log.debug("Could not import binstar")
            pass
        except Exception as e:
            stderrlog.info("Warning: could not import binstar_client (%s)" % e)
    return channel_alias
# alias used to resolve bare channel names; a locked-down system rc
# (allow_other_channels false) dictates the alias
channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
    channel_alias = sys_rc['channel_alias']

# matches an embedded binstar/anaconda access token ('t/<token>/') in a URL
_binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))

def hide_binstar_tokens(url):
    # replace an embedded access token with the literal '<TOKEN>'
    return BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)

def remove_binstar_tokens(url):
    # strip an embedded access token entirely
    return BINSTAR_TOKEN_PAT.sub(r'\1', url)

channel_alias = remove_binstar_tokens(channel_alias.rstrip('/') + '/')
def normalize_urls(urls, platform=None, offline_only=False):
    """Expand and normalize a list of channel specs into fetchable URLs.

    Returns an OrderedDict mapping '<url>/<platform>/' -> (canonical
    channel name, priority); lower priority numbers were listed earlier,
    and the first occurrence of a URL wins.  The special names 'system',
    'defaults' and 'local' expand to the rc channels, the default
    channels and the conda-build local channel respectively.  With
    offline_only=True, non-file:// URLs are dropped.
    """
    platform = platform or subdir
    defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls())
    alias = binstar_channel_alias(channel_alias)

    def normalize_(url):
        # bare channel names are resolved against the (binstar) alias
        url = url.rstrip('/')
        if is_url(url):
            url_s = canonical_channel_name(url, True)
        else:
            url_s = url
            url = alias + url
        return url_s, url

    newurls = OrderedDict()
    priority = 0
    while urls:
        url = urls[0]
        urls = urls[1:]
        if url == "system" and rc_path:
            # splice the rc channels in place of 'system' and reprocess
            urls = get_rc_urls() + urls
            continue
        elif url in ("defaults", "system"):
            t_urls = defaults
        elif url == "local":
            t_urls = get_local_urls()
        else:
            t_urls = [url]
        priority += 1
        for url0 in t_urls:
            url_s, url0 = normalize_(url0)
            if offline_only and not url0.startswith('file:'):
                continue
            for plat in (platform, 'noarch'):
                # setdefault keeps the first (highest-priority) entry
                newurls.setdefault('%s/%s/' % (url0, plat), (url_s, priority))
    return newurls

# True when the user asked for offline operation via the rc file
offline = bool(rc.get('offline', False))
def get_channel_urls(platform=None, offline=False):
    """Return the normalized URL dict for the configured base channels."""
    if os.getenv('CIO_TEST'):
        # internal Continuum test hook
        import cio_test
        base_urls = cio_test.base_urls
    elif 'channels' in rc:
        base_urls = ['system']
    else:
        base_urls = ['defaults']
    return normalize_urls(base_urls, platform, offline)
def canonical_channel_name(channel, hide=True, no_unknown=False):
    """Map a channel URL to its short, user-facing name.

    Known prefixes collapse to 'defaults', 'local' or 'filer'; URLs under
    the channel alias reduce to their alias-relative name.  An http://
    URL is retried as https:// before being returned verbatim.
    """
    if channel is None:
        return 'defaults' if no_unknown else '<unknown>'
    channel = remove_binstar_tokens(channel).rstrip('/')

    def matches(prefixes):
        return any(channel.startswith(prefix) for prefix in prefixes)

    if matches(get_default_urls()):
        return 'defaults'
    if matches(get_local_urls(clear_cache=False)):
        return 'local'
    if channel.startswith('http://filer/'):
        return 'filer'
    if channel.startswith(channel_alias):
        return channel.split(channel_alias, 1)[1]
    if channel.startswith('http:/'):
        # retry the https:// spelling; keep the original on no improvement
        as_https = 'https' + channel[4:]
        resolved = canonical_channel_name(as_https, hide, no_unknown)
        return resolved if resolved != as_https else channel
    return channel
def url_channel(url):
    """Split a package URL into (channel URL, short channel name)."""
    if url is None:
        return None, '<unknown>'
    channel = url.rsplit('/', 2)[0]
    return channel, canonical_channel_name(channel)
# ----- allowed channels -----
def get_allowed_channels():
    """Return the whitelist of channel URLs enforced by the system rc.

    None means no restriction: either there is no system rc, or it sets
    allow_other_channels to a true value.
    """
    if not isfile(sys_rc_path) or sys_rc.get('allow_other_channels', True):
        return None
    base_urls = ['system'] if 'channels' in sys_rc else ['default']
    return normalize_urls(base_urls)

allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """Return the proxy_servers mapping from the rc file ({} when unset).

    Exits with an error when the setting is present but not a mapping.
    """
    servers = rc.get('proxy_servers') or {}
    if not isinstance(servers, dict):
        sys.exit("Error: proxy_servers setting not a mapping")
    return servers
# ----- foreign -----

try:
    with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
        foreign = fi.read().split()
except IOError:
    # no 'foreign' file: an existing conda-meta directory means nothing is
    # foreign; otherwise assume python is managed outside of conda
    foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']

# ----- misc -----

add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
always_yes = bool(rc.get('always_yes', False))
always_copy = bool(rc.get('always_copy', False))
changeps1 = bool(rc.get('changeps1', True))
use_pip = bool(rc.get('use_pip', True))
binstar_upload = rc.get('anaconda_upload',
                        rc.get('binstar_upload', None))  # None means ask
allow_softlinks = bool(rc.get('allow_softlinks', True))
self_update = bool(rc.get('self_update', True))
# show channel URLs when displaying what is going to be downloaded
show_channel_urls = rc.get('show_channel_urls', None)  # None means letting conda decide
# set packages disallowed to be installed
disallow = set(rc.get('disallow', []))
# packages which are added to a newly created environment by default
create_default_packages = list(rc.get('create_default_packages', []))
update_dependencies = bool(rc.get('update_dependencies', True))
channel_priority = bool(rc.get('channel_priority', True))
# ssl_verify can be a boolean value or a filename string
ssl_verify = rc.get('ssl_verify', True)
try:
    track_features = set(rc['track_features'])
except KeyError:
    # no tracked features configured
    track_features = None
| conda/config.py
--- a/conda/config.py
+++ b/conda/config.py
@@ -113,7 +113,6 @@ def get_rc_path():
def load_condarc(path):
if not path or not isfile(path):
- print("no path!")
return {}
with open(path) as f:
return yaml_load(f) or {} |
conda v4.1.0rc1 doesn't link conda or activate into created env
The trace below demonstrates the problem. As an aside, notice that PS1 is also garbled: doubled round brackets and no space after the '$' sign.
``` bash
ijstokes@petawawa ~ $ which conda
/Users/ijstokes/anaconda/bin/conda
ijstokes@petawawa ~ $ conda -V
conda 4.1.0rc1
ijstokes@petawawa ~ $ conda create -n testenv bokeh notebook
Using Anaconda Cloud api site https://api.anaconda.org
Fetching package metadata ...........
Solving package specifications ............
Package plan for installation in environment /Users/ijstokes/anaconda/envs/testenv:
The following packages will be downloaded:
package | build
---------------------------|-----------------
functools32-3.2.3.2 | py27_0 15 KB
markupsafe-0.23 | py27_0 22 KB
jsonschema-2.5.1 | py27_0 55 KB
singledispatch-3.4.0.3 | py27_0 12 KB
ipython-4.2.0 | py27_0 930 KB
------------------------------------------------------------
Total: 1.0 MB
The following NEW packages will be INSTALLED:
appnope: 0.1.0-py27_0
backports: 1.0-py27_0
backports_abc: 0.4-py27_0
bokeh: 0.11.1-py27_0
configparser: 3.5.0b2-py27_1
decorator: 4.0.9-py27_0
entrypoints: 0.2-py27_1
functools32: 3.2.3.2-py27_0
futures: 3.0.5-py27_0
get_terminal_size: 1.0.0-py27_0
ipykernel: 4.3.1-py27_0
ipython: 4.2.0-py27_0
ipython_genutils: 0.1.0-py27_0
jinja2: 2.8-py27_0
jsonschema: 2.5.1-py27_0
jupyter_client: 4.2.2-py27_0
jupyter_core: 4.1.0-py27_0
markupsafe: 0.23-py27_0
mistune: 0.7.2-py27_1
mkl: 11.3.1-0
nbconvert: 4.2.0-py27_0
nbformat: 4.0.1-py27_0
notebook: 4.2.0-py27_0
numpy: 1.11.0-py27_0
openssl: 1.0.2h-0
path.py: 8.2.1-py27_0
pexpect: 4.0.1-py27_0
pickleshare: 0.5-py27_0
pip: 8.1.1-py27_1
ptyprocess: 0.5-py27_0
pygments: 2.1.3-py27_0
python: 2.7.11-0
python-dateutil: 2.5.2-py27_0
pyyaml: 3.11-py27_1
pyzmq: 15.2.0-py27_0
readline: 6.2-2
requests: 2.9.1-py27_0
setuptools: 20.7.0-py27_0
simplegeneric: 0.8.1-py27_0
singledispatch: 3.4.0.3-py27_0
six: 1.10.0-py27_0
sqlite: 3.9.2-0
ssl_match_hostname: 3.4.0.2-py27_1
terminado: 0.5-py27_1
tk: 8.5.18-0
tornado: 4.3-py27_0
traitlets: 4.2.1-py27_0
wheel: 0.29.0-py27_0
yaml: 0.1.6-0
zlib: 1.2.8-0
Proceed ([y]/n)? y
Fetching packages ...
functools32-3. 100% |############################################################################| Time: 0:00:00 245.53 kB/s
markupsafe-0.2 100% |############################################################################| Time: 0:00:00 267.15 kB/s
jsonschema-2.5 100% |############################################################################| Time: 0:00:01 55.12 kB/s
singledispatch 100% |############################################################################| Time: 0:00:00 204.19 kB/s
ipython-4.2.0- 100% |############################################################################| Time: 0:00:02 352.48 kB/s
Extracting packages ...
[ COMPLETE ]|###############################################################################################| 100%
Linking packages ...
[ COMPLETE ]|###############################################################################################| 100%
#
# To activate this environment, use:
# $ source activate testenv
#
# To deactivate this environment, use:
# $ source deactivate
#
ijstokes@petawawa ~ $ source activate testenv
prepending /Users/ijstokes/anaconda/envs/testenv/bin to PATH
((testenv)) ijstokes@petawawa ~ $conda info -a
-bash: conda: command not found
((testenv)) ijstokes@petawawa ~ $source activate root
-bash: activate: No such file or directory
```
| conda/cli/activate.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (find_parent_shell, shells, run_in)
# True when running on Windows (selects the path-escaping behavior below)
on_win = sys.platform == "win32"
def help(command):
    """Print usage for the activate/deactivate helper commands and exit.

    The wording depends on the parent shell: cmd.exe/powershell users do
    not prefix the commands with 'source' and get Windows directories.
    """
    # sys.argv[1] will be ..checkenv in activate if an environment is already
    # activated
    # get grandparent process name to see which shell we're using
    shell_is_cmdlike = find_parent_shell() in ["cmd.exe", "powershell.exe"]
    if command in ('..activate', '..checkenv'):
        if shell_is_cmdlike:
            sys.exit("""Usage: activate ENV

Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
        sys.exit("""Usage: source activate ENV

Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
    if command == '..deactivate':
        if shell_is_cmdlike:
            sys.exit("""Usage: deactivate

Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH.""")
        sys.exit("""Usage: source deactivate

Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
    sys.exit("No help available for command %s" % sys.argv[1])
def prefix_from_arg(arg, shelldict):
    """Resolve an environment argument (name or path) to a prefix.

    Raises ValueError when no matching environment or directory exists;
    the result is converted to the shell's native path convention.
    """
    if shelldict['sep'] not in arg:
        # bare name: look it up among the known environments
        prefix = find_prefix_name(arg)
        if prefix is None:
            raise ValueError('could not find environment: %s' % arg)
        return shelldict['path_to'](prefix)
    # strip is removing " marks, not \ - look carefully
    native_path = shelldict['path_from'](arg)
    candidate = abspath(native_path.strip("\""))
    if not isdir(candidate):
        raise ValueError('could not find environment: %s' % native_path)
    return shelldict['path_to'](candidate)
def binpath_from_arg(arg, shelldict):
    """Return the list of bin-like directories for environment *arg*.

    On Windows the prefix itself, Library\\bin and Scripts all belong on
    PATH; elsewhere only <prefix>/bin does.  Paths come back in the
    shell's native convention.
    """
    # prefix comes back as platform-native path
    prefix = prefix_from_arg(arg, shelldict=shelldict)
    if sys.platform == 'win32':
        paths = [prefix.rstrip("\\"),
                 os.path.join(prefix, 'Library', 'bin'),
                 os.path.join(prefix, 'Scripts')]
    else:
        paths = [os.path.join(prefix, 'bin')]
    # convert paths to shell-native paths
    return [shelldict['path_to'](p) for p in paths]
def pathlist_to_str(paths, escape_backslashes=True):
    """
    Format a path list, e.g., of bin paths to be added or removed,
    for user-friendly output.
    """
    joined = ' and '.join(paths)
    if not (on_win and escape_backslashes):
        return joined.replace("\\\\", "\\")
    # escape for printing to console - ends up as single \
    return re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', joined)
def get_path(shelldict):
    """Get path using a subprocess call.

    os.getenv path isn't good for us, since bash on windows has a wildly
    different path from Windows.  The value returned is PATH in the
    shell's native representation, not necessarily the platform's.
    """
    result = run_in(shelldict["printpath"], shelldict)
    return result[0]
def main():
    """Entry point for the hidden ``..activate``/``..checkenv``/``..setps1``
    subcommands invoked by the activate/deactivate shell scripts.

    The new PATH (or prompt) value is handed back to the calling shell
    via the final print(); diagnostics go to stderr or sys.exit.
    """
    from conda.config import root_env_name, root_dir, changeps1
    import conda.install

    if '-h' in sys.argv or '--help' in sys.argv:
        help(sys.argv[1])

    path = None
    shell = find_parent_shell(path=False)
    shelldict = shells[shell]
    if sys.argv[1] == '..activate':
        path = get_path(shelldict)
        if len(sys.argv) == 2 or sys.argv[2].lower() == root_env_name.lower():
            # no argument (or 'root'): activate the root environment
            binpath = binpath_from_arg(root_env_name, shelldict=shelldict)
            rootpath = None
        elif len(sys.argv) == 3:
            binpath = binpath_from_arg(sys.argv[2], shelldict=shelldict)
            rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
        else:
            sys.exit("Error: did not expect more than one argument")
        pathlist_str = pathlist_to_str(binpath)
        sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))

        # Clear the root path if it is present
        if rootpath:
            path = path.replace(shelldict['pathsep'].join(rootpath), "")

        # prepend our new entries onto the existing path and make sure that the separator is native
        path = shelldict['pathsep'].join(binpath + [path, ])

    # deactivation is handled completely in shell scripts - it restores backups of env variables.
    # It is done in shell scripts because they handle state much better than we can here.

    elif sys.argv[1] == '..checkenv':
        if len(sys.argv) < 3:
            sys.argv.append(root_env_name)
        if len(sys.argv) > 3:
            sys.exit("Error: did not expect more than one argument.")
        if sys.argv[2].lower() == root_env_name.lower():
            # no need to check root env and try to install a symlink there
            sys.exit(0)
        # this should throw an error and exit if the env or path can't be found.
        try:
            # BUG FIX: resolve the environment *prefix* here.  The previous
            # code resolved the bin directory (binpath[0]) and passed it to
            # symlink_conda, so the conda/activate symlinks landed in the
            # wrong directory and freshly created envs had no working
            # 'conda' or 'activate' on PATH.
            prefix = prefix_from_arg(sys.argv[2], shelldict=shelldict)
        except ValueError as e:
            sys.exit(getattr(e, 'message', e))

        # Make sure an env always has the conda symlink
        try:
            conda.install.symlink_conda(prefix, root_dir, shell)
        except (IOError, OSError) as e:
            if e.errno == errno.EPERM or e.errno == errno.EACCES:
                msg = ("Cannot activate environment {0}, not have write access to conda symlink"
                       .format(sys.argv[2]))
                sys.exit(msg)
            raise
        sys.exit(0)

    elif sys.argv[1] == '..setps1':
        # path is a bit of a misnomer here. It is the prompt setting. However, it is returned
        # below by printing. That is why it is named "path"
        # DO NOT use os.getenv for this. One Windows especially, it shows cmd.exe settings
        # for bash shells. This method uses the shell directly.
        path = os.getenv(shelldict['promptvar'], '')
        # failsafes
        if not path:
            if shelldict['exe'] == 'cmd.exe':
                path = '$P$G'
        # strip off previous prefix, if any:
        path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
        env_path = sys.argv[2]
        if changeps1 and env_path:
            path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)

    else:
        # This means there is a bug in main.py
        raise ValueError("unexpected command")
    # This print is actually what sets the PATH or PROMPT variable. The shell
    # script gets this value, and finishes the job.
    print(path)


if __name__ == '__main__':
    main()
<|code_end|>
| conda/cli/activate.py
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (find_parent_shell, shells, run_in)
# True when running on Windows (selects the path-escaping behavior below)
on_win = sys.platform == "win32"
def help(command):
    """Exit with a usage message for the given helper command.

    cmd.exe/powershell users get the Windows wording (no 'source', and
    Scripts/Library\\bin instead of bin).
    """
    # sys.argv[1] will be ..checkenv in activate if an environment is already
    # activated
    # get grandparent process name to see which shell we're using
    windows_shell = find_parent_shell() in ["cmd.exe", "powershell.exe"]
    if command in ('..activate', '..checkenv'):
        message = ("""Usage: activate ENV

Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""" if windows_shell else """Usage: source activate ENV

Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
    elif command == '..deactivate':
        message = ("""Usage: deactivate

Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH.""" if windows_shell else """Usage: source deactivate

Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
    else:
        message = "No help available for command %s" % sys.argv[1]
    sys.exit(message)
def prefix_from_arg(arg, shelldict):
    """Turn an environment name-or-path argument into a prefix path in
    the shell's native convention; raise ValueError when it does not
    name an existing environment or directory."""
    if shelldict['sep'] in arg:
        # strip is removing " marks, not \ - look carefully
        native_path = shelldict['path_from'](arg)
        resolved = abspath(native_path.strip("\""))
        if not isdir(resolved):
            raise ValueError('could not find environment: %s' % native_path)
        prefix = resolved
    else:
        prefix = find_prefix_name(arg)
        if prefix is None:
            raise ValueError('could not find environment: %s' % arg)
    return shelldict['path_to'](prefix)
def binpath_from_arg(arg, shelldict):
    """List of directories that go on PATH for environment *arg*, in
    the shell's native path convention."""
    # prefix comes back as platform-native path
    prefix = prefix_from_arg(arg, shelldict=shelldict)
    windows = sys.platform == 'win32'
    paths = ([prefix.rstrip("\\"),
              os.path.join(prefix, 'Library', 'bin'),
              os.path.join(prefix, 'Scripts')]
             if windows else
             [os.path.join(prefix, 'bin')])
    # convert paths to shell-native paths
    return [shelldict['path_to'](path) for path in paths]
def pathlist_to_str(paths, escape_backslashes=True):
    """
    Format a path list, e.g., of bin paths to be added or removed,
    for user-friendly output.
    """
    text = ' and '.join(paths)
    if on_win and escape_backslashes:
        # escape for printing to console - ends up as single \
        return re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', text)
    return text.replace("\\\\", "\\")
def get_path(shelldict):
    """Get path using a subprocess call.

    os.getenv path isn't good for us, since bash on windows has a wildly
    different path from Windows.  This returns PATH in the shell's
    native representation, not necessarily the platform's.
    """
    stdout_and_rest = run_in(shelldict["printpath"], shelldict)
    return stdout_and_rest[0]
def main():
    """Entry point for the hidden ``..activate``/``..checkenv``/``..setps1``
    subcommands used by the activate/deactivate shell scripts.

    The resulting PATH (or prompt) value is handed back to the calling
    shell via the final print(); diagnostics go to stderr or sys.exit.
    """
    from conda.config import root_env_name, root_dir, changeps1
    import conda.install

    if '-h' in sys.argv or '--help' in sys.argv:
        help(sys.argv[1])

    path = None
    shell = find_parent_shell(path=False)
    shelldict = shells[shell]
    if sys.argv[1] == '..activate':
        path = get_path(shelldict)
        if len(sys.argv) == 2 or sys.argv[2].lower() == root_env_name.lower():
            # no argument (or 'root'): activate the root environment
            binpath = binpath_from_arg(root_env_name, shelldict=shelldict)
            rootpath = None
        elif len(sys.argv) == 3:
            binpath = binpath_from_arg(sys.argv[2], shelldict=shelldict)
            rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
        else:
            sys.exit("Error: did not expect more than one argument")
        pathlist_str = pathlist_to_str(binpath)
        sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))

        # Clear the root path if it is present
        if rootpath:
            path = path.replace(shelldict['pathsep'].join(rootpath), "")

        # prepend our new entries onto the existing path and make sure that the separator is native
        path = shelldict['pathsep'].join(binpath + [path, ])

    # deactivation is handled completely in shell scripts - it restores backups of env variables.
    # It is done in shell scripts because they handle state much better than we can here.

    elif sys.argv[1] == '..checkenv':
        if len(sys.argv) < 3:
            sys.argv.append(root_env_name)
        if len(sys.argv) > 3:
            sys.exit("Error: did not expect more than one argument.")
        if sys.argv[2].lower() == root_env_name.lower():
            # no need to check root env and try to install a symlink there
            sys.exit(0)
        # this should throw an error and exit if the env or path can't be found.
        try:
            # the *prefix* (not the bin dir) is what symlink_conda needs
            prefix = prefix_from_arg(sys.argv[2], shelldict=shelldict)
        except ValueError as e:
            sys.exit(getattr(e, 'message', e))

        # Make sure an env always has the conda symlink
        try:
            conda.install.symlink_conda(prefix, root_dir, shell)
        except (IOError, OSError) as e:
            if e.errno == errno.EPERM or e.errno == errno.EACCES:
                msg = ("Cannot activate environment {0}, not have write access to conda symlink"
                       .format(sys.argv[2]))
                sys.exit(msg)
            raise
        sys.exit(0)

    elif sys.argv[1] == '..setps1':
        # path is a bit of a misnomer here. It is the prompt setting. However, it is returned
        # below by printing. That is why it is named "path"
        # DO NOT use os.getenv for this. One Windows especially, it shows cmd.exe settings
        # for bash shells. This method uses the shell directly.
        path = os.getenv(shelldict['promptvar'], '')
        # failsafes
        if not path:
            if shelldict['exe'] == 'cmd.exe':
                path = '$P$G'
        # strip off previous prefix, if any:
        path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
        env_path = sys.argv[2]
        if changeps1 and env_path:
            path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)

    else:
        # This means there is a bug in main.py
        raise ValueError("unexpected command")
    # This print is actually what sets the PATH or PROMPT variable. The shell
    # script gets this value, and finishes the job.
    print(path)


if __name__ == '__main__':
    main()
| conda/cli/activate.py
--- a/conda/cli/activate.py
+++ b/conda/cli/activate.py
@@ -147,13 +147,13 @@ def main():
# this should throw an error and exit if the env or path can't be found.
try:
- binpath = binpath_from_arg(sys.argv[2], shelldict=shelldict)
+ prefix = prefix_from_arg(sys.argv[2], shelldict=shelldict)
except ValueError as e:
sys.exit(getattr(e, 'message', e))
# Make sure an env always has the conda symlink
try:
- conda.install.symlink_conda(shelldict['path_from'](binpath[0]), root_dir, shell)
+ conda.install.symlink_conda(prefix, root_dir, shell)
except (IOError, OSError) as e:
if e.errno == errno.EPERM or e.errno == errno.EACCES:
msg = ("Cannot activate environment {0}, not have write access to conda symlink" |
Disable .netrc authentication
Anaconda-Server/support#23
When a user has a `~/.netrc` file present, `requests` will use that file to populate the `Authorization` header (using HTTP Basic auth). When authorization fails, conda falls into an infinite loop retrying.
To fix this problem in `anaconda-client`, I disabled `.netrc` authorization by adding a `NullAuth` to the `requests.Session`.
https://github.com/Anaconda-Server/anaconda-client/pull/298
| conda/connection.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import tempfile
from io import BytesIO
from logging import getLogger
import requests
import conda
from conda.compat import urlparse, StringIO
from conda.config import platform as config_platform, ssl_verify, get_proxy_servers
from conda.utils import gnu_get_libc_version
# number of automatic retries for http(s) requests
RETRIES = 3

log = getLogger(__name__)
stderrlog = getLogger('stderrlog')

# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
               "requests/{requests_ver} "
               "{python}/{py_ver} "
               "{system}/{kernel} {dist}/{ver}")

glibc_ver = gnu_get_libc_version()
# distribution name/version for the User-Agent string, per platform
if config_platform == 'linux':
    distinfo = platform.linux_distribution()
    dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
    dist = 'OSX'
    ver = platform.mac_ver()[0]
else:
    dist = platform.system()
    ver = platform.version()

user_agent = _user_agent.format(conda_ver=conda.__version__,
                                requests_ver=requests.__version__,
                                python=platform.python_implementation(),
                                py_ver=platform.python_version(),
                                system=platform.system(), kernel=platform.release(),
                                dist=dist, ver=ver)
if glibc_ver:
    user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class NullAuth(requests.auth.AuthBase):
    """Prevent requests from falling back to ``~/.netrc`` credentials.

    When a session has no explicit auth, requests consults .netrc and may
    inject a stale Authorization header; on a 401 conda then retries
    endlessly (see Anaconda-Server/support#23).  Installing this no-op
    auth object on the session disables the .netrc fallback while
    leaving per-request auth untouched (request-level auth still
    overrides session auth).
    """
    def __call__(self, request):
        return request


class CondaSession(requests.Session):
    """A requests Session preconfigured for conda: rc proxies, retry
    handling, file://, ftp:// and s3:// adapters, conda's User-Agent and
    the configured SSL verification setting."""

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        # Disable .netrc-based authentication (Anaconda-Server/support#23)
        self.auth = NullAuth()

        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies

        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())

        # Enable s3:// urls
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = user_agent

        self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves s3:// URLs through boto.

    The S3 object is downloaded to a temporary file which backs the
    response's raw stream; close() removes that file.
    """

    def __init__(self):
        super(S3Adapter, self).__init__()
        # path of the temp file holding the downloaded object, if any
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Fetch the S3 object named by request.url; any failure (missing
        boto, inaccessible bucket, missing key) yields a 404 response."""
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto

            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser

            def get(self, section, name, default=None, **kw):
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except:
                    val = default
                return val
            Config.get = get
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_S3_info(request.url)

        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)

        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp

        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            # download into a temp file that backs the response stream
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        # drop the temp file backing the last response
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """
    Convert a S3 url to a tuple of bucket and key
    """
    parsed_url = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed_url.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    return parsed_url.host, parsed_url.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter serving file:// URLs straight from the local
    filesystem."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Build a response backed by the local file; a failing stat()
        yields a 404 whose raw attribute carries the OSError."""
        pathname = url_to_path(request.url)

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        # nothing persistent to release
        pass
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    path = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(path):
        # 'c:...' or 'c|...' -> Windows drive-letter path
        path = path[0] + ':' + path[2:]
    elif not path.startswith(r'\\'):
        # if not a Windows UNC path
        path = '/' + path
    return path

# matches a drive letter at the start of a path ('c:' or 'c|')
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.

    NOTE: the FTP connection is stored on ``self``, so one adapter instance
    must not be used for concurrent sends.
    '''

    def __init__(self):
        super(FTPAdapter, self).__init__()
        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)
        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)
        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)
        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)
        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()
        # Get the method and attempt to find the function to call.
        # Raises KeyError for HTTP verbs with no FTP equivalent (e.g. POST).
        resp = self.func_table[request.method](path, request)
        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()
        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()
        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close
        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)
        # Split into the path and the filename.
        path, filename = os.path.split(path)
        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)
        # Close the connection and build the response.
        self.conn.close()
        response = build_binary_response(request, BytesIO(), code)
        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()
        # Alias the close method.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')
        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]
                # Decode the base64 encoded string. b64decode returns bytes on
                # Python 3, so decode it to text; otherwise the str split(':')
                # below raises TypeError.
                decoded = base64.b64decode(encoded).decode('utf-8')
                # The string is of the form 'username:password'. Split on the
                # colon.
                components = decoded.split(':')
                username = components[0]
                password = components[1]
                return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path
        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]
        host = parsed.hostname
        # ftplib treats port 0 as "use the default FTP port (21)".
        port = parsed.port or 0
        return (host, port, path)
def data_callback_factory(variable):
    '''Returns a callback suitable for use by the FTP library. This callback
    will repeatedly save data into the variable provided to this function. This
    variable should be a file-like structure.'''
    def callback(chunk):
        # Append each chunk as it arrives; the FTP library drives the calls.
        variable.write(chunk)
    return callback
class AuthError(Exception):
    '''Denotes an error with authentication.'''


def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    return build_response(request, data, code, 'ascii')


def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    return build_response(request, data, code, None)


def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()
    response.encoding = encoding
    response.raw = data
    # Rewind the file-like payload so readers start from the beginning.
    response.raw.seek(0)
    response.url = request.url
    response.request = request
    # ftplib reply strings look like "226 Transfer complete".
    response.status_code = int(code.split()[0])
    # Give any registered response hooks a chance to replace the response.
    return requests.hooks.dispatch_hook('response', request.hooks, response)
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])
    # cgi.parse_multipart on Python 3 expects the boundary as bytes, while
    # parse_header hands it back as str; encoding it keeps both majors happy
    # (on Python 2 str.encode('ascii') is a no-op for ASCII boundaries).
    if 'boundary' in pdict:
        pdict['boundary'] = pdict['boundary'].encode('ascii')
    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)
    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()
    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # The parts are bytes, so join with b'' -- ''.join would raise TypeError
    # on Python 3 (and b''.join is equally valid on Python 2, where str is
    # bytes). NOTE(review): some Python 3 versions decode text parts to str;
    # confirm against the supported interpreter range.
    buf.write(b''.join(filedata))
    buf.seek(0)
    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ...              '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U
    # Test "is not None" (not truthiness) so that empty strings and a port
    # of 0 still appear in the result.
    pieces = []
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
<|code_end|>
| conda/connection.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import tempfile
from io import BytesIO
from logging import getLogger
import requests
import conda
from conda.compat import urlparse, StringIO
from conda.config import platform as config_platform, ssl_verify, get_proxy_servers
from conda.utils import gnu_get_libc_version
RETRIES = 3  # default number of times CondaSession retries an HTTP request

log = getLogger(__name__)
stderrlog = getLogger('stderrlog')

# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
               "requests/{requests_ver} "
               "{python}/{py_ver} "
               "{system}/{kernel} {dist}/{ver}")

glibc_ver = gnu_get_libc_version()
# Pick a human-readable distribution name/version for the User-Agent string.
if config_platform == 'linux':
    distinfo = platform.linux_distribution()
    dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
    dist = 'OSX'
    ver = platform.mac_ver()[0]
else:
    dist = platform.system()
    ver = platform.version()

user_agent = _user_agent.format(conda_ver=conda.__version__,
                                requests_ver=requests.__version__,
                                python=platform.python_implementation(),
                                py_ver=platform.python_version(),
                                system=platform.system(), kernel=platform.release(),
                                dist=dist, ver=ver)
if glibc_ver:
    # glibc is only appended when detectable (i.e. on glibc-based Linux).
    user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests Session preconfigured for conda: proxy servers from conda
    config, HTTP(S) retries, and extra adapters for file://, ftp:// and
    s3:// URLs."""

    # Class-level default; individual requests may still pass their own timeout.
    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies
        self.trust_env = False  # disable .netrc file
        # also disables REQUESTS_CA_BUNDLE, CURL_CA_BUNDLE env variables
        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
        # Enable s3:// urls
        self.mount("s3://", S3Adapter())
        self.headers['User-Agent'] = user_agent
        self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter that fetches s3:// URLs through boto (imported
    lazily so boto is only required when an S3 channel is actually used)."""

    def __init__(self):
        super(S3Adapter, self).__init__()
        # Path of the downloaded object; removed again in close().
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            import boto
            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser

            def get(self, section, name, default=None, **kw):
                # Replacement for Config.get that falls back to `default`
                # instead of raising when the option is missing.
                # NOTE(review): deliberately broad bare except, mirroring the
                # upstream workaround -- confirm before narrowing.
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except:
                    val = default
                return val
            Config.get = get
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp
        conn = boto.connect_s3()
        bucket_name, key_string = url_to_S3_info(request.url)
        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)
        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp
        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })
            # Download into a named temp file; the handle becomes the raw
            # response body and the file itself is cleaned up in close().
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404
        return resp

    def close(self):
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """
    Convert a S3 url to a tuple of bucket and key
    """
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    # The host is the bucket name; the remaining path is the object key.
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves ``file://`` URLs from the local disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Build a Response from the local file named by ``request.url``."""
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        pathname = url_to_path(request.url)
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # Missing/unreadable file: report 404 and expose the error object.
            resp.status_code = 404
            resp.raw = exc
            return resp
        resp.headers = requests.structures.CaseInsensitiveDict({
            "Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain",
            "Content-Length": stats.st_size,
            "Last-Modified": email.utils.formatdate(stats.st_mtime, usegmt=True),
        })
        resp.raw = open(pathname, "rb")
        resp.close = resp.raw.close
        return resp

    def close(self):
        """Nothing to release for local files."""
        pass
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    stripped = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(stripped):
        # Windows drive spec: restore the "X:" prefix (handles "X:" and "X|").
        return stripped[0] + ':' + stripped[2:]
    if stripped.startswith(r'\\'):
        # Windows UNC path: leave untouched.
        return stripped
    return '/' + stripped

# Matches a Windows drive spec ("c:" or "c|") at the start of a path.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.

    NOTE: the FTP connection is stored on ``self``, so one adapter instance
    must not be used for concurrent sends.
    '''

    def __init__(self):
        super(FTPAdapter, self).__init__()
        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)
        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)
        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)
        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)
        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()
        # Get the method and attempt to find the function to call.
        # Raises KeyError for HTTP verbs with no FTP equivalent (e.g. POST).
        resp = self.func_table[request.method](path, request)
        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()
        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()
        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close
        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)
        # Split into the path and the filename.
        path, filename = os.path.split(path)
        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)
        # Close the connection and build the response.
        self.conn.close()
        response = build_binary_response(request, BytesIO(), code)
        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()
        # Alias the close method.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')
        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]
                # Decode the base64 encoded string. b64decode returns bytes on
                # Python 3, so decode it to text; otherwise the str split(':')
                # below raises TypeError.
                decoded = base64.b64decode(encoded).decode('utf-8')
                # The string is of the form 'username:password'. Split on the
                # colon.
                components = decoded.split(':')
                username = components[0]
                password = components[1]
                return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path
        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]
        host = parsed.hostname
        # ftplib treats port 0 as "use the default FTP port (21)".
        port = parsed.port or 0
        return (host, port, path)
def data_callback_factory(variable):
    '''Returns a callback suitable for use by the FTP library. This callback
    will repeatedly save data into the variable provided to this function. This
    variable should be a file-like structure.'''
    def callback(chunk):
        # Append each chunk as it arrives; the FTP library drives the calls.
        variable.write(chunk)
    return callback
class AuthError(Exception):
    '''Denotes an error with authentication.'''


def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    return build_response(request, data, code, 'ascii')


def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    return build_response(request, data, code, None)


def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()
    response.encoding = encoding
    response.raw = data
    # Rewind the file-like payload so readers start from the beginning.
    response.raw.seek(0)
    response.url = request.url
    response.request = request
    # ftplib reply strings look like "226 Transfer complete".
    response.status_code = int(code.split()[0])
    # Give any registered response hooks a chance to replace the response.
    return requests.hooks.dispatch_hook('response', request.hooks, response)
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])
    # cgi.parse_multipart on Python 3 expects the boundary as bytes, while
    # parse_header hands it back as str; encoding it keeps both majors happy
    # (on Python 2 str.encode('ascii') is a no-op for ASCII boundaries).
    if 'boundary' in pdict:
        pdict['boundary'] = pdict['boundary'].encode('ascii')
    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)
    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()
    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # The parts are bytes, so join with b'' -- ''.join would raise TypeError
    # on Python 3 (and b''.join is equally valid on Python 2, where str is
    # bytes). NOTE(review): some Python 3 versions decode text parts to str;
    # confirm against the supported interpreter range.
    buf.write(b''.join(filedata))
    buf.seek(0)
    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ...              '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U
    # Test "is not None" (not truthiness) so that empty strings and a port
    # of 0 still appear in the result.
    pieces = []
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
| conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -91,6 +91,8 @@ def __init__(self, *args, **kwargs):
proxies = get_proxy_servers()
if proxies:
self.proxies = proxies
+ self.trust_env = False # disable .netrc file
+ # also disables REQUESTS_CA_BUNDLE, CURL_CA_BUNDLE env variables
# Configure retries
if retries: |
conda.install imports other conda modules
I just noted https://github.com/conda/conda/blob/master/conda/install.py#L47,
however, it has always been a policy that `conda.install` has to be standalone, see:
https://github.com/conda/conda/blob/master/conda/install.py#L22
The problem is that all of our installers depend on this policy: they use `conda.install` as a standalone module for bootstrapping. This means that even though we can currently build installers, they are broken.
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
from conda.compat import iteritems, iterkeys
from conda.config import url_channel, pkgs_dirs, root_dir
from conda.utils import url_path
try:
    from conda.lock import Locked
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in context manager: same interface as conda.lock.Locked
        # but performs no actual locking.
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass
try:
    from conda.utils import win_path_to_unix
except ImportError:
    # Standalone fallback (the installer runs this module without conda).
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

        def translation(found_path):
            # Turn "C:\dir\file" into "<root_prefix>/C/dir/file".
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        # ";/" separators between converted paths become ":" (unix PATH style).
        return re.sub(path_re, translation, path).replace(";/", ":/")
# Make sure the script stays standalone for the installer
try:
    from conda.config import remove_binstar_tokens
except ImportError:
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        # Identity fallback: return the url unchanged.
        return url

# True when running on Windows; gates all the win32 ctypes code below.
on_win = bool(sys.platform == "win32")
if on_win:
    import ctypes
    from ctypes import wintypes

    # Bind the Win32 hard-link primitive via ctypes.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        # CreateSymbolicLinkW is absent on Windows XP, hence the fallback.
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        # NOTE(review): this import breaks the "standalone module" policy
        # stated in the module docstring -- confirm the installer never
        # reaches this code path.
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            # Ignore "already exists"; re-raise anything else.
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
            # Make the new file executable
            # http://stackoverflow.com/a/30463972/1170370
            mode = os.stat(dst).st_mode
            mode |= (mode & 292) >> 2  # copy R bits to X
            os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')


class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None

log.addHandler(NullHandler())

# Link type codes used throughout this module (see _link below).
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` using the requested link type (LINK_HARD,
    LINK_SOFT or LINK_COPY), dispatching to the win32 primitives above
    when running on Windows."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback: log (rather than raise) a warning
    describing why `path` could not be removed."""
    # log.warning is the non-deprecated spelling of log.warn; behavior is
    # otherwise identical.
    if exc_info[1].errno == errno.EACCES:
        log.warning("Cannot remove, permission denied: {0}".format(path))
    elif exc_info[1].errno == errno.ENOTEMPTY:
        log.warning("Cannot remove, not empty: {0}".format(path))
    else:
        log.warning("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # Retry loop: each pass tries progressively harder and then
            # sleeps i seconds before the next attempt.
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # Windows-only escalation: clear read-only bits, then
                        # shell out to `rd`, then move to trash.
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        if trash:
                            try:
                                # NOTE(review): move_path_to_trash is defined
                                # elsewhere in this module -- not visible here.
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): the line below is unreachable
                                # (it follows a bare `raise`); looks like
                                # leftover code -- confirm intent upstream.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            # (delete_linked_data_any is defined elsewhere in this module).
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:
        # Raised when `path` does not exist, is not empty, or is not a
        # directory -- silently ignored by design.
        return
def yield_lines(path):
    """
    Yield the stripped, non-empty, non-comment lines of the text file at
    `path`.  Lines that are blank or start with '#' are skipped.

    Bug fix: the original iterated `open(path)` without ever closing it,
    leaking the file handle until garbage collection; a `with` block now
    closes it deterministically.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    Parse a `has_prefix` file and return a dict mapping each filename to a
    (placeholder, mode) tuple.  A missing file yields an empty dict.
    """
    result = {}
    try:
        for raw in yield_lines(path):
            try:
                # new-style line: "<placeholder> <mode> <filename>"
                placeholder, mode, filename = [token.strip('"\'') for token in
                                               shlex.split(raw, posix=False)]
                result[filename] = (placeholder, mode)
            except ValueError:
                # legacy line: the whole line is the filename; assume the
                # default placeholder and text mode
                result[raw] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return result
class PaddingError(Exception):
    """Raised by binary_replace() when the replacement is longer than the
    placeholder, leaving negative room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null
    characters.  All input arguments are expected to be bytes objects.
    """
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')

    def _replace_padded(match):
        # pad with one NUL per byte saved, so the overall length is kept
        occurrences = match.group().count(a)
        padding = (len(a) - len(b)) * occurrences
        if padding < 0:
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    replaced = pattern.sub(_replace_padded, data)
    assert len(replaced) == len(data)
    return replaced
def replace_long_shebang(mode, data):
    """
    Rewrite an over-long `#!` line (>127 bytes, the kernel's limit) in a
    text file to `#!/usr/bin/env <name>`.  Binary data, short shebangs and
    shebang-less data are returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if
        # text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    whole_shebang, executable, options = match.groups()
    if len(whole_shebang) > 127:
        interpreter_name = executable.decode('utf-8').split('/')[-1]
        new_shebang = '#!/usr/bin/env {0}{1}'.format(interpreter_name,
                                                     options.decode('utf-8'))
        data = data.replace(whole_shebang, new_shebang.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """
    Replace `placeholder` with `new_prefix` inside `data` (bytes).

    mode 'text' does a plain substitution; 'binary' uses binary_replace(),
    which keeps the total length constant by null-padding.  Any other mode
    aborts the process.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # Bug fix: the original called sys.exit("Invalid mode:" % mode) --
        # a %-format with no placeholder, which raised TypeError instead of
        # exiting with the intended message.
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """
    Rewrite the file at `path` in place, replacing `placeholder` with
    `new_prefix` (see replace_prefix for 'text'/'binary' modes), and on
    non-Windows also shortening over-long shebang lines.  No-op when the
    content would not change.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # the recorded placeholder uses unix-style path separators, so the
        # new prefix must be converted to unix-style separators to match
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    # restore the original permission bits on the rewritten file
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the package name of `dist` ('[channel::]name-version-build')."""
    without_channel = dist.split('::', 1)[-1]
    # strip the trailing '-version-build' fields
    return without_channel.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Reads info/index.json from `info_dir`, merges `extra_info` over it,
    records the result in the in-memory linked-data cache, and writes it
    to <prefix>/conda-meta/<dist>.json.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    load_linked_data(prefix, dist, meta)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed environment names are treated as internal;
        # no shortcuts are created for them
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional; a missing or broken install must not abort
        # package linking
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # shortcut creation failure is non-fatal; report and continue
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    The script is looked up as <prefix>/bin/.<name>-<action>.sh (or
    Scripts\\.<name>-<action>.bat on Windows); a missing script counts as
    success.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no COMSPEC means we cannot run .bat files at all
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # note: this mutates the current process environment, which the child
    # inherits via env=
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first cached URL recorded for `dist`, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0]
def read_icondata(source_dir):
    """
    Return the base64-encoded contents of <source_dir>/info/icon.png as a
    str, or None when the package has no icon.

    Bug fix: the original read via `open(...).read()` and never closed the
    handle; the `with` block closes it deterministically.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    """Return the union of filenames listed in info/no_link and
    info/no_softlink; files missing from the package contribute nothing."""
    names = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, listing)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """
    Expose the root environment's conda/activate/deactivate scripts inside
    `prefix` — symlinks on unix, redirect .bat files on Windows.
    """
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda/activate/deactivate scripts from <root_dir>/<where>
    into <prefix>/<where> using symlink_fn(src, dst)."""
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    for script in ("conda", "activate", "deactivate"):
        src = join(root_dir, where, script)
        dst = join(target_dir, script)
        # kill any stale link first; if the file is in use (Windows) the
        # removal fails and we skip creating a replacement
        if os.path.lexists(dst):
            os.remove(dst)
        if not os.path.lexists(dst):
            symlink_fn(src, dst)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether hard-linking works between `pkgs_dir` and `prefix` by
    hard-linking one small known file into the prefix and removing it
    again.  Returns True only when a genuine hard link (not a fallback
    symlink) was created.
    """
    dist = _dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # always clean up the probe file, and the prefix too if the probe
        # was the only thing in it
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the cache tables exist before mutating them
    dist = url.rsplit('/', 1)[-1]
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # an extracted copy only counts if its metadata files are intact
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    url = remove_binstar_tokens(url)
    channel, schannel = url_channel(url)
    # channel prefix ('' for defaults, 'channel::' otherwise)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # index by both filesystem path and file:// URL form
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            # persist the URL so future runs can re-attribute the channel
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # urls.txt is append-only (newest last); scan newest-first so
            # the most recent channel attribution wins
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
            # packages on disk but absent from urls.txt get an unknown
            # channel
            for fn in os.listdir(pdir):
                add_cached_package(pdir, '<unknown>/' + fn)
        except IOError:
            continue
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for `url`, or None if unknown."""
    package_cache()  # ensure the lookup tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # already cached: reuse whichever directory holds it now
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = _dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # pass 0: only accept a directory holding no same-named package;
            # pass 1: accept the first directory and report the conflicting
            # channel-qualified name so the caller can evict it
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in iteritems(package_cache()) if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in
    the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both the path-keyed and URL-keyed lookup entries
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in iteritems(package_cache()) if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # the tarball is still cached: keep the record, forget the dirs
        rec['dirs'] = []
    else:
        # nothing of the package remains: drop the cache entry entirely
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            # NOTE(review): extractall() trusts member paths inside the
            # archive; a malicious tarball could write outside temp_path.
            # Packages are assumed trusted here -- confirm before reusing
            # this code on untrusted archives.
            t.extractall(path=temp_path)
        # extract into a .tmp dir first, then swap into place, so a partial
        # extraction never looks like a finished one
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """
    Load (or record) the conda-meta entry for `dist` in `prefix` into the
    in-memory linked_data_ cache and return the record; returns None when
    the conda-meta .json file cannot be read.
    """
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
        # the .json filename carries no channel; recover it from the URL
        _, schannel = url_channel(rec.get('url'))
    else:
        # ensure linked_data_[prefix] exists before we index into it below
        linked_data(prefix)
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    rec['fn'] = dname + '.tar.bz2'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop `dist` from the in-memory linked-data cache for `prefix`, and,
    when `delete` is true, remove its conda-meta .json file as well."""
    cached = linked_data_.get(prefix)
    if cached and dist in cached:
        del cached[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # `path` is a known prefix and `dist` the entry beneath it
                delete_linked_data(path, dist)
                return True
            else:
                # `path` itself is a cached prefix; drop it wholesale
                del linked_data_[path]
                return True
        # walk up one path component and try again
        path, dist = os.path.split(path)
        if not dist:
            # reached the filesystem root without a match
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # strip '.json' to recover the dist name
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(iterkeys(linked_data(prefix)))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
    """Best-effort removal of every package cache's .trash directory.

    `prefix` is accepted for API compatibility and is not used.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as exc:
            # failure is fine; the trash will be retried on next use
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, exc))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Returns True on success, False when no package cache's trash directory
    could accept the path.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                continue
        # unique subdirectory so same-named paths never collide in the trash
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the path's directory structure (relative to root_dir) in
        # the trash.  NOTE(review): root_dir is not defined in this module
        # view -- presumably imported from conda.config; confirm.
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # the path is gone from its prefix; purge it from the cache too
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs pre-link, links/copies every file, rewrites prefix placeholders,
    creates menu shortcuts, runs post-link, and finally writes the
    conda-meta record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # files that need prefix rewriting, no_link files, and symlinks
            # must be real copies, never hard links
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # the special '_cache' package gets no prefix rewriting,
            # scripts, menus, or metadata
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a noarch-style .files sidecar overrides the packaged file list
            alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs pre-unlink, removes shortcuts and files, drops the conda-meta
    record, then prunes any directories left empty.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor directory (inside the prefix) of removed
        # files so empty ones can be pruned
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest first, so children empty out before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt (if any) to stdout and
    delete the file afterwards."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fh:
            contents = fh.read()
        sys.stdout.write(contents)
    except IOError:
        # no messages file: nothing to show
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Return the sorted list of distributions to remove so that only one
    distribution per package name remains.  `keep_dists` is an iterable of
    distributions that must never be removed.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # `group` holds every distribution sharing one package name
        if len(group) <= 1:
            # a unique name never needs pruning
            continue
        if group & keep_dists:
            # some members must be kept: remove only the rest
            removals |= group - keep_dists
        else:
            # nothing pinned: keep just the highest-sorting member
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """
    Minimal CLI used by the self-extracting installer to link extracted
    packages into a prefix.
    """
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        # Bug fix: extracted() takes no arguments -- it scans the configured
        # package caches itself; the old call extracted(pkgs_dir) raised a
        # TypeError.
        idists = sorted(extracted())
    if not idists:
        # nothing to link; avoid an IndexError on idists[0] below
        return
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        # Bug fix: link()'s signature is link(prefix, dist, linktype, index);
        # the old call link(pkgs_dir, prefix, dist, linktype) shifted every
        # argument by one.
        link(prefix, dist, linktype)
    messages(prefix)
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
# When running as part of conda proper, use the real implementations; when
# running as a standalone installer script (conda not importable), fall back
# to minimal stand-ins defined below.
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op context manager: the installer is single-process, so no
        # locking is needed
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa
        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # file:// URL for a local path; on Windows the drive colon becomes '|'
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return None, 'defaults'
    # We don't use the package cache or trash logic in the installer
    pkgs_dirs = []
on_win = bool(sys.platform == "win32")
if on_win:
    # Windows lacks os.link/os.symlink semantics we need, so bind the
    # kernel32 primitives directly via ctypes.
    import ctypes
    from ctypes import wintypes
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is unavailable on Windows XP
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
    """Special function for Windows XP where the `CreateSymbolicLink`
    function is not available.

    Simply creates a `.bat` file at `dst` which calls `src` together with
    all command line arguments.

    Works of course only with callable files, e.g. `.bat` or `.exe` files.
    """
    from conda.utils import shells
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
            pass
        else:
            raise
    if 'cmd.exe' in shell.lower():
        # bat file redirect
        with open(dst+'.bat', 'w') as f:
            f.write('@echo off\n"%s" %%*\n' % src)
    elif 'powershell' in shell.lower():
        # TODO: probably need one here for powershell at some point
        pass
    else:
        # This one is for bash/cygwin/msys
        with open(dst, "w") as f:
            f.write("#!/usr/bin/env bash \n")
            if src.endswith("conda"):
                # conda.exe must be invoked directly, not sourced
                f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
            else:
                f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2    # copy R bits to X
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
# silence this module's logger by default; applications may add handlers
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` as a hard link, soft link, or copy,
    dispatching to the win32 helpers on Windows."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
        return
    if linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
        return
    if linktype == LINK_COPY:
        # preserve relative symlinks as symlinks rather than copying their
        # targets (absolute links and Windows always get a real copy)
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback that logs (instead of raising) the
    reason a path could not be removed."""
    error = exc_info[1]
    if error.errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif error.errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path.

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the
    trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # last-ditch: let cmd.exe try the removal
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                # Bug fix: a bare `raise` used to sit here,
                                # which aborted all remaining retries and made
                                # the message below unreachable.  Deletion is
                                # best-effort: record the failure and fall
                                # through to the retry loop instead.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """Remove `path` if it is an empty directory; otherwise do nothing.

    Both a missing directory and a non-empty one are silently tolerated.
    """
    try:
        os.rmdir(path)
    except OSError:
        # not there, or not empty -- either way, nothing to do
        pass
def yield_lines(path):
    """
    Yield the stripped, non-empty, non-comment lines of the text file at
    `path`.  Lines that are blank or start with '#' are skipped.

    Bug fix: the original iterated `open(path)` without ever closing it,
    leaking the file handle until garbage collection; a `with` block now
    closes it deterministically.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    A missing file yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # new-style line: "<placeholder> <mode> <filename>"
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # legacy line: the whole line is the filename; assume the
                # default placeholder and text mode
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the replacement is longer than the
    placeholder, leaving negative room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def replace(match):
        # pad with one NUL per byte saved so the match keeps its length
        occurances = match.group().count(a)
        padding = (len(a) - len(b))*occurances
        if padding < 0:
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding
    # match runs of "<a>...<NUL>" so the padding lands before the terminator
    pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    res = pat.sub(replace, data)
    # the rewrite must never change the total size of the binary
    assert len(res) == len(data)
    return res
def replace_long_shebang(mode, data):
    """Rewrite a shebang longer than 127 bytes to use ``/usr/bin/env``.

    Only text-mode data is inspected; anything else is returned as-is.
    Interpreter paths past the OS shebang limit are replaced with an
    env-based lookup of the bare interpreter name.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    whole_shebang, executable, options = match.groups()
    if len(whole_shebang) <= 127:
        return data
    interpreter = executable.decode('utf-8').split('/')[-1]
    replacement = '#!/usr/bin/env {0}{1}'.format(interpreter,
                                                 options.decode('utf-8'))
    return data.replace(whole_shebang, replacement.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* inside *data* (bytes).

    mode='text' does a plain byte substitution; mode='binary' uses
    binary_replace() so the data length stays constant.  Any other mode
    aborts the process with an error message.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # Bug fix: the original ``"Invalid mode:" % mode`` has no %s
        # conversion, so it raised TypeError instead of exiting cleanly.
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the embedded build-time placeholder in the file at *path*
    to *new_prefix*, preserving the file's permission bits.

    No-op when the file content would be unchanged.  On non-Windows
    platforms, overlong shebang lines are also rewritten.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    # restore the original permission bits on the rewritten file
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the bare package name of *dist*, dropping any channel
    prefix and the trailing version/build fields."""
    without_channel = dist.split('::', 1)[-1]
    return without_channel.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    The package's info/index.json is merged with *extra_info* and written
    to <prefix>/conda-meta/<dist>.json.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # only update the package cache if it is loaded for this prefix.
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.  Menu creation is
    best-effort: failures are logged, never raised.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except Exception:
        # Bug fix: was a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # Bug fix: same bare ``except:`` narrowed here.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    # a package without the script counts as success
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no cmd.exe available to run a .bat script
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # NOTE: this mutates os.environ directly (env is not a copy)
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache,
    or None when no URL is known."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    if not urls:
        return None
    return urls[0]
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) base64-encoded as text,
    or None when the package ships no icon."""
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            raw = fi.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Collect the file names listed in info/no_link and info/no_softlink.

    Files listed there are copied instead of linked at install time.
    Missing listing files are treated as empty.
    """
    names = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, listing)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Expose the root environment's conda/activate/deactivate scripts
    inside *prefix* — symlinks on Unix, .bat redirect files on Windows."""
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Create links for the conda entry-point scripts from *root_dir*
    into <prefix>/<where>, replacing any stale links first."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        # try to kill stale links if they exist
        if os.path.lexists(prefix_file):
            os.remove(prefix_file)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(prefix_file):
            symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether a real hard link can be made from the package cache
    into *prefix*; returns True only when hard-linking works.

    A throwaway link is created and removed again; any OSError (or a
    symlink created in place of a hard link) yields False.
    """
    dist = _dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction

# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.

package_cache_ = {}  # maps 'schannel::dist' key -> dict(files=[...], dirs=[...], urls=[...])
fname_table_ = {}    # maps package path / file URL -> channel prefix ('' for defaults)
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the cache dictionaries are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was supplied instead of a URL
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    # xpkg: the fetched tarball, if present
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # xdir: the extracted copy, only counted when it looks complete
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        # neither a tarball nor an extracted copy exists; nothing to record
        return
    url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion: add_cached_package calls back into package_cache()
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # urls.txt lines are scanned newest-first so the most recent
            # URL for a package is recorded first
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # also pick up packages present on disk but absent from urls.txt
        for fn in os.listdir(pdir):
            add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded in the cache for *url*, or
    None when the URL is not known to the cache."""
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # already cached: reuse whichever directory currently holds it
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = _dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # first pass: only accept a dir with no same-named package;
            # second pass: accept the first dir and report the conflict
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.

    The in-memory cache entries (package_cache_ and fname_table_) are
    dropped along with the on-disk files.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # the tarball remains cached; just forget the extracted copies
        rec['dirs'] = []
    else:
        # nothing left of this package at all; drop the cache entry
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        # extract into a .tmp dir first, then rename, so a partially
        # extracted package never masquerades as a complete one
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}  # maps prefix -> {dist: metadata dict loaded from conda-meta}
def load_linked_data(prefix, dist, rec=None):
    """Load (or register) the conda-meta record for *dist* in *prefix*.

    When *rec* is None the record is read from the conda-meta .json
    file (returning None if it is missing) and the channel is derived
    from the record's URL; otherwise the supplied record is registered
    directly.  The record is annotated with 'schannel' and 'fn' and
    stored in the linked_data_ cache.
    """
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
        _, schannel = url_channel(rec.get('url'))
    else:
        # ensure the per-prefix cache dict exists before inserting
        linked_data(prefix)
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    rec['fn'] = dname + '.tar.bz2'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the linked-data cache for *prefix*; when
    delete=True also remove its conda-meta .json file from disk."""
    recs = linked_data_.get(prefix)
    if recs and dist in recs:
        del recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix.

    Walks up the path components until a cached prefix is found: then
    either that single dist's record (when path pointed inside the
    prefix) or the whole prefix cache entry is removed.  Returns True
    when something was deleted, False otherwise.
    '''
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                delete_linked_data(path, dist)
                return True
            else:
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # reached the filesystem root without finding a cached prefix
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # delegates to the linked_data cache, which lazily loads conda-meta
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            # every conda-meta/*.json corresponds to one linked dist
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
    """Best-effort removal of every package directory's .trash folder.

    *prefix* is accepted but unused.  Failures are logged at debug level
    and never raised.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f) if f else prefix)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each package directory's .trash folder in turn, recreating the
    path's layout (relative to root_dir) under a fresh temp directory
    inside the trash.  Returns True on success, False when no trash
    location accepted the move.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                continue
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # the path is gone from its prefix; drop any cached metadata
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links/copies every file listed in
    info/files, rewrites prefix placeholders, creates menu entries, runs
    the post-link script, and finally writes the conda-meta record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-rewritten, no-link, and symlinked files must be copied
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # assemble the conda-meta record for this dist
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a .files sidecar (if present) overrides the package file list
            alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes menu entries and every installed
    file, deletes the conda-meta record, and prunes any directories left
    empty.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # collect every ancestor directory (within the prefix) that may
        # now be empty, and remove the empty ones deepest-first
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt (if any) to stdout,
    then remove the file regardless of whether it could be read."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    protected = set(keep_dists)
    by_name = defaultdict(set)  # package name -> all dists sharing it
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)

    removals = set()
    for group in by_name.values():
        if len(group) == 1:
            # only one dist for this name: nothing to prune
            continue
        if group & protected:
            # keep the protected members, drop every other duplicate
            removals.update(group - protected)
        else:
            # nothing protected: keep only the highest-sorting dist
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """CLI entry point used by the self-extracting installer: link every
    extracted package (or those listed via --file) into a prefix."""
    from optparse import OptionParser

    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")

    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    if opts.verbose:
        print("prefix: %r" % prefix)
    # register the installer's package directory with the cache machinery
    pkgs_dirs.append(pkgs_dir)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    # probe once whether hard links work; otherwise fall back to copying
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)

    messages(prefix)

    # prune older duplicate metadata so each name keeps one record
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -44,12 +44,10 @@
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
-from conda.compat import iteritems, iterkeys
-from conda.config import url_channel, pkgs_dirs, root_dir
-from conda.utils import url_path
-
try:
from conda.lock import Locked
+ from conda.utils import win_path_to_unix, url_path
+ from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
# Make sure this still works as a standalone script for the Anaconda
# installer.
@@ -63,9 +61,6 @@ def __enter__(self):
def __exit__(self, exc_type, exc_value, traceback):
pass
-try:
- from conda.utils import win_path_to_unix
-except ImportError:
def win_path_to_unix(path, root_prefix=""):
"""Convert a path or ;-separated string of paths into a unix representation
@@ -78,14 +73,23 @@ def translation(found_path):
return root_prefix + "/" + found
return re.sub(path_re, translation, path).replace(";/", ":/")
-# Make sure the script stays standalone for the installer
-try:
- from conda.config import remove_binstar_tokens
-except ImportError:
+ def url_path(path):
+ path = abspath(path)
+ if sys.platform == 'win32':
+ path = '/' + path.replace(':', '|').replace('\\', '/')
+ return 'file://%s' % path
+
# There won't be any binstar tokens in the installer anyway
def remove_binstar_tokens(url):
return url
+ # A simpler version of url_channel will do
+ def url_channel(url):
+ return None, 'defaults'
+
+ # We don't use the package cache or trash logic in the installer
+ pkgs_dirs = []
+
on_win = bool(sys.platform == "win32")
if on_win:
@@ -419,13 +423,15 @@ def create_meta(prefix, dist, info_dir, extra_info):
meta = json.load(fi)
# add extra info, add to our intenral cache
meta.update(extra_info)
- load_linked_data(prefix, dist, meta)
# write into <env>/conda-meta/<dist>.json
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
os.makedirs(meta_dir)
with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
json.dump(meta, fo, indent=2, sort_keys=True)
+ # only update the package cache if it is loaded for this prefix.
+ if prefix in linked_data_:
+ load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
@@ -493,7 +499,8 @@ def run_script(prefix, dist, action='post-link', env_prefix=None):
def read_url(dist):
- return package_cache().get(dist, {}).get('urls', (None,))[0]
+ res = package_cache().get(dist, {}).get('urls', (None,))
+ return res[0] if res else None
def read_icondata(source_dir):
@@ -595,7 +602,11 @@ def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
cache, so that subsequent runs will correctly identify the package.
"""
package_cache()
- dist = url.rsplit('/', 1)[-1]
+ if '/' in url:
+ dist = url.rsplit('/', 1)[-1]
+ else:
+ dist = url
+ url = None
if dist.endswith('.tar.bz2'):
fname = dist
dist = dist[:-8]
@@ -614,7 +625,7 @@ def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
if not (xpkg or xdir):
return
url = remove_binstar_tokens(url)
- channel, schannel = url_channel(url)
+ _, schannel = url_channel(url)
prefix = '' if schannel == 'defaults' else schannel + '::'
xkey = xpkg or (xdir + '.tar.bz2')
fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
@@ -622,7 +633,7 @@ def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
rec = package_cache_.get(fkey)
if rec is None:
rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
- if url not in rec['urls']:
+ if url and url not in rec['urls']:
rec['urls'].append(url)
if xpkg and xpkg not in rec['files']:
rec['files'].append(xpkg)
@@ -656,10 +667,10 @@ def package_cache():
for url in data.split()[::-1]:
if '/' in url:
add_cached_package(pdir, url)
- for fn in os.listdir(pdir):
- add_cached_package(pdir, '<unknown>/' + fn)
except IOError:
- continue
+ pass
+ for fn in os.listdir(pdir):
+ add_cached_package(pdir, fn)
del package_cache_['@']
return package_cache_
@@ -699,7 +710,7 @@ def fetched():
"""
Returns the (set of canonical names) of all fetched packages
"""
- return set(dist for dist, rec in iteritems(package_cache()) if rec['files'])
+ return set(dist for dist, rec in package_cache().items() if rec['files'])
def is_fetched(dist):
@@ -735,7 +746,7 @@ def extracted():
"""
return the (set of canonical names) of all extracted packages
"""
- return set(dist for dist, rec in iteritems(package_cache()) if rec['dirs'])
+ return set(dist for dist, rec in package_cache().items() if rec['dirs'])
def is_extracted(dist):
@@ -874,7 +885,7 @@ def linked(prefix):
"""
Return the set of canonical names of linked packages in prefix.
"""
- return set(iterkeys(linked_data(prefix)))
+ return set(linked_data(prefix).keys())
def is_linked(prefix, dist):
@@ -918,6 +929,7 @@ def move_path_to_trash(path):
"""
# Try deleting the trash every time we use it.
delete_trash()
+ from conda.config import root_dir
for pkg_dir in pkgs_dirs:
import tempfile
@@ -1149,14 +1161,15 @@ def main():
pkgs_dir = join(prefix, 'pkgs')
if opts.verbose:
print("prefix: %r" % prefix)
+ pkgs_dirs.append(pkgs_dir)
if opts.file:
idists = list(yield_lines(join(prefix, opts.file)))
else:
- idists = sorted(extracted(pkgs_dir))
+ idists = sorted(extracted())
linktype = (LINK_HARD
- if try_hard_link(pkgs_dir, prefix, idists[0]) else
+ if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
LINK_COPY)
if opts.verbose:
print("linktype: %s" % link_name_map[linktype])
@@ -1164,7 +1177,7 @@ def main():
for dist in idists:
if opts.verbose:
print("linking: %s" % dist)
- link(pkgs_dir, prefix, dist, linktype)
+ link(prefix, dist, linktype)
messages(prefix)
|
Add test for conda/install.py imports
https://github.com/conda/conda/pull/2526#issuecomment-220751869
Since this type of bug was introduced by at least three people independently, I think it would be good to add a test for this. The test could be something along the lines of:
```
tmp_dir = tempfile.mkdtemp()
shutil.copy(conda.install.__file__, tmp_dir)  # copyfile would fail: the destination is a directory
sys.path.insert(0, tmp_dir)  # don't replace sys.path outright — that would hide the standard library
import install
```
Basically, put the `install` module into an empty directory, and test if it can be imported by itself.
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
# When run as part of conda proper, use the real implementations; when run
# standalone by the installer (no conda package importable), fall back to
# minimal stand-ins defined below.
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op context manager: the installer runs single-process
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass

    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive.  If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")

    def url_path(path):
        # minimal file:// URL builder for local paths
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path

    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url

    # A simpler version of url_channel will do
    def url_channel(url):
        return None, 'defaults'

    # We don't use the package cache or trash logic in the installer
    pkgs_dirs = []
on_win = bool(sys.platform == "win32")  # single platform switch used throughout

if on_win:
    import ctypes
    from ctypes import wintypes

    # bind the raw win32 link primitives, since os.link/os.symlink are
    # not usable here
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # older Windows without CreateSymbolicLinkW (e.g. pre-Vista)
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2    # copy R bits to X
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """No-op logging handler.

    Backported from Python 2.7 so that loggers without any configured
    handler do not print
    ``No handlers could be found for logger "patch"``.
    See http://bugs.python.org/issue16539.
    """

    def handle(self, record):
        """Discard the record."""

    def emit(self, record):
        """Discard the record."""

    def createLock(self):
        # This handler never performs I/O, so no lock is required.
        self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize *src* at *dst* using the requested link type.

    linktype is one of LINK_HARD, LINK_SOFT or LINK_COPY; on Windows the
    win32-specific helpers are used instead of os.link/os.symlink.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit on *path*
    and retry the failed operation *func*."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback that logs a warning instead of raising."""
    if exc_info[1].errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif exc_info[1].errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            # read-only bits are a common cause of rmtree
                            # failures on Windows; clear them and retry
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # last-ditch attempt via the shell
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError:
                                # propagate trash failures to the caller.
                                # (An unreachable "Retry with onerror failed"
                                # message after this raise was removed.)
                                raise
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:
        # path is missing, is not empty, or is not removable: ignore
        pass
def yield_lines(path):
    """Yield the stripped, non-empty, non-comment lines of the file at *path*.

    Fix: the file is now opened with a context manager so the handle is
    closed deterministically instead of leaking until garbage collection.
    """
    with open(path) as fh:
        for raw in fh:
            line = raw.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    Parse an ``info/has_prefix`` file.

    Returns a dict mapping each listed filename to a (placeholder, mode)
    tuple.  A line that does not split into exactly three shlex tokens is
    treated as a bare filename using the default placeholder and 'text'
    mode.  A missing file yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                tokens = [tok.strip('"\'') for tok in
                          shlex.split(line, posix=False)]
                placeholder, mode, fname = tokens
                res[fname] = (placeholder, mode)
            except ValueError:
                # old-style file: one bare filename per line
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace when the replacement string is longer than
    the placeholder, so the data cannot be padded back to its original
    length."""
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def _pad_sub(match):
        # pad with NULs so the overall data length is preserved
        count = match.group().count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return match.group().replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_sub, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite a shebang longer than 127 bytes to use ``/usr/bin/env``.

    Only text-mode data is touched; anything else is returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if not match:
        return data
    shebang, interpreter, argv = match.groups()
    if len(shebang) > 127:
        interp_name = interpreter.decode('utf-8').split('/')[-1]
        replacement = '#!/usr/bin/env {0}{1}'.format(interp_name,
                                                     argv.decode('utf-8'))
        data = data.replace(shebang, replacement.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Return *data* (bytes) with *placeholder* replaced by *new_prefix*.

    mode is 'text' (plain substitution) or 'binary' (length-preserving
    substitution via binary_replace); any other value aborts the process.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # Bug fix: this used to be "Invalid mode:" % mode, a broken format
        # string that raised TypeError instead of exiting with the message.
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite *placeholder* to *new_prefix* inside the file at *path*.

    The file is only rewritten when its contents actually change, and its
    permission bits are preserved across the rewrite.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        # see replace_long_shebang: rewrites shebang lines over 127 bytes
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the package name portion of a dist string, channel stripped."""
    bare = dist.split('::', 1)[-1]
    return bare.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
"""
Create the conda metadata, in a given prefix, for a given package.
"""
# read info/index.json first
with open(join(info_dir, 'index.json')) as fi:
meta = json.load(fi)
# add extra info, add to our intenral cache
meta.update(extra_info)
# write into <env>/conda-meta/<dist>.json
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
os.makedirs(meta_dir)
with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
json.dump(meta, fo, indent=2, sort_keys=True)
# only update the package cache if it is loaded for this prefix.
if prefix in linked_data_:
load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
"""
Create cross-platform menu items (e.g. Windows Start Menu)
Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
``remove=True`` will remove the menu items.
"""
menu_files = [f for f in files
if (f.lower().startswith('menu/') and
f.lower().endswith('.json'))]
if not menu_files:
return
elif basename(abspath(prefix)).startswith('_'):
logging.warn("Environment name starts with underscore '_'. "
"Skipping menu installation.")
return
try:
import menuinst
except:
logging.warn("Menuinst could not be imported:")
logging.warn(traceback.format_exc())
return
for f in menu_files:
try:
menuinst.install(join(prefix, f), remove, prefix)
except:
stdoutlog.error("menuinst Exception:")
stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # the package ships no script for this action: trivially successful
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no cmd.exe available, so .bat scripts cannot be run
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # expose conda context to the script via environment variables
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
res = package_cache().get(dist, {}).get('urls', (None,))
return res[0] if res else None
def read_icondata(source_dir):
    """Return the base64-encoded contents of ``info/icon.png`` under
    *source_dir*, or None if the file cannot be read.

    Fix: the icon file is now opened with a context manager so the handle
    is closed deterministically instead of leaking until garbage collection.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fh:
            raw = fh.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Return the union of entries listed in ``no_link`` and ``no_softlink``
    under *info_dir*; missing files contribute nothing."""
    res = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            res |= set(yield_lines(join(info_dir, fn)))
        except IOError:
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
# do not symlink root env - this clobbers activate incorrectly.
if normpath(prefix) == normpath(sys.prefix):
return
if on_win:
where = 'Scripts'
symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
else:
where = 'bin'
symlink_fn = os.symlink
if not isdir(join(prefix, where)):
os.makedirs(join(prefix, where))
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
scripts = ["conda", "activate", "deactivate"]
prefix_where = join(prefix, where)
if not isdir(prefix_where):
os.makedirs(prefix_where)
for f in scripts:
root_file = join(root_dir, where, f)
prefix_file = join(prefix_where, f)
# try to kill stale links if they exist
if os.path.lexists(prefix_file):
os.remove(prefix_file)
# if they're in use, they won't be killed. Skip making new symlink.
if not os.path.lexists(prefix_file):
symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
dist = _dist2filename(dist, '')
src = join(pkgs_dir, dist, 'info', 'index.json')
dst = join(prefix, '.tmp-%s' % dist)
assert isfile(src), src
assert not isfile(dst), dst
try:
if not isdir(prefix):
os.makedirs(prefix)
_link(src, dst, LINK_HARD)
# Some file systems (at least BeeGFS) do not support hard-links
# between files in different directories. Depending on the
# file system configuration, a symbolic link may be created
# instead. If a symbolic link is created instead of a hard link,
# return False.
return not os.path.islink(dst)
except OSError:
return False
finally:
rm_rf(dst)
rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    # make sure the cache tables exist before inserting into them
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed rather than a full URL
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        # already known and overwrite not requested: nothing to do
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # an extracted copy only counts if its metadata files are intact
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # record the channel prefix under both the path and file:// URL forms
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # persist the URL so future runs re-identify this package
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # urls.txt is appended to over time; process newest entries
            # first (add_cached_package ignores duplicates)
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # NOTE(review): os.listdir is outside the try block, so a missing
        # pkgs dir would raise OSError here rather than being skipped —
        # confirm pkgs_dirs entries are guaranteed to exist.
        for fn in os.listdir(pdir):
            add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url*, or None if unknown."""
    package_cache()  # ensure the cache tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
"""
Determines the download location for the given package, and the name
of a package, if any, that must be removed to make room. If the
given package is already in the cache, it returns its current location,
under the assumption that it will be overwritten. If the conflict
value is None, that means there is no other package with that same
name present in the cache (e.g., no collision).
"""
rec = package_cache().get(dist)
if rec:
return dirname((rec['files'] or rec['dirs'])[0]), None
fname = _dist2filename(dist)
dname = fname[:-8]
# Look for a location with no conflicts
# On the second pass, just pick the first location
for p in range(2):
for pkg_dir in pkgs_dirs:
pkg_path = join(pkg_dir, fname)
prefix = fname_table_.get(pkg_path)
if p or prefix is None:
return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
"""
Checks to see if the requested package is in the cache; and if so, it removes both
the package itself and its extracted contents.
"""
rec = package_cache().get(dist)
if rec is None:
return
for fname in rec['files']:
del fname_table_[fname]
del fname_table_[url_path(fname)]
with Locked(dirname(fname)):
rm_rf(fname)
for fname in rec['dirs']:
with Locked(dirname(fname)):
rm_rf(fname)
del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # the tarball is still cached; keep the entry but forget the dirs
        rec['dirs'] = []
    else:
        # nothing left for this package; drop the cache entry entirely
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        # extract into a temp dir first, then rename, so a partially
        # extracted package is never visible under its final name
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files. However, we want root to be the
            # owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load (or install) the conda-meta record for *dist* into the cache.

    When rec is None the record is read from conda-meta/<dist>.json and the
    channel is derived from the record's url; otherwise the supplied record
    is cached directly. Returns the record, or None if its file is missing.
    """
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
        _, schannel = url_channel(rec.get('url'))
    else:
        # make sure the per-prefix cache dict exists before inserting
        linked_data(prefix)
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    rec['fn'] = dname + '.tar.bz2'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
recs = linked_data_.get(prefix)
if recs and dist in recs:
del recs[dist]
if delete:
meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
if isfile(meta_path):
os.unlink(meta_path)
def delete_linked_data_any(path):
'''Here, path may be a complete prefix or a dist inside a prefix'''
dist = ''
while True:
if path in linked_data_:
if dist:
delete_linked_data(path, dist)
return True
else:
del linked_data_[path]
return True
path, dist = os.path.split(path)
if not dist:
return False
def load_meta(prefix, dist):
"""
Return the install meta-data for a linked package in a prefix, or None
if the package is not linked in the prefix.
"""
return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # strip the '.json' suffix to recover the dist name
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix))
def is_linked(prefix, dist):
"""
Return the install metadata for a linked package in a prefix, or None
if the package is not linked in the prefix.
"""
# FIXME Functions that begin with `is_` should return True/False
return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
for pkg_dir in pkgs_dirs:
trash_dir = _get_trash_dir(pkg_dir)
try:
log.debug("Trying to delete the trash dir %s" % trash_dir)
rm_rf(trash_dir, max_retries=1, trash=False)
except OSError as e:
log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
"""
Move a file f from prefix to the trash
tempdir is a deprecated parameter, and will be ignored.
This function is deprecated in favor of `move_path_to_trash`.
"""
return move_path_to_trash(join(prefix, f) if f else prefix)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each configured pkgs dir in turn; returns True on success and
    False if the path could not be moved anywhere.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create this trash dir; try the next pkgs dir
                continue
        # a unique subdirectory avoids collisions between trashed files
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the path's layout relative to the conda root
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # the path is gone; drop any cached metadata that referenced it
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
"""
Set up a package in a specified (environment) prefix. We assume that
the package has been extracted (using extract() above).
"""
index = index or {}
source_dir = is_extracted(dist)
assert source_dir is not None
pkgs_dir = dirname(source_dir)
log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
(pkgs_dir, prefix, dist, linktype))
if not run_script(source_dir, dist, 'pre-link', prefix):
sys.exit('Error: pre-link failed: %s' % dist)
info_dir = join(source_dir, 'info')
files = list(yield_lines(join(info_dir, 'files')))
has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
no_link = read_no_link(info_dir)
with Locked(prefix), Locked(pkgs_dir):
for f in files:
src = join(source_dir, f)
dst = join(prefix, f)
dst_dir = dirname(dst)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if os.path.exists(dst):
log.warn("file already exists: %r" % dst)
try:
os.unlink(dst)
except OSError:
log.error('failed to unlink: %r' % dst)
if on_win:
try:
move_path_to_trash(dst)
except ImportError:
# This shouldn't be an issue in the installer anyway
pass
lt = linktype
if f in has_prefix_files or f in no_link or islink(src):
lt = LINK_COPY
try:
_link(src, dst, lt)
except OSError as e:
log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
(src, dst, lt, e))
if name_dist(dist) == '_cache':
return
for f in sorted(has_prefix_files):
placeholder, mode = has_prefix_files[f]
try:
update_prefix(join(prefix, f), prefix, placeholder, mode)
except PaddingError:
sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
(placeholder, dist))
mk_menus(prefix, files, remove=False)
if not run_script(prefix, dist, 'post-link'):
sys.exit("Error: post-link failed for: %s" % dist)
meta_dict = index.get(dist + '.tar.bz2', {})
meta_dict['url'] = read_url(dist)
try:
alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
meta_dict['files'] = list(yield_lines(alt_files_path))
os.unlink(alt_files_path)
except IOError:
meta_dict['files'] = files
meta_dict['link'] = {'source': source_dir,
'type': link_name_map.get(linktype)}
if 'icon' in meta_dict:
meta_dict['icondata'] = read_icondata(source_dir)
create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # remove menu shortcuts before their backing files disappear
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
            if on_win and os.path.exists(join(prefix, f)):
                try:
                    log.debug("moving to trash")
                    move_path_to_trash(dst)
                except ImportError:
                    # This shouldn't be an issue in the installer anyway
                    # but it can potentially happen with importing conda.config
                    log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        dst_dirs2 = set()
        for path in dst_dirs1:
            # collect every ancestor directory up to (but excluding) prefix
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest directories first, so emptied parents can be removed too
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the ``.messages.txt`` file left by package scripts, then delete it."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fh:
            text = fh.read()
        sys.stdout.write(text)
    except IOError:
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # `group` holds all distributions sharing one package name
        if len(group) == 1:
            # only one distribution: nothing needs removing
            continue
        if group & keep_dists:
            # some members must be kept; remove everything else in the group
            removals |= group - keep_dists
        else:
            # no preference given: keep only the highest-sorting member
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """CLI entry point used by the Anaconda installer to link packages."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    if opts.verbose:
        print("prefix: %r" % prefix)
    # make the target prefix's own pkgs dir visible to the cache helpers
    pkgs_dirs.append(pkgs_dir)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    # probe once whether hard-linking works here; otherwise fall back to copy
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)
    messages(prefix)
    # keep only one linked version of each package name
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
main()
# <|code_end|>
# conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
from conda.lock import Locked
from conda.utils import win_path_to_unix, url_path
from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
# Make sure this still works as a standalone script for the Anaconda
# installer.
class Locked(object):
def __init__(self, *args, **kwargs):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
def win_path_to_unix(path, root_prefix=""):
"""Convert a path or ;-separated string of paths into a unix representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
def translation(found_path):
found = found_path.group(1).replace("\\", "/").replace(":", "")
return root_prefix + "/" + found
return re.sub(path_re, translation, path).replace(";/", ":/")
def url_path(path):
path = abspath(path)
if sys.platform == 'win32':
path = '/' + path.replace(':', '|').replace('\\', '/')
return 'file://%s' % path
# There won't be any binstar tokens in the installer anyway
def remove_binstar_tokens(url):
return url
# A simpler version of url_channel will do
def url_channel(url):
return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
pkgs_dirs = [join(sys.prefix, 'pkgs')]
on_win = bool(sys.platform == "win32")
if on_win:
    # os.link/os.symlink are unavailable (or unreliable) on Windows, so
    # bind the Win32 APIs directly via ctypes.
    import ctypes
    from ctypes import wintypes
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW does not exist (e.g. Windows XP); soft links
        # are then unsupported and win_soft_link() raises OSError.
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        # CreateHardLink returns 0 (falsy) on failure.
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        # Third argument flags the link as a directory link when needed.
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.
        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.
        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2 # copy R bits to X
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """A logging handler that silently discards every record.
    Backported from Python 2.7 so that older interpreters do not print
    `No handlers could be found for logger "patch"` when the library logs
    before the application configures logging.
    See http://bugs.python.org/issue16539
    """
    def handle(self, record):
        # Deliberately drop the record.
        return None
    def emit(self, record):
        return None
    def createLock(self):
        # Nothing is ever written, so no lock is needed.
        self.lock = None
log.addHandler(NullHandler())
# Link type codes used by _link() and recorded in conda-meta metadata.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names for the codes above (stored in conda-meta JSON).
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* from *src* using the requested link type.
    LINK_HARD/LINK_SOFT dispatch to the platform-appropriate primitive;
    LINK_COPY copies the file (relative symlinks are recreated as symlinks
    on POSIX). Any other value raises Exception.
    """
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
        return
    if linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
        return
    if linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree ``onerror`` callback: log *why* removing *path* failed
    instead of raising."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # Directory removal can fail transiently (antivirus scanners,
            # open handles on Windows); retry with increasing back-off.
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # First retry: clear read-only bits and try again.
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # Second retry: fall back to the cmd.exe 'rd' command.
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        # Last resort on Windows: move to the trash folder.
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): the line below is unreachable
                                # after the bare `raise` above -- looks like
                                # leftover code; confirm intended behavior.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """Remove *path* if it is an empty directory.
    A missing directory or a non-empty one is silently ignored.
    """
    try:
        os.rmdir(path)
    except OSError:
        # directory might not exist or not be empty -- both are fine
        pass
def yield_lines(path):
    """Yield the stripped, non-blank, non-comment ('#') lines of the file
    at *path*.
    Bug fix: the original iterated over a bare ``open(path)``, leaking the
    file handle until garbage collection; the handle is now closed
    deterministically via a ``with`` block.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # New-style lines: <placeholder> <mode> <filename>,
                # possibly quoted (posix=False keeps backslashes intact).
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # Old-style lines carry only the filename; assume the
                # default placeholder and 'text' mode.
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # Package ships no has_prefix file -> nothing to replace.
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the replacement is longer than the
    placeholder, leaving no room for null padding."""
    pass
def binary_replace(data, a, b):
    """Replace the placeholder bytes *a* with *b* inside every
    null-terminated string of *data*, padding each affected string with
    null bytes so the total length of *data* is preserved.
    All arguments must be bytes objects. Raises PaddingError when *b* is
    longer than *a* (no room left to pad).
    """
    pad_per_hit = len(a) - len(b)
    def _substitute(match):
        chunk = match.group()
        hits = chunk.count(a)
        padding = pad_per_hit * hits
        if padding < 0:
            raise PaddingError(a, b, padding)
        return chunk.replace(a, b) + b'\0' * padding
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_substitute, data)
    # Length preservation is the whole point of the padding above.
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite an over-long shebang line in *data* (bytes of a 'text' file)
    to the ``#!/usr/bin/env <name>`` form; shebangs longer than 127 bytes
    are truncated by the kernel otherwise. Non-text modes and data without
    a long shebang are returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    shebang_match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if not shebang_match:
        return data
    whole_shebang, executable, options = shebang_match.groups()
    if len(whole_shebang) > 127:
        # Keep only the interpreter's basename and let env resolve it.
        executable_name = executable.decode('utf-8').split('/')[-1]
        new_shebang = '#!/usr/bin/env {0}{1}'.format(executable_name,
                                                     options.decode('utf-8'))
        data = data.replace(whole_shebang, new_shebang.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Return *data* (bytes) with *placeholder* replaced by *new_prefix*.
    mode 'text': plain substitution (the length of the data may change).
    mode 'binary': null-padded substitution via binary_replace(), which
    preserves the total length of the data.
    Any other mode aborts via sys.exit().
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # Bug fix: the original '"Invalid mode:" % mode' raised TypeError
        # ("not all arguments converted...") because the format string had
        # no conversion specifier; include the mode in the message instead.
        sys.exit("Invalid mode: %r" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite, in place, the embedded installation prefix inside the file
    at *path*, replacing *placeholder* with *new_prefix* (see
    replace_prefix() for the 'text' vs 'binary' mode semantics). The file's
    permission bits are preserved; nothing happens if the content would be
    unchanged."""
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        # The kernel truncates shebangs longer than 127 bytes; rewrite them.
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the package-name portion of a canonical dist string,
    discarding any 'channel::' prefix and the version/build fields."""
    bare = dist.split('::', 1)[-1]
    return bare.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        # Fall back to the URL recorded in the package cache, if any.
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # Only refresh the in-memory cache if it is already loaded for prefix.
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # Environments named with a leading underscore are treated as
        # private; they must not create user-visible menu entries.
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional; log and carry on rather than fail the link.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # A broken menu spec must not abort the whole install/remove.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # Scripts are named .<pkgname>-<action>.bat/.sh under Scripts/ or bin/.
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # The package ships no script for this action: trivially succeed.
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # No COMSPEC means we cannot locate cmd.exe to run the script.
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # Expose the conventional conda environment variables to the script.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or
    None when the package has no recorded URL."""
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png under *source_dir*) as a
    base64-encoded string, or None when no icon is shipped."""
    import base64
    try:
        data = open(join(source_dir, 'info', 'icon.png'), 'rb').read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        # No icon file -- perfectly normal.
        pass
    return None
def read_no_link(info_dir):
    """Return the union of the entries in info/no_link and info/no_softlink:
    the files that must be copied rather than hard/soft linked."""
    res = set()
    for fn in ('no_link', 'no_softlink'):
        try:
            res |= set(yield_lines(join(info_dir, fn)))
        except IOError:
            # Both files are optional; absence means no restrictions.
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Expose the root environment's conda/activate/deactivate scripts
    inside *prefix*, via symlinks (POSIX) or .bat redirect files (Windows).
    """
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Helper for symlink_conda(): (re)create one link per conda script
    from *root_dir*/<where> into *prefix*/<where> using *symlink_fn*."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        # try to kill stale links if they exist
        if os.path.lexists(prefix_file):
            os.remove(prefix_file)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(prefix_file):
            symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard-linking from *pkgs_dir* into *prefix* works by
    linking one known file of *dist* and removing it again. Returns True
    only when a genuine hard link (not a symlink fallback) was created."""
    dist = _dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # Clean up the probe artifacts regardless of the outcome.
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename (e.g. an os.listdir entry) has no channel info.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # Only count the extracted copy if it looks complete.
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    # Bug fix: url is None for bare filenames (see above), and
    # remove_binstar_tokens(None) raised
    # "TypeError: expected string or buffer"; only strip tokens from a
    # real URL.
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # Register both the plain path and the file:// form as lookup keys.
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Persist the URL so later runs can re-identify this package.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        # Already populated (or being populated) -- reuse it.
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # urls.txt lists download URLs, newest last; scan newest-first
            # so the most recent URL for a package wins.
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # Also pick up packages on disk that urls.txt does not mention.
        for fn in os.listdir(pdir):
            add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url* (or a cache file path)
    in the filename table, or None when it is unknown."""
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse the directory of the fetched/extracted copy.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = _dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # p == 1 implies prefix is not None (pass 0 would have
                # returned), so 'prefix + dname' names the conflict.
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """Return the set of canonical names of all packages for which a
    fetched tarball is present in the cache."""
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """Return the full path of the fetched tarball for *dist*, or None when
    it is not in the cache."""
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both lookup keys: the plain path and its file:// form.
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """Return the set of canonical names of all packages that have an
    extracted copy in the cache."""
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """Return the full path of the extracted copy of *dist*, or None when
    the package is not extracted."""
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # The tarball is still cached: keep the entry, forget the dirs.
        rec['dirs'] = []
    else:
        # Nothing of the package remains; drop the cache entry entirely.
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        # Extract into a .tmp directory first, then rename, so a partially
        # extracted package is never visible under its final name.
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load the conda-meta record for *dist* into the in-memory
    linked-data cache for *prefix* (reading the JSON from disk when *rec*
    is not supplied). Returns the record, or None if the meta file is
    missing."""
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Caller supplied the record; make sure the prefix cache exists.
        linked_data(prefix)
    url = rec.get('url')
    channel, schannel = url_channel(url)
    if 'fn' not in rec:
        # Derive the tarball filename from the URL when it is not recorded.
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    rec['channel'] = channel
    rec['schannel'] = schannel
    # Cache keys are channel-qualified except for the defaults channel.
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked-data cache of *prefix*; when
    *delete* is true, also remove its conda-meta JSON file from disk."""
    recs = linked_data_.get(prefix)
    if recs is not None:
        recs.pop(dist, None)
    if delete:
        meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    # Walk upward through the path components: if some ancestor is a cached
    # prefix, drop either the single dist entry (when path pointed inside
    # the prefix) or the whole prefix entry.
    while True:
        if path in linked_data_:
            if dist:
                delete_linked_data(path, dist)
                return True
            else:
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # Reached the filesystem root without finding a cached prefix.
            return False
def load_meta(prefix, dist):
    """Return the install meta-data record for *dist* if it is linked in
    *prefix*, else None."""
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # Strip the '.json' suffix to obtain the dist name.
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """Return the set of canonical names of the packages linked into
    *prefix*."""
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    # Kept as a thin alias of load_meta() for backward compatibility.
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
    """Best-effort removal of the .trash directory inside every package
    cache directory; *prefix* is accepted for API compatibility but unused.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # trash=False: never move the trash into the trash.
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            # Trashed files may still be in use; try again on a later call.
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """Deprecated in favor of `move_path_to_trash`.
    Move the file *f* (relative to *prefix*) -- or *prefix* itself when *f*
    is empty -- into the trash. *tempdir* is ignored.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # This cache dir is unusable; try the next one.
                continue
        # A unique subdirectory avoids collisions between trashed paths.
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # Recreate the path's layout relative to root_dir inside the trash.
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # Moved successfully: purge it from the linked-data cache too.
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # Prefix-rewritten, no-link and symlinked files need their own
            # private copy instead of a shared (hard) link.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # The special '_cache' package is only placed, never finalized
            # (no prefix replacement, menus or conda-meta entry).
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Assemble and persist the conda-meta record for this package.
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # A post-link script may leave an alternate file list behind.
            alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Remove menu shortcuts before deleting the files they point to.
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # Collect every now-possibly-empty ancestor directory up to prefix.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # Longest paths first so children are removed before their parents.
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt (written by package
    link scripts) to stdout, then delete the file."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # No messages were produced -- nothing to show.
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """Return the sorted list of distributions to remove so that exactly
    one distribution per package name remains. Distributions listed in
    *keep_dists* are never removed.
    """
    from collections import defaultdict
    keep = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of dists
    for d in dist_metas:
        by_name[name_dist(d)].add(d)
    removals = set()
    for group in by_name.values():
        # `group` holds all dists sharing one package name.
        if len(group) == 1:
            # A unique dist never needs removing.
            continue
        if group & keep:
            # Keep the protected ones; everything else in the group goes.
            removals |= group - keep
        else:
            # No protected dist: keep only the lexically greatest one.
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """CLI entry point used by the self-extracting installer: link the
    extracted packages into a prefix, then prune duplicate dists."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # Bug fix: `pkgs_dirs[0] = [pkgs_dir]` stored a *list* as the first
    # element, so every later `join(pkg_dir, ...)` over pkgs_dirs (e.g. in
    # package_cache) would fail. Rebind the list contents in place so any
    # module holding a reference to pkgs_dirs sees the update.
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    # Probe once whether hard links work; otherwise fall back to copying.
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)
    messages(prefix)
    # Drop conda-meta records of older duplicates of the linked dists.
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
    main()
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -85,10 +85,9 @@ def remove_binstar_tokens(url):
# A simpler version of url_channel will do
def url_channel(url):
- return None, 'defaults'
+ return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
- # We don't use the package cache or trash logic in the installer
- pkgs_dirs = []
+ pkgs_dirs = [join(sys.prefix, 'pkgs')]
on_win = bool(sys.platform == "win32")
@@ -423,13 +422,14 @@ def create_meta(prefix, dist, info_dir, extra_info):
meta = json.load(fi)
# add extra info, add to our intenral cache
meta.update(extra_info)
+ if 'url' not in meta:
+ meta['url'] = read_url(dist)
# write into <env>/conda-meta/<dist>.json
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
os.makedirs(meta_dir)
with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
json.dump(meta, fo, indent=2, sort_keys=True)
- # only update the package cache if it is loaded for this prefix.
if prefix in linked_data_:
load_linked_data(prefix, dist, meta)
@@ -821,12 +821,15 @@ def load_linked_data(prefix, dist, rec=None):
rec = json.load(fi)
except IOError:
return None
- _, schannel = url_channel(rec.get('url'))
else:
linked_data(prefix)
+ url = rec.get('url')
+ channel, schannel = url_channel(url)
+ if 'fn' not in rec:
+ rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
+ rec['channel'] = channel
rec['schannel'] = schannel
cprefix = '' if schannel == 'defaults' else schannel + '::'
- rec['fn'] = dname + '.tar.bz2'
linked_data_[prefix][str(cprefix + dname)] = rec
return rec
@@ -1159,9 +1162,9 @@ def main():
prefix = opts.prefix
pkgs_dir = join(prefix, 'pkgs')
+ pkgs_dirs[0] = [pkgs_dir]
if opts.verbose:
print("prefix: %r" % prefix)
- pkgs_dirs.append(pkgs_dir)
if opts.file:
idists = list(yield_lines(join(prefix, opts.file))) |
conda search broken for me on master
On master (`4d7286cddf091c0bcf8bd105ad847dd9f57c1ed7`), I now get:
```
$ conda search
...
Traceback (most recent call last):
File "/Users/ilan/python/bin/conda", line 6, in <module>
sys.exit(conda.cli.main())
File "/Users/ilan/conda/conda/cli/main.py", line 120, in main
args_func(args, p)
File "/Users/ilan/conda/conda/cli/main.py", line 127, in args_func
args.func(args, p)
File "/Users/ilan/conda/conda/cli/main_search.py", line 118, in execute
execute_search(args, parser)
File "/Users/ilan/conda/conda/cli/main_search.py", line 155, in execute_search
extracted = conda.install.extracted()
File "/Users/ilan/conda/conda/install.py", line 749, in extracted
return set(dist for dist, rec in package_cache().items() if rec['dirs'])
File "/Users/ilan/conda/conda/install.py", line 673, in package_cache
add_cached_package(pdir, fn)
File "/Users/ilan/conda/conda/install.py", line 627, in add_cached_package
url = remove_binstar_tokens(url)
File "/Users/ilan/conda/conda/config.py", line 258, in remove_binstar_tokens
return BINSTAR_TOKEN_PAT.sub(r'\1', url)
TypeError: expected string or buffer
```
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in for conda.lock.Locked: the single-process
        # installer does not need file locking.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # file:// URL for a local path (drive letters become '/X|' on win).
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        # Tolerates url=None (bare filenames carry no channel information).
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
on_win = bool(sys.platform == "win32")

if on_win:
    import ctypes
    from ctypes import wintypes

    # Win32 CreateHardLinkW(newName, existingName, securityAttributes)
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        # CreateSymbolicLinkW is missing on Windows XP.
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')


class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """
    def handle(self, record):
        # Silently discard every record.
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None


log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
if linktype == LINK_HARD:
if on_win:
win_hard_link(src, dst)
else:
os.link(src, dst)
elif linktype == LINK_SOFT:
if on_win:
win_soft_link(src, dst)
else:
os.symlink(src, dst)
elif linktype == LINK_COPY:
# copy relative symlinks as symlinks
if not on_win and islink(src) and not os.readlink(src).startswith('/'):
os.symlink(os.readlink(src), dst)
else:
shutil.copy2(src, dst)
else:
raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror hook: log removal failures instead of raising."""
    err = exc_info[1]
    if err.errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err.errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                    else:
                        if not isdir(path):
                            return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                # BUG FIX: this previously re-raised e2, which
                                # aborted the retry loop and made the message
                                # line below unreachable.  Record the failure
                                # and keep retrying instead.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:
        # Missing, non-empty, or not a directory: best effort only.
        pass
def yield_lines(path):
    """Yield stripped lines from the file at *path*, skipping blank lines
    and '#' comment lines."""
    for raw in open(path):
        text = raw.strip()
        if text and not text.startswith('#'):
            yield text
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')


def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, fname = [t.strip('"\'') for t in
                                            shlex.split(line, posix=False)]
            except ValueError:
                # Legacy one-field form: the whole line is the filename and
                # the default placeholder/mode apply.
                mapping[line] = (prefix_placeholder, 'text')
            else:
                mapping[fname] = (placeholder, mode)
    except IOError:
        # Missing has_prefix file means no prefix replacement is needed.
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement is longer than the
    placeholder, so null padding cannot preserve the data length."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def _pad_replace(match):
        count = match.group().count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            # Replacement longer than placeholder: length cannot be preserved.
            raise PaddingError(a, b, pad)
        return match.group().replace(a, b) + b'\0' * pad

    # Match the placeholder plus the rest of the null-terminated string.
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_replace, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite a '#!' line longer than 127 bytes to '#!/usr/bin/env <name>'.

    Only applies in 'text' mode; binary data is returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) <= 127:
        # Kernel limit not exceeded; leave as-is.
        return data
    exe_name = interpreter.decode('utf-8').split('/')[-1]
    replacement = '#!/usr/bin/env {0}{1}'.format(exe_name, options.decode('utf-8'))
    return data.replace(shebang, replacement.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Replace *placeholder* with *new_prefix* in *data* (bytes).

    mode is 'text' (plain substitution) or 'binary' (length-preserving,
    null-padded substitution via binary_replace); any other mode aborts.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # BUG FIX: was sys.exit("Invalid mode:" % mode) -- no conversion
        # specifier, so the % raised TypeError instead of exiting cleanly.
        sys.exit("Invalid mode: %r" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the embedded build-time prefix in the file at *path* to
    *new_prefix*, preserving the file's permission bits."""
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)

    if data == original_data:
        # Nothing changed; avoid touching mtime/permissions.
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the package name part of a canonical dist string,
    dropping any 'channel::' prefix and '-version-build' suffix."""
    bare = dist.split('::', 1)[-1]
    return bare.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # Keep the in-memory linked-data cache in sync if this prefix is loaded.
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/') and f.lower().endswith('.json')]
    if not menu_files:
        return
    if basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional; log and carry on without menus.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for menu_file in menu_files:
        try:
            menuinst.install(join(prefix, menu_file), remove, prefix)
        except:
            # A single bad menu file must not abort the install.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # No script shipped for this action: trivially successful.
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # BUG FIX: use a copy of the environment.  Assigning os.environ directly
    # mutated the parent process environment, leaking PKG_NAME/PKG_VERSION/...
    # into the rest of the conda run.
    env = os.environ.copy()
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded in the package cache for *dist*,
    or None if no URL is known."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) base64-encoded as text,
    or None when the icon file is missing."""
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fh:
            raw = fh.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Union of the entries in info/no_link and info/no_softlink;
    either file may be absent."""
    names = set()
    for fname in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, fname)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Expose the root env's conda/activate/deactivate inside *prefix*
    (symlinks on POSIX, .bat redirects on Windows)."""
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link conda/activate/deactivate from root_dir/where into prefix/where
    using *symlink_fn*, replacing stale links when possible."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)

    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        # try to kill stale links if they exist
        if os.path.lexists(prefix_file):
            os.remove(prefix_file)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(prefix_file):
            symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================

def try_hard_link(pkgs_dir, prefix, dist):
    """Return True iff a file in pkgs_dir can be hard-linked into prefix
    (used to pick between hard-link and copy installation)."""
    dist = _dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # Always clean up the probe file and any directory we created.
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction

# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}  # '[channel::]dist' -> {'files': [...], 'dirs': [...], 'urls': [...]}
fname_table_ = {}    # tarball path / file:// URL -> channel prefix ('' for defaults)


def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename (e.g. from os.listdir): there is no URL to record.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    # BUG FIX: url is None for entries discovered by filename alone, and
    # remove_binstar_tokens() requires a string (it runs a regex sub), so
    # guard the call.  url_channel() already copes with None.
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        # BUG FIX: a configured package directory may not exist yet.  The
        # os.listdir call below is outside the IOError handler and would
        # raise; skip missing directories instead.
        if not isdir(pdir):
            continue
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # Newest entries last in the file; scan newest-first.
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        for fn in os.listdir(pdir):
            add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    # Map a package URL (or file:// path) to its channel prefix, or None
    # if the URL is unknown to the package cache.
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = _dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # Pass 0: return only conflict-free dirs (conflict value None).
            # Pass 1: return the first dir, naming the conflicting
            # '<channel-prefix><dist>' entry that must be evicted.
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched

def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both spellings of the lookup key (path and file:// URL).
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted

def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # Tarball still cached: keep the record, just forget the dirs.
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        # Extract to a temp dir first, then rename, so a partially extracted
        # tree is never mistaken for a complete one.
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}  # prefix -> {'[channel::]dist': meta record}


def load_linked_data(prefix, dist, rec=None):
    """Load (or install) one conda-meta record into the linked-data cache.

    Reads <prefix>/conda-meta/<dist>.json when rec is None; returns the
    record, or None if the meta file is missing or unreadable.
    """
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Ensure the per-prefix cache dict exists before inserting below.
        linked_data(prefix)
    url = rec.get('url')
    channel, schannel = url_channel(url)
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    rec['channel'] = channel
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked-data cache for *prefix* and,
    when delete=True, remove its conda-meta .json file as well."""
    cache = linked_data_.get(prefix)
    if cache and dist in cache:
        del cache[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    # Walk upward one component at a time.  If a known prefix is found,
    # remove the single dist below it (when path pointed inside a prefix)
    # or the whole prefix entry (when path was the prefix itself).
    while True:
        if path in linked_data_:
            if dist:
                delete_linked_data(path, dist)
                return True
            else:
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # Reached the filesystem root without finding a known prefix.
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    records = linked_data(prefix)
    return records.get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # The filename minus '.json' is the dist name.
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    # Iterating the dict yields its keys directly.
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    meta = load_meta(prefix, dist)
    return meta
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
    """Best-effort removal of the .trash directory in every pkgs_dir.

    prefix is accepted for API compatibility but unused here.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    delete_trash()

    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)

        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # This pkgs_dir is unusable; try the next one.
                continue

        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # Mirror the path's location relative to the root so trashed files
        # from different places cannot collide.
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))

        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue

        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # Forget the moved path in the linked-data cache.
            delete_linked_data_any(path)
            return True

    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # Prefix-bearing files, no-link files, and symlinks must be
            # copied rather than hard/soft linked.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            # The special '_cache' package gets no metadata or scripts.
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # A '.files' override written by the post-link step wins over
            # the package's own file list.
            alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # Collect every ancestor directory of removed files up to prefix.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        # Deepest first, so emptied directories cascade upward.
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the .messages.txt left behind by package scripts, then
    delete it (always, even if reading failed)."""
    msg_path = join(prefix, '.messages.txt')
    try:
        with open(msg_path) as fh:
            sys.stdout.write(fh.read())
    except IOError:
        pass
    finally:
        rm_rf(msg_path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    keep = set(keep_dists)
    # Group the distributions by package name.
    by_name = defaultdict(set)
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)

    removals = set()
    for group in by_name.values():
        # `group` holds all distributions sharing one name.
        if len(group) == 1:
            # A single distribution: nothing to remove.
            continue
        if group & keep:
            # Protected members exist: drop everything else in the group.
            removals.update(group - keep)
        else:
            # No protected members: keep only the highest-sorting one.
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================

def main():
    """CLI entry point used by the sfx installer to link extracted packages."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")

    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # BUG FIX: restrict the package directories to the installer's own
    # pkgs dir.  The previous code did `pkgs_dirs[0] = [pkgs_dir]`, which
    # stored a *list* as element 0 and broke every later join(pdir, ...).
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    # Probe once: if hard links work for the first dist, use them for all.
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)

    messages(prefix)

    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)


if __name__ == '__main__':
    main()
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    # Normal case: running inside an installed conda.
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in for conda.lock.Locked; the installer runs as a
        # single process, so no real locking is required.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
        def translation(found_path):
            # Drop the drive colon and flip backslashes: C:\x -> <root>/C/x
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # Build a file:// URL for a local path (drive colon becomes '|'
        # and backslashes become '/' on Windows).
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
on_win = bool(sys.platform == "win32")

if on_win:
    # Load the Win32 link primitives via ctypes; plain os.link/os.symlink
    # are not usable on the Windows builds this module must support.
    import ctypes
    from ctypes import wintypes

    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW does not exist on Windows XP; win_soft_link
        # reports soft links as unsupported when this is None.
        CreateSymbolicLink = None
def win_hard_link(src, dst):
    """Create a hard link from *src* to *dst* via win32 CreateHardLink.

    Mirrors ``os.link`` and raises OSError on failure.
    """
    created = CreateHardLink(dst, src, None)
    if not created:
        raise OSError('win32 hard link failed')
def win_soft_link(src, dst):
    """Create a symbolic link from *src* to *dst* via win32 CreateSymbolicLink.

    Raises OSError when the API is unavailable (Windows XP) or the call fails.
    """
    if CreateSymbolicLink is None:
        raise OSError('win32 soft link not supported')
    created = CreateSymbolicLink(dst, src, isdir(src))
    if not created:
        raise OSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
    """Special function for Windows XP where the `CreateSymbolicLink`
    function is not available.
    Simply creates a `.bat` file at `dst` which calls `src` together with
    all command line arguments.
    Works of course only with callable files, e.g. `.bat` or `.exe` files.
    """
    from conda.utils import shells
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as exc: # Python >2.5
        # Ignore "directory already exists"; re-raise anything else.
        if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
            pass
        else:
            raise
    if 'cmd.exe' in shell.lower():
        # bat file redirect
        with open(dst+'.bat', 'w') as f:
            f.write('@echo off\n"%s" %%*\n' % src)
    elif 'powershell' in shell.lower():
        # TODO: probably need one here for powershell at some point
        pass
    else:
        # This one is for bash/cygwin/msys
        with open(dst, "w") as f:
            f.write("#!/usr/bin/env bash \n")
            if src.endswith("conda"):
                # The conda executable is invoked directly (with .exe added).
                f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
            else:
                # Other scripts are sourced so they can affect the caller.
                f.write('source %s "$@"' % shells[shell]['path_to'](src))
    # Make the new file executable
    # http://stackoverflow.com/a/30463972/1170370
    mode = os.stat(dst).st_mode
    mode |= (mode & 292) >> 2 # copy R bits to X
    os.chmod(dst, mode)
# Module-level loggers; 'stdoutlog' is presumably wired to stdout by conda's
# console setup elsewhere (not visible from this module).
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """Logging handler that silently discards every record.

    Backport of the Python 2.7 stdlib NullHandler, used to avoid the
    `No handlers could be found for logger "patch"` warning
    (http://bugs.python.org/issue16539).
    """
    def handle(self, record):
        # Swallow the record without filtering or emitting anything.
        pass
    def emit(self, record):
        # Intentionally a no-op.
        pass
    def createLock(self):
        # No I/O ever happens, so no lock is required.
        self.lock = None
# Attach the no-op handler so importing this module never triggers the
# "no handlers could be found" warning.
log.addHandler(NullHandler())

# Strategies used by _link() when placing package files into an environment.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names, recorded in the conda-meta 'link' entry.
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize *src* at *dst* using the requested link strategy.

    LINK_HARD and LINK_SOFT dispatch to the win32 helpers on Windows and to
    os.link/os.symlink elsewhere.  LINK_COPY preserves relative symlinks as
    symlinks and copies everything else.  Raises Exception for an unknown
    linktype.
    """
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
    elif linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        is_relative_symlink = (not on_win and islink(src) and
                               not os.readlink(src).startswith('/'))
        if is_relative_symlink:
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """``shutil.rmtree`` onerror callback: clear the read-only bit, retry.

    Windows refuses to delete read-only files; grant write permission and
    re-invoke the failed operation *func* on *path*.
    """
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """``shutil.rmtree`` onerror callback that logs a warning instead of
    raising, choosing the message from the failure's errno."""
    error_number = exc_info[1].errno
    if error_number == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif error_number == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # Retry loop: each failed attempt escalates through the Windows
            # fallbacks below, then sleeps i seconds before the next round.
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # Fallback 1: clear read-only bits while deleting.
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # Fallback 2: shell out to `rd /s /q`.
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        # Fallback 3: move the stubborn tree to the trash.
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): the line below is unreachable --
                                # the bare `raise` above exits first.  Either the
                                # raise or this accumulation looks like leftover
                                # code; confirm which was intended.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """Delete *path* if it is an empty directory; otherwise do nothing.

    Missing paths and non-empty directories are silently ignored.
    """
    try:
        os.rmdir(path)
    except OSError:
        # Raised when path does not exist, is not a directory, or is not
        # empty -- all cases we deliberately ignore.
        pass
def yield_lines(path):
    """Iterate over the meaningful lines of the text file at *path*.

    Yields each line stripped of surrounding whitespace, skipping blank
    lines and comment lines starting with '#'.

    Fix: the file is now opened with a context manager so the handle is
    closed deterministically when iteration completes, instead of relying
    on the garbage collector.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Default placeholder prefix, used when a has_prefix entry does not name one
# (see read_has_prefix) and as the default for update_prefix() below.
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """Parse an ``info/has_prefix`` file.

    Returns a dict mapping each listed filename to a ``(placeholder, mode)``
    tuple.  A line that does not split into exactly three fields is treated
    as a bare filename using the default placeholder in 'text' mode.  A
    missing file yields an empty dict.
    """
    mapping = {}
    try:
        for raw in yield_lines(path):
            try:
                tokens = [tok.strip('"\'') for tok in
                          shlex.split(raw, posix=False)]
                placeholder, mode, fname = tokens
            except ValueError:
                # Wrong field count (or unbalanced quotes): fall back to
                # treating the whole line as the filename.
                mapping[raw] = (prefix_placeholder, 'text')
            else:
                mapping[fname] = (placeholder, mode)
    except IOError:
        # No has_prefix file: nothing needs prefix replacement.
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace() when the replacement is longer than the
    placeholder, so null padding cannot keep the data length constant."""
    pass
def binary_replace(data, a, b):
    """Replace *a* with *b* inside the null-terminated strings of *data*.

    Every match is padded with null bytes so that the overall length of
    *data* is unchanged.  All arguments are bytes objects.  Raises
    PaddingError when *b* is longer than *a* (negative padding).
    """
    def _padded(match):
        occurrences = match.group().count(a)
        padding = (len(a) - len(b)) * occurrences
        if padding < 0:
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    replaced = pattern.sub(_padded, data)
    assert len(replaced) == len(data)
    return replaced
def replace_long_shebang(mode, data):
    """Rewrite an overlong shebang line to use ``#!/usr/bin/env``.

    Shebang lines longer than 127 bytes are replaced with
    ``#!/usr/bin/env <basename> <options>`` (kernels commonly truncate
    shebangs around that length -- hence the threshold).  Only 'text' mode
    data is touched; anything else is returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    shebang_match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if shebang_match is None:
        return data
    whole_shebang, executable, options = shebang_match.groups()
    if len(whole_shebang) > 127:
        executable_name = executable.decode('utf-8').split('/')[-1]
        new_shebang = '#!/usr/bin/env {0}{1}'.format(executable_name,
                                                     options.decode('utf-8'))
        data = data.replace(whole_shebang, new_shebang.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* inside *data* (bytes).

    mode='text' does a plain bytes replacement; mode='binary' delegates to
    binary_replace() so the data length stays constant.  Any other mode
    aborts via sys.exit.

    Fix: the error message was built as ``"Invalid mode:" % mode``, which
    raises TypeError (no conversion specifier) instead of exiting with a
    readable message; it now interpolates the offending mode.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the embedded placeholder prefix in the file at *path*.

    The file is rewritten only when its content actually changes, and it is
    unlinked before the new bytes are written so that a hard-linked copy in
    the package cache is never mutated.  File permissions are preserved.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = fi.read()

    data = replace_prefix(mode, original_data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the package-name portion of a dist string.

    Strips an optional 'channel::' qualifier, then drops the trailing
    version and build fields: 'free::numpy-1.6.2-py26_0' -> 'numpy'.
    """
    bare = dist.split('::', 1)[-1]
    return bare.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # Keep the in-memory cache consistent, but only for prefixes already
    # loaded -- linked_data() picks up the file lazily otherwise.
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.

    Fix: the two bare ``except:`` clauses now catch ``Exception`` so that
    KeyboardInterrupt/SystemExit are no longer swallowed; menu handling
    remains best-effort (log and continue).
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except Exception:
        # menuinst is optional; report why it could not be imported.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # Best-effort per menu file: report and continue with the rest.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    The script is looked up as .<name>-<action>.bat (Windows) or .sh in the
    prefix's Scripts/bin directory; a missing script counts as success.

    Fix: previously ``env = os.environ`` aliased the live process
    environment, so PREFIX/PKG_* leaked into (and persisted in) the parent
    process after the script ran.  A copy is used now.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # No command interpreter available at all.
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    env = os.environ.copy()
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or
    None when the package (or its URL) is unknown."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    if not urls:
        return None
    return urls[0]
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) as base64 text, or None when
    the icon file does not exist.

    Fix: the icon file is now opened with a context manager so the handle
    is closed deterministically instead of waiting for garbage collection.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        return None
def read_no_link(info_dir):
    """Collect the filenames listed in info/no_link and info/no_softlink.

    Files named there are copied rather than linked (see link()).  A
    missing listing contributes nothing.
    """
    names = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, listing)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Expose the root environment's conda/activate/deactivate entry points
    inside *prefix*: symlinks on unix, .bat redirect scripts on Windows."""
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from *root_dir* into *prefix*.

    For each of conda/activate/deactivate, a stale entry in
    ``<prefix>/<where>`` is removed before *symlink_fn(src, dst)* is called
    to create the new link.
    """
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    for script in ("conda", "activate", "deactivate"):
        source = join(root_dir, where, script)
        target = join(target_dir, script)
        # try to kill stale links if they exist
        if os.path.lexists(target):
            os.remove(target)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(target):
            symlink_fn(source, target)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Return True when a genuine hard link can be made from the package
    cache into *prefix*, probed by linking one known file and checking that
    the result is not a symlink.  The probe file is always cleaned up."""
    dist = _dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # Always remove the probe file and any empty prefix we created.
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# package_cache_: '<channel>::<dist>' (bare '<dist>' for defaults) ->
#     {'files': [...], 'dirs': [...], 'urls': [...]}; filled by package_cache().
# fname_table_: tarball path (and its file:// URL form) -> channel prefix
#     ('' for defaults, '<channel>::' otherwise); filled by add_cached_package().
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the cache tables are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename was passed (directory scan); no URL is known.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    # First registration wins unless the caller asks to overwrite.
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # An extracted copy only counts when its essential metadata is intact.
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        # Normalize away any binstar token (a no-op in the installer fallback).
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # Record the channel prefix under both the path and file:// spellings.
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # NOTE(review): when url is None this appends the literal string
        # 'None' to urls.txt -- confirm callers always pass a real URL
        # together with urlstxt=True.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.

    Fixes: the urls.txt handle is now closed via a context manager, and the
    directory scan is guarded so a missing pkgs directory no longer raises
    an uncaught OSError from os.listdir().
    """
    if package_cache_:
        return package_cache_
    # Stops recursion: add_cached_package() calls back into this function,
    # so mark the cache as non-empty while it is being filled.
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # Newest URLs are appended at the end of urls.txt; walk in
            # reverse so the most recent entry for a package wins (the
            # first registration of a tarball is kept).
            with open(join(pdir, 'urls.txt')) as fi:
                data = fi.read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            # No urls.txt -- rely on the directory scan below.
            pass
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url*, or None if unknown."""
    package_cache()  # make sure fname_table_ has been populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse whichever directory currently holds it.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = _dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # Pass 0 returns the first conflict-free directory (prefix is
            # None when nothing with this filename occupies pkg_path).
            # Pass 1 accepts the first directory unconditionally and names
            # the occupant ('<channel prefix><name>') as the conflict.
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    result = set()
    for dist, rec in package_cache().items():
        if rec['files']:
            result.add(dist)
    return result
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both spellings of the tarball key (path and file:// URL).
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    # Nothing is left for this package; drop its record entirely.
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    result = set()
    for dist, rec in package_cache().items():
        if rec['dirs']:
            result.add(dist)
    return result
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for dirpath in rec['dirs']:
        with Locked(dirname(dirpath)):
            rm_rf(dirpath)
    if rec['files']:
        # The tarball is still cached; just forget the extracted copies.
        rec['dirs'] = []
    else:
        # Nothing at all remains for this package; drop the record.
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        # Unpack into a sibling .tmp directory and rename into place, so a
        # partially-extracted package never looks like a valid one.
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files. However, we want root to be the
            # owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # Re-scan so the cache record picks up the extracted directory.
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
# linked_data_: prefix -> {'<channel-qualified dist>': metadata dict}
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load (or register) the conda-meta record for *dist* in *prefix*.

    When *rec* is None the record is read from
    <prefix>/conda-meta/<dist>.json; otherwise the supplied dict is
    registered directly.  Returns the record, or None when no metadata
    file exists.
    """
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Make sure the per-prefix cache dict exists before inserting.
        linked_data(prefix)
    url = rec.get('url')
    # NOTE(review): the channel is derived from 'url' only; records that
    # lack 'url' but do carry a 'channel' entry (as in the reported clone
    # bug) get misattributed -- confirm whether 'channel' should be used
    # as a fallback here.
    channel, schannel = url_channel(url)
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    rec['channel'] = channel
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked cache for *prefix*; when
    *delete* is true, also remove its conda-meta .json file from disk."""
    cache = linked_data_.get(prefix)
    if cache and dist in cache:
        del cache[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    """Purge cache entries for *path*, which may be an environment prefix
    or any path inside one.  Returns True when something was purged."""
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # path is a cached prefix and dist an entry inside it.
                delete_linked_data(path, dist)
            else:
                # path itself is a cached prefix; drop the whole prefix.
                del linked_data_[path]
            return True
        path, dist = os.path.split(path)
        if not dist:
            # Walked all the way up without finding a cached prefix.
            return False
def load_meta(prefix, dist):
    """Return the install metadata for *dist* linked in *prefix*, or None
    when the package is not linked there."""
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """Return {dist: metadata} for every package linked into *prefix*.

    Memoized by hand in linked_data_ (rather than with functools) so that
    create_meta()/delete_linked_data() can update entries in place.
    """
    cached = linked_data_.get(prefix)
    if cached is not None:
        return cached
    cached = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fn in os.listdir(meta_dir):
            if fn.endswith('.json'):
                load_linked_data(prefix, fn[:-5])
    return cached
def linked(prefix):
    """Return the set of dist names of all packages linked into *prefix*."""
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
    """Best-effort removal of every package cache's .trash directory.

    *prefix* is accepted for interface compatibility but is unused.
    Failures are logged at debug level and otherwise ignored.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    path = join(prefix, f) if f else prefix
    return move_path_to_trash(path)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Returns True when the move succeeded in some package cache's trash
    directory, False when every attempt failed.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                continue
        # A fresh temp subdirectory avoids name collisions with anything
        # already sitting in the trash.
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # Mirror the path's layout relative to the root install.
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # Moved: drop any linked-data cache entries pointing at it.
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    # Hold both the environment and the package cache while files are laid
    # down, so neither side mutates underneath us.
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        # In-use files cannot be unlinked on Windows; move
                        # them to the trash instead.
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # Files needing prefix rewriting, listed in no_link, or that
            # are themselves symlinks are copied rather than linked.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # Special-cased '_cache' dist: skip prefix rewriting, menus
            # and metadata entirely.
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Assemble the conda-meta record: repo index metadata, source URL,
        # final file list, and how the package was linked.
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # An alternate .files listing, when present, overrides the
            # package's own list and is consumed (origin not visible from
            # this file -- presumably written by an earlier step).
            alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Remove menu entries first, then the package's files.
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError: # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    # In-use files cannot be unlinked on Windows; move
                    # them to the trash instead.
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # Collect every directory between each removed file and the prefix.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # Longest paths first, so children are pruned before their parents.
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print <prefix>/.messages.txt to stdout (when present), then always
    delete it so messages are shown at most once."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            contents = fi.read()
        sys.stdout.write(contents)
    except IOError:
        # No messages file: nothing to show.
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """Return the sorted list of distributions to remove so that at most
    one distribution per package name remains.

    `keep_dists` is an iterable of distributions that must never be
    removed.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        if len(group) == 1:
            # Only one build of this package: nothing to prune.
            continue
        if group & keep_dists:
            # Protected dists stay; every other build in the group goes.
            removals.update(group - keep_dists)
        else:
            # Nothing protected here: keep only the highest-sorting build.
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """CLI entry point used by the Anaconda installer: link all extracted
    packages (or the ones listed via --file) into --prefix."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # Fix: this previously did `pkgs_dirs[0] = [pkgs_dir]`, nesting a list
    # inside pkgs_dirs and breaking every join(pdir, ...) downstream in
    # package_cache()/add_cached_package().  Replace the entry itself.
    pkgs_dirs[0] = pkgs_dir
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    # Probe once whether hard links work here; otherwise fall back to copy.
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)
    messages(prefix)
    # Keep only one build per package name among those not just linked.
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -624,7 +624,8 @@ def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
xdir = None
if not (xpkg or xdir):
return
- url = remove_binstar_tokens(url)
+ if url:
+ url = remove_binstar_tokens(url)
_, schannel = url_channel(url)
prefix = '' if schannel == 'defaults' else schannel + '::'
xkey = xpkg or (xdir + '.tar.bz2') |
Another clone bug
```
kfranz@0283:~/continuum/conda *master ❯ conda create --clone root -n rootclone
Source: '/conda'
Destination: '/conda/envs/rootclone'
The following packages cannot be cloned out of the root environment:
- conda-team::conda-4.1.0rc2-py27_0
- conda-build-1.20.1-py27_0
Error: no URL found for package: <unknown>::gcc-4.8.5-3
```
But /conda/conda-meta/gcc-4.8.5-3.json has, in part,
```
"channel": "https://repo.continuum.io/pkgs/free/osx-64/",
```
so the information is there. The logic here is located around line 273 of `conda/misc.py`.
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    # Normal case: running inside an installed conda.
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op stand-in for conda.lock.Locked; the installer runs as a
        # single process, so no real locking is required.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
        def translation(found_path):
            # Drop the drive colon and flip backslashes: C:\x -> <root>/C/x
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # Build a file:// URL for a local path (drive colon becomes '|'
        # and backslashes become '/' on Windows).
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
on_win = bool(sys.platform == "win32")
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
def win_hard_link(src, dst):
"Equivalent to os.link, using the win32 CreateHardLink call."
if not CreateHardLink(dst, src, None):
raise OSError('win32 hard link failed')
def win_soft_link(src, dst):
"Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
if CreateSymbolicLink is None:
raise OSError('win32 soft link not supported')
if not CreateSymbolicLink(dst, src, isdir(src)):
raise OSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
"""Special function for Windows XP where the `CreateSymbolicLink`
function is not available.
Simply creates a `.bat` file at `dst` which calls `src` together with
all command line arguments.
Works of course only with callable files, e.g. `.bat` or `.exe` files.
"""
from conda.utils import shells
try:
os.makedirs(os.path.dirname(dst))
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
pass
else:
raise
if 'cmd.exe' in shell.lower():
# bat file redirect
with open(dst+'.bat', 'w') as f:
f.write('@echo off\n"%s" %%*\n' % src)
elif 'powershell' in shell.lower():
# TODO: probably need one here for powershell at some point
pass
else:
# This one is for bash/cygwin/msys
with open(dst, "w") as f:
f.write("#!/usr/bin/env bash \n")
if src.endswith("conda"):
f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
else:
f.write('source %s "$@"' % shells[shell]['path_to'](src))
# Make the new file executable
# http://stackoverflow.com/a/30463972/1170370
mode = os.stat(dst).st_mode
mode |= (mode & 292) >> 2 # copy R bits to X
os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """A no-op logging handler (backport of Python 2.7's NullHandler).

    Prevents `No handlers could be found for logger "patch"` warnings when
    this module runs standalone; see http://bugs.python.org/issue16539
    """

    def handle(self, record):
        # discard the record silently
        pass

    def emit(self, record):
        # discard the record silently
        pass

    def createLock(self):
        # nothing is ever written, so no lock is required
        self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    # Create dst from src using the requested link type (hard link,
    # symlink, or plain copy).  On Windows the ctypes-based win_* helpers
    # are used instead of os.link/os.symlink.
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks (preserves intra-package
        # relative links); absolute symlinks are dereferenced by copy2
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback: log why *path* could not be removed
    instead of raising."""
    code = exc_info[1].errno
    if code == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif code == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # retry loop: Windows in particular can fail transiently when
            # files are held open by other processes (AV scanners etc.)
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # escalation ladder on Windows: chmod-and-retry,
                        # then `rd /s /q`, then move to the trash dir
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): the statement below is
                                # unreachable -- the `raise` above exits
                                # first.  Confirm whether the raise is
                                # debugging leftover and the message
                                # accumulation was the intended behavior.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    # back off linearly (0, 1, 2, ... seconds) before retrying
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """Remove *path* if it is an empty directory; otherwise do nothing.

    A missing path and a non-empty directory both surface as OSError from
    os.rmdir, which is deliberately swallowed here.
    """
    try:
        os.rmdir(path)
    except OSError:
        # not empty, not a directory, or does not exist -- all fine
        pass
def yield_lines(path):
    """Yield the stripped, non-empty, non-comment ('#') lines of the file
    at *path*.

    Fix: the file is now opened in a ``with`` block so the handle is
    closed deterministically; the original left closing to the garbage
    collector, leaking the descriptor on non-refcounting interpreters or
    when the generator is abandoned mid-iteration.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """Parse an info/has_prefix file into {filename: (placeholder, mode)}.

    Each line is either 'placeholder mode filename' (shlex-quoted) or a
    bare filename, which implies the default placeholder in 'text' mode.
    A missing file yields an empty mapping.
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                tokens = [tok.strip('"\'')
                          for tok in shlex.split(line, posix=False)]
                placeholder, mode, filename = tokens
                mapping[filename] = (placeholder, mode)
            except ValueError:
                # old-style line: the whole line is just a filename
                mapping[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement prefix is longer than
    the placeholder, so the result cannot be null-padded back to size."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def _pad_replace(match):
        chunk = match.group()
        count = chunk.count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            # replacement longer than placeholder: size cannot be preserved
            raise PaddingError(a, b, pad)
        return chunk.replace(a, b) + b'\0' * pad

    # match the placeholder plus everything up to the next NUL terminator
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_replace, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite a text file's shebang via /usr/bin/env when the interpreter
    path would exceed the kernel's 127-byte shebang limit.

    Non-'text' data is returned untouched.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) <= 127:
        return data
    program = interpreter.decode('utf-8').split('/')[-1]
    replacement = '#!/usr/bin/env {0}{1}'.format(program, options.decode('utf-8'))
    return data.replace(shebang, replacement.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* in *data* (bytes).

    mode 'text' does a plain byte substitution; 'binary' delegates to
    binary_replace so the overall length stays constant.  Any other mode
    aborts with an error message.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # BUG FIX: the original message was "Invalid mode:" % mode -- no
        # %s conversion, so it raised TypeError instead of exiting with a
        # readable error.
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    # Rewrite the build-time placeholder prefix embedded in the file at
    # *path* to *new_prefix*, and (on Unix) shorten over-long shebangs.
    # The file is only rewritten when its contents actually change.
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    # restore the original permission bits on the rewritten file
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the package-name portion of a dist string, dropping any
    'channel::' prefix and the trailing '-version-build' fields."""
    without_channel = dist.split('::', 1)[-1]
    return without_channel.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # keep the in-memory linked-data cache consistent with the file we
    # just wrote, but only if this prefix has already been scanned
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed env names are treated as internal; skip menus
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional; a missing/broken install must not abort linking
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # best effort per menu file; log and continue with the rest
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # scripts live at <prefix>/bin/.<name>-<action>.sh (or Scripts\...bat
    # on Windows); a package without such a script trivially succeeds
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # NOTE: env aliases os.environ, so these assignments mutate the
    # current process environment, not a copy
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or
    None when the dist is unknown or has no URLs."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def read_icondata(source_dir):
    """Return the base64-encoded contents of info/icon.png inside an
    extracted package directory, or None when the icon is missing.

    Fix: the icon file is now opened in a ``with`` block; the original
    used ``open(...).read()`` and never closed the handle.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        return None
def read_no_link(info_dir):
    """Return the union of file paths listed in info/no_link and
    info/no_softlink; these files must be copied rather than linked.
    Missing list files are simply skipped."""
    paths = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            paths.update(yield_lines(join(info_dir, listing)))
        except IOError:
            pass
    return paths
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    # Expose the root environment's conda/activate/deactivate entry points
    # inside *prefix* (symlinks on Unix, .bat redirects on Windows).
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda/activate/deactivate entry points from *root_dir*
    into *prefix*/<where> using *symlink_fn* (os.symlink on Unix, a .bat
    redirect writer on Windows)."""
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    for entry in ("conda", "activate", "deactivate"):
        source = join(root_dir, where, entry)
        dest = join(target_dir, entry)
        # try to kill stale links if they exist
        if os.path.lexists(dest):
            os.remove(dest)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(dest):
            symlink_fn(source, dest)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    # Probe whether hard-linking works between the package cache and the
    # target prefix by linking one small known file (info/index.json) into
    # a throw-away name.  Returns True when a genuine hard link results.
    dist = _dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # always clean up the probe file and the prefix if we created it
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    # url may be a real URL or just a bare filename from os.listdir
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    # xpkg: the fetched tarball, if present on disk
    if not isfile(xpkg):
        xpkg = None
    # xdir: the extracted directory, only counted when it looks complete
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    # non-default channels are encoded as a 'channel::' key prefix
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # best-effort append so future runs can re-identify the channel
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # Replay urls.txt newest-first so the most recent URL for a
            # filename wins when add_cached_package first records it.
            with open(join(pdir, 'urls.txt')) as fi:
                data = fi.read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # Fix: guard the directory scan -- a configured but not-yet-created
        # cache directory made os.listdir raise an uncaught OSError (the
        # except IOError above only covered the urls.txt read).  Also, the
        # urls.txt handle is now closed via the with-block above.
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url* in the cache's
    filename table, or None when the URL is unknown."""
    package_cache()  # ensure the cache tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # already cached somewhere: reuse that directory, no conflict
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = _dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # precedence note: this is (prefix + dname) if p else None
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """Return the set of canonical names of every package for which a
    fetched tarball exists in the cache."""
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """Return the full path of the fetched tarball for *dist*, or None if
    it is not present in the cache."""
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both the filesystem-path and file:// URL keys
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    # forget the package entirely
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """Return the set of canonical names of every package that has an
    extracted copy in the cache."""
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """Return the full path of the extracted directory for *dist*, or
    None when the package is not extracted."""
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # the tarball remains cached; just forget the extracted copies
        rec['dirs'] = []
    else:
        # nothing left for this dist at all
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        # extract into <name>.tmp first, then atomically rename so a
        # half-extracted directory is never mistaken for a complete one
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will try to restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    # Load (or accept) the conda-meta record for *dist*, annotate it with
    # channel information derived from its URL, and store it in the
    # in-memory linked_data_ cache under its channel-qualified key.
    # Returns the record, or None when the meta file cannot be read.
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # make sure the prefix's cache entry exists before we write into it
        linked_data(prefix)
    url = rec.get('url')
    channel, schannel = url_channel(url)
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    rec['channel'] = channel
    rec['schannel'] = schannel
    # non-default channels key the cache with a 'channel::' prefix
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Forget *dist* in the in-memory linked-data cache for *prefix*;
    when *delete* is true, also remove its conda-meta JSON file."""
    recs = linked_data_.get(prefix)
    if recs:
        recs.pop(dist, None)
    if not delete:
        return
    meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
    if isfile(meta_path):
        os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    # Walk the path upward one component at a time until a known prefix
    # is found in linked_data_, then purge either the single dist (when
    # the original path pointed inside the prefix) or the whole prefix.
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # path resolved to <prefix>/<dist>: forget just that dist
                delete_linked_data(path, dist)
                return True
            else:
                # path was the prefix itself: drop its whole cache entry
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # reached the filesystem root without finding a known prefix
            return False
def load_meta(prefix, dist):
    """Return the install metadata for a linked package in *prefix*, or
    None if the package is not linked there."""
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """Return the {dist: metadata} mapping of packages linked into
    *prefix*.

    Memoized by hand in linked_data_ (rather than via memoize) so other
    functions can update individual entries in place.
    """
    cached = linked_data_.get(prefix)
    if cached is not None:
        return cached
    cached = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        json_names = [fn for fn in os.listdir(meta_dir) if fn.endswith('.json')]
        for fn in json_names:
            load_linked_data(prefix, fn[:-5])
    return cached
def linked(prefix):
    """Return the set of canonical names of packages linked in *prefix*."""
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """Return the install metadata dict when *dist* is linked into
    *prefix*, else None.

    FIXME Functions that begin with `is_` should return True/False; this
    one returns the metadata for backward compatibility.
    """
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
    """Best-effort purge of the .trash directory in every package cache.

    *prefix* is accepted for backward compatibility and is unused.
    """
    for cache_dir in pkgs_dirs:
        target = _get_trash_dir(cache_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % target)
            rm_rf(target, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (target, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create this cache's trash dir; try the next cache
                continue
        # unique subdirectory per call, mirroring the path's layout
        # relative to the root so restorations remain possible
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # moved successfully: drop it from the linked-data cache too
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                # clobber any pre-existing file at the destination
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-replaced files, no-link files and symlinks in the
            # cache must be copied, never hard-linked
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # the special '_cache' package gets no metadata/menus/scripts
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a post-link script may leave an alternate file listing in
            # conda-meta/<dist>.files; prefer it over info/files if present
            alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            # remember each containing directory so empties can be pruned
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError: # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        # in-use files on Windows cannot be deleted; move
                        # them to the trash for later cleanup
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor directory up to (but excluding) prefix
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest first, so children are removed before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print, then delete, the .messages.txt file that packages may drop
    into *prefix* during linking."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            text = fi.read()
        sys.stdout.write(text)
    except IOError:
        # no messages file -- nothing to show
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """Return the sorted list of distributions to remove so each package
    name keeps exactly one distribution.

    *keep_dists* is an iterable of distributions that must never be
    removed.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of its distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        if len(group) == 1:
            # only one dist for this name -- nothing to prune
            continue
        protected = group & keep_dists
        if protected:
            # drop every member of the group that is not protected
            removals |= group - keep_dists
        else:
            # nothing protected: keep only the highest-sorting dist
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """Minimal CLI used by the self-extracting installer: link extracted
    packages (optionally listed in --file) into --prefix."""
    from optparse import OptionParser

    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")

    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # BUG FIX: this line was `pkgs_dirs[0] = [pkg_dir]`, which (a) used
    # the undefined name `pkg_dir` (NameError at runtime) and (b) nested
    # a list inside pkgs_dirs.  Replace the contents in place so any
    # other module holding a reference to pkgs_dirs sees the update.
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    # probe once whether hard links work between the cache and the prefix
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)

    messages(prefix)

    # prune stale duplicate metadata for packages superseded by this run
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
main()
<|code_end|>
conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath)
from conda import install
from conda import utils
from conda.compat import iteritems, itervalues
from conda.config import is_url, url_channel, root_dir, envs_dirs
from conda.fetch import fetch_index
from conda.instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
from conda.plan import execute_actions
from conda.resolve import Resolve, MatchSpec
from conda.utils import md5_file
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.

    When exclude_self_build is true, packages whose metadata carries a
    'file_hash' entry are skipped.
    """
    installed = set()
    for dist in install.linked(prefix):
        meta = install.is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            continue
        installed.update(meta['files'])
    return installed
url_pat = re.compile(r'(?P<url>.+)/(?P<fn>[^/#]+\.tar\.bz2)'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    # Install an explicit list of package URLs/paths (an '@EXPLICIT' spec
    # file) into *prefix*, building and executing a fetch/extract/link
    # action plan without invoking the solver.  Returns the actions dict.
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []
    channels = {}
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)

        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)

        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                # evict any same-named package occupying the download slot
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                actions[FETCH].append(dist)
                if md5:
                    # Need to verify against the package index
                    verifies.append((dist + '.tar.bz2', md5))
                # NOTE(review): schannel is only assigned on the
                # "prefix is None" branch above; when the URL was found
                # via cached_url this line can raise NameError -- confirm.
                channels[url_p + '/'] = (schannel, 0)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Finish the MD5 verification
    if verifies:
        index = fetch_index(channels, **fetch_args)
        for fn, md5 in verifies:
            info = index.get(fn)
            if info is None:
                sys.exit("Error: no package '%s' in index" % fn)
            if 'md5' not in info:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
            if info['md5'] != md5:
                sys.exit(
                    'MD5 mismatch for: %s\n   spec: %s\n   repo: %s'
                    % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Strip *prefix* plus one separator character from *path*.

    On Windows, backslashes are normalized to '/' unless
    windows_forward_slashes is False.
    """
    trimmed = path[len(prefix) + 1:]
    if windows_forward_slashes and sys.platform == 'win32':
        trimmed = trimmed.replace('\\', '/')
    return trimmed
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are returned relative to *prefix*.  By default, conda's own
    bookkeeping files/directories and its bin entry-point scripts are
    excluded; on Windows the result uses forward slashes unless
    windows_forward_slashes is False.
    """
    res = set()
    prefix = abspath(prefix)
    # Conda-owned top-level entries that are not part of any package.
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # Entry-point scripts in bin/ that belong to conda itself.
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # Only the top-level bin/ directory gets the binignore filter.
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            # Symlinked directories are recorded as entries themselves;
            # os.walk does not follow them (followlinks defaults to False).
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    res.add(relpath(path, prefix))
    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.

    Backup files ('~'), macOS .DS_Store files, and .pyc files whose .py
    source is conda-managed are not considered untracked.
    """
    conda_files = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - conda_files:
        if path.endswith('~'):
            continue
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue
        if path.endswith('.pyc') and path[:-1] in conda_files:
            continue
        result.add(path)
    return result
def which_prefix(path):
    """
    given the path (to a (presumably) conda installed file) return the
    environment prefix in which the file in located
    """
    prefix = abspath(path)
    # Walk upward until a directory containing conda-meta is found.
    while not isdir(join(prefix, 'conda-meta')):
        parent = dirname(prefix)
        if parent == prefix:
            # Reached the filesystem root without finding a prefix.
            return None
        prefix = parent
    return prefix
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from. Usually the iteration yields
    only one package.
    """
    target = abspath(path)
    prefix = which_prefix(target)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % target)
    for dist in install.linked(prefix):
        meta = install.is_linked(prefix, dist)
        for f in meta['files']:
            if abspath(join(prefix, f)) == target:
                yield dist
                break
def discard_conda(dists):
    """Return *dists* as a list with any 'conda' package removed."""
    kept = []
    for dist in dists:
        if install.name_dist(dist) != 'conda':
            kept.append(dist)
    return kept
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if sys.platform != 'win32':
        return
    if not exists(join(root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    with open(join(prefix, '.nonadmin'), 'w') as fo:
        fo.write('')
def append_env(prefix):
    """Record *prefix* in ~/.conda/environments.txt (best effort)."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        with open(join(conda_dir, 'environments.txt'), 'a') as fh:
            fh.write('%s\n' % prefix)
    except IOError:
        # Never let this bookkeeping failure break environment creation.
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Conda itself (and anything depending on it) is excluded.  Untracked
    files are copied over with any embedded prefix1 text rewritten to
    prefix2; packages are then installed via explicit().  Returns
    (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)
    # Discard conda and any package that depends on it
    drecs = install.linked_data(prefix1)
    # NOTE: 'filter' shadows the builtin; it maps excluded name -> dist.
    filter = {}
    found = True
    # Fixed-point loop: keep excluding packages until no new transitive
    # dependent of 'conda' is discovered.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print('The following packages cannot be cloned out of the root environment:')
        for pkg in itervalues(filter):
            print(' - ' + pkg)
    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        # NOTE(review): schannel is computed but unused here -- verify.
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url
    r = Resolve(index)
    # Install in dependency order so link scripts see their deps in place.
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]
    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # Preserve symlinks verbatim rather than copying their target.
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue
        try:
            # Text files get any embedded old prefix rewritten.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)
    actions = explicit(urls, prefix2, verbose=not quiet,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the given local package tarballs into *prefix* via explicit()."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Return (binpath, env): the env's script dir and an environment dict
    whose PATH has that dir prepended.
    """
    # prepend the bin directory to the path
    script_dir = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = script_dir % abspath(prefix)
    # copy existing environment variables, but not anything with PATH in it
    env = {k: v for k, v in iteritems(os.environ) if k != 'PATH'}
    env['PATH'] = os.path.pathsep.join([binpath, os.getenv('PATH')])
    return binpath, env
def make_icon_url(info):
    """Build the icon URL for a package record, or '' if it has none.

    The icon lives under <channel-parent>/icons/<icon-filename>.
    """
    if 'channel' not in info or 'icon' not in info:
        return ''
    return '%s/icons/%s' % (dirname(info['channel']), info['icon'])
def list_prefixes():
    """Yield every environment prefix conda knows about, ending with root_dir."""
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for entry in sorted(os.listdir(envs_dir)):
            # Hidden entries are not environments.
            if entry.startswith('.'):
                continue
            prefix = join(envs_dir, entry)
            if isdir(prefix):
                yield prefix
    yield root_dir
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
# Prefer the real conda implementations; fall back to minimal stand-ins so
# this module keeps working when imported outside a conda installation.
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # No-op context manager standing in for conda.lock.Locked.
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa
        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # Minimal version of conda.utils.url_path.
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
# True when running on Windows; gates all of the win32-specific helpers below.
on_win = bool(sys.platform == "win32")
if on_win:
    import ctypes
    from ctypes import wintypes
    # Direct Win32 API bindings for creating links.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # Older Windows (XP) has no CreateSymbolicLinkW.
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X
        os.chmod(dst, mode)
# Module loggers; 'stdoutlog' is conda's user-facing console logger.
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
# Silence this module's logger unless the application configures handlers.
log.addHandler(NullHandler())
# Link-type codes understood by _link()/link(); link_name_map provides the
# human-readable names used in messages and metadata.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize *src* at *dst* using the requested link type.

    Raises Exception for an unknown *linktype*.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
        return
    if linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
        return
    if linktype == LINK_COPY:
        # Relative symlinks are reproduced as symlinks rather than being
        # dereferenced (never on Windows, which always copies).
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror hook: log why *path* could not be removed
    instead of raising."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    # Windows needs extra measures: read-only attributes,
                    # 'rd /s /q', and finally moving the tree to the trash.
                    if on_win:
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): the line below is unreachable
                                # dead code (it follows a bare raise) -- verify.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    # Back off linearly: sleep i seconds before retry i+1.
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """Remove *path* if it is an empty directory; otherwise do nothing.

    Missing, non-empty, and non-directory paths are all silently ignored.
    """
    try:
        os.rmdir(path)
    except OSError:  # doesn't exist, isn't empty, or isn't a directory
        pass
def yield_lines(path):
    """Yield the stripped, non-empty, non-comment lines of the file at *path*.

    Lines whose first non-whitespace character is '#' are skipped.
    Raises IOError if the file cannot be opened.
    """
    # Bug fix: the original iterated over a bare open(path) and leaked the
    # file handle; a context manager closes it deterministically.
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Placeholder prefix baked into relocatable packages at build time; it is
# rewritten to the real install prefix at link time (see update_prefix).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    Each line is either "<placeholder> <mode> <filename>" or, in the old
    format, just a filename -- in which case the default placeholder and
    'text' mode are assumed.  A missing file yields an empty mapping.
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                tokens = shlex.split(line, posix=False)
                placeholder, mode, filename = (t.strip('"\'') for t in tokens)
            except ValueError:
                # Old-style line: just a filename.
                mapping[line] = (prefix_placeholder, 'text')
            else:
                mapping[filename] = (placeholder, mode)
    except IOError:
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement is longer than the
    placeholder, leaving no room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.

    Raises PaddingError when `b` is longer than `a`.
    """
    pad_per_hit = len(a) - len(b)

    def _pad_and_swap(match):
        chunk = match.group()
        padding = pad_per_hit * chunk.count(a)
        if padding < 0:
            raise PaddingError(a, b, padding)
        return chunk.replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_and_swap, data)
    # Padding must have kept the overall length constant.
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite a too-long '#!' line to use '#!/usr/bin/env <basename>'.

    Shebang lines longer than 127 bytes are truncated by the kernel, so a
    long interpreter path is replaced with an env lookup of its basename.
    Only 'text' mode is handled; other modes return *data* unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    shebang_match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if not shebang_match:
        return data
    whole_shebang, executable, options = shebang_match.groups()
    if len(whole_shebang) > 127:
        basename = executable.decode('utf-8').split('/')[-1]
        replacement = '#!/usr/bin/env {0}{1}'.format(basename,
                                                     options.decode('utf-8'))
        data = data.replace(whole_shebang, replacement.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Replace *placeholder* with *new_prefix* in the bytes *data*.

    mode 'text': plain substitution (the data length may change).
    mode 'binary': null-padded substitution preserving the data length.
    Any other mode aborts the process with an error message.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # Bug fix: the original used "Invalid mode:" % mode, which raises
        # TypeError instead of exiting with a readable message.
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite occurrences of *placeholder* in the file at *path* to *new_prefix*.

    The file is rewritten in place (permission bits preserved) only when the
    substitution actually changed its contents.
    """
    if on_win and placeholder != prefix_placeholder and '/' in placeholder:
        # The recorded placeholder uses unix-style separators, so the
        # replacement prefix must match.
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = fi.read()
    data = replace_prefix(mode, original_data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Unlink before rewriting so a hard-linked copy in the package cache is
    # not modified through the shared inode.
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def _dist2pair(dist):
dparts = dist.split('::', 1)
return ('defaults', dparts[0]) if len(dparts) == 1 else dparts
def name_dist(dist):
    """Return the package name from '[channel::]name-version-build'."""
    without_channel = dist.split('::', 1)[-1]
    return without_channel.rsplit('-', 2)[0]
def _dist2filename(dist, suffix='.tar.bz2'):
return dist.split('::', 1)[-1] + suffix
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Reads info/index.json from *info_dir*, merges in *extra_info*, writes
    <prefix>/conda-meta/<dist>.json, and refreshes the in-memory
    linked-data cache if this prefix is already loaded.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # Keep the linked-data cache consistent with what we just wrote.
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.

    All failures are logged and swallowed: menu creation is best-effort and
    must never break package linking.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except Exception:
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any import failure is still just logged.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # Same narrowing as above; shortcut failures are non-fatal.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    A missing script counts as success.  The script receives ROOT_PREFIX,
    PREFIX, PKG_NAME, PKG_VERSION, PKG_BUILDNUM (and SOURCE_DIR for
    pre-link) in its environment.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # No COMSPEC -- cannot run .bat files.
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # Bug fix: work on a copy so the per-package variables do not leak into
    # (and accumulate in) this conda process's own os.environ.
    env = os.environ.copy()
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'] = str(dist).rsplit('-', 2)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    if not urls:
        return None
    return urls[0]
def read_icondata(source_dir):
    """Return the package icon (info/icon.png under *source_dir*) as base64
    text, or None if the file is missing/unreadable."""
    import base64
    try:
        # Bug fix: the original's open(...).read() one-liner leaked the file
        # handle; a context manager closes it deterministically.
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        return None
def read_no_link(info_dir):
    """Return the union of entries from info/no_link and info/no_softlink
    (either file may be absent)."""
    entries = set()
    for fname in ('no_link', 'no_softlink'):
        try:
            entries.update(yield_lines(join(info_dir, fname)))
        except IOError:
            pass
    return entries
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
# do not symlink root env - this clobbers activate incorrectly.
if normpath(prefix) == normpath(sys.prefix):
return
if on_win:
where = 'Scripts'
symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
else:
where = 'bin'
symlink_fn = os.symlink
if not isdir(join(prefix, where)):
os.makedirs(join(prefix, where))
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from *root_dir* into <prefix>/<where>
    using *symlink_fn*."""
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for script in ("conda", "activate", "deactivate"):
        root_file = join(root_dir, where, script)
        prefix_file = join(prefix_where, script)
        # try to kill stale links if they exist
        if os.path.lexists(prefix_file):
            os.remove(prefix_file)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(prefix_file):
            symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard-linking from *pkgs_dir* into *prefix* works.

    Creates (and always removes) a throwaway link of the package's
    info/index.json inside *prefix*.  Returns True only if a genuine hard
    link was produced; False on OSError or when the filesystem silently
    substituted a symlink.
    """
    dist = _dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # Always clean up: the probe link, and the prefix dir if we created
        # it and it is still empty.
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction

# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.

# Maps '<channel-prefix><dist>' -> {'files': [...], 'dirs': [...], 'urls': [...]}
package_cache_ = {}
# Maps tarball path (and its file:// URL form) -> channel prefix string
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename/dist was passed instead of a full URL.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    # Compressed tarball, if present in pdir.
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # Extracted directory, only counted when it looks complete.
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    # Record the channel prefix under both the path and file:// URL forms.
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Persist the URL so future runs can re-identify the channel.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # Iterate newest-first so the most recent URL for a file wins.
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # NOTE(review): this listdir is outside the try above; a missing
        # packages directory would raise OSError here -- verify intent.
        for fn in os.listdir(pdir):
            add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url* in the cache, or None."""
    package_cache()  # ensure the lookup tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse whichever location holds it now.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = _dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # NOTE(review): on the second pass (p == 1) with no cached
                # entry, *prefix* is None and 'prefix + dname' would raise
                # TypeError -- verify this combination cannot occur.
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both keyed forms (plain path and file:// URL) from the table.
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for dname in rec['dirs']:
        with Locked(dirname(dname)):
            rm_rf(dname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for dname in rec['dirs']:
        with Locked(dirname(dname)):
            rm_rf(dname)
    if rec['files']:
        # The tarball is still cached; just forget the extracted copies.
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        # Extract into a .tmp sibling first, then swap into place, so a
        # partially-extracted tree is never visible at the final path.
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # Refresh the cache record now that an extracted copy exists.
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.

# Maps prefix -> {dist: metadata record}; see linked_data()/load_linked_data().
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load (or register) the conda-meta record for *dist* in *prefix*.

    When *rec* is None it is read from <prefix>/conda-meta/<name>.json;
    otherwise the given record is inserted directly.  Returns the record,
    or None if the metadata file is missing.
    """
    schannel, dname = _dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Ensure the per-prefix cache dict exists before inserting below.
        linked_data(prefix)
    url = rec.get('url')
    # Backfill fields that older metadata files may lack.
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        url = rec['url'] = rec['channel'] + rec['fn']
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    # Key includes the channel prefix except for the default channel.
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Forget *dist* in *prefix*'s cached link data; optionally unlink its
    conda-meta .json file as well."""
    recs = linked_data_.get(prefix)
    if recs is not None and dist in recs:
        del recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', _dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # *path* is a prefix; *dist* is the entry underneath it.
                delete_linked_data(path, dist)
            else:
                # *path* is itself a cached prefix; drop it wholesale.
                del linked_data_[path]
            return True
        path, dist = os.path.split(path)
        if not dist:  # reached the filesystem root without a match
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized (rather than decorated) so entries can be updated
    # in place as packages are linked and unlinked.
    if prefix in linked_data_:
        return linked_data_[prefix]
    recs = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fn in os.listdir(meta_dir):
            if fn.endswith('.json'):
                load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """Return the install metadata record for *dist* linked in *prefix*,
    or None if it is not linked there."""
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def delete_trash(prefix=None):
    """Best-effort removal of every package cache's .trash directory.

    *prefix* is accepted for API compatibility but unused.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """Deprecated wrapper around `move_path_to_trash`.

    Moves `prefix`/`f` (or `prefix` itself when `f` is falsy) to the
    trash.  `tempdir` is a deprecated parameter and is ignored.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each package cache in turn; returns True after the first
    successful move, False if every cache failed.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create this cache's trash dir; try the next cache
                continue
        # a fresh unique subdirectory, so repeated trashings of the same
        # path never collide
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the path's location relative to the install root inside
        # the trash directory
        trash_dir = join(trash_dir, relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # the path is gone from its prefix; drop any cached metadata
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links (or copies) every file
    listed in info/files, rewrites embedded placeholder prefixes,
    creates menu shortcuts, runs the post-link script, and finally
    writes the conda-meta JSON record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # files whose contents embed the build-time prefix and need rewriting
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                # clobber an existing file; on Windows an in-use file
                # cannot be unlinked, so fall back to the trash
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-rewritten and no-link files must be real copies;
            # symlinks in the cache are copied as symlinks
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # the special _cache package is never registered in conda-meta
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # assemble the conda-meta record, starting from the repodata
        # entry for this dist when available
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a conda-meta/<dist>.files hint overrides info/files when
            # present; it is consumed (unlinked) here -- TODO confirm who
            # writes this file
            alt_files_path = join(prefix, 'conda-meta', _dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes menu shortcuts and every file
    recorded in the package's metadata, drops the conda-meta record, and
    prunes directories left empty.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # remove menu shortcuts before the files backing them disappear
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        # Windows cannot unlink in-use files; trash instead
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor directory (inside the prefix) of the
        # removed files so empty ones can be pruned, deepest first
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt (if any) to stdout,
    then remove the file."""
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as fh:
            sys.stdout.write(fh.read())
    except IOError:
        pass
    finally:
        rm_rf(msg_file)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Return the sorted list of distributions to remove so that at most
    one distribution per package name remains.  Distributions listed in
    `keep_dists` (an iterable) are never removed.
    """
    from collections import defaultdict
    protected = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of its distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # `group` holds every distribution sharing one package name
        if len(group) == 1:
            # a unique distribution is always kept
            continue
        if group & protected:
            # keep the protected members, remove everything else
            removals |= group - protected
        else:
            # nothing protected: keep only the highest-sorting dist
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """CLI entry point used by the (self-extracting) installer to link
    extracted packages into a prefix."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # Point the module-level package-cache list at this prefix's cache.
    # (Previously `pkgs_dirs[0] = [pkgs_dir]`, which stored a *list* as
    # the first element of a list of strings and broke every consumer
    # that iterates pkgs_dirs expecting directory paths.)
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    # probe once whether hard links work between the cache and the prefix
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)
    messages(prefix)
    # rename (or remove) stale conda-meta records of older builds so
    # only the freshly linked distributions remain registered
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
    main()
conda/misc.py
# this module contains miscellaneous stuff which eventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath)
from conda import install
from conda import utils
from conda.compat import iteritems, itervalues
from conda.config import is_url, url_channel, root_dir, envs_dirs
from conda.fetch import fetch_index
from conda.instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
from conda.plan import execute_actions
from conda.resolve import Resolve, MatchSpec
from conda.utils import md5_file
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.
    """
    files = set()
    for dist in install.linked(prefix):
        meta = install.is_linked(prefix, dist)
        # locally-built packages carry a 'file_hash' entry in their metadata
        if exclude_self_build and 'file_hash' in meta:
            continue
        files.update(meta['files'])
    return files
# Matches "<url>/<name>.tar.bz2" with an optional "#<md5>" fragment -- the
# line format emitted by `conda list --explicit`.
# Fix: the fragment group previously read "(:?#...)", a typo for the
# non-capturing group "(?:#...)"; as written it also accepted a stray
# ':' before the '#'.
url_pat = re.compile(r'(?P<url>.+)/(?P<fn>[^/#]+\.tar\.bz2)'
                     r'(?:#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    """Install an explicit list of package URLs/paths (the format written
    by ``conda list --explicit``) into `prefix`.

    Builds a plan of fetch/extract/unlink/link actions, verifies MD5s
    against the channel index, executes the plan, and returns the
    actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {install.name_dist(dist): dist for dist in install.linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []  # (filename, md5) pairs to check against the index
    channels = {}  # channel URLs whose repodata will be fetched
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
        if not is_url(url):
            if not isfile(url):
                sys.exit('Error: file not found: %s' % url)
            url = utils.url_path(url)
        url_p, fn = url.rsplit('/', 1)
        # See if the URL refers to a package in our cache
        # NOTE(review): this rebinds the function parameter `prefix` as a
        # channel prefix for the rest of the loop body; the target-prefix
        # value is not used again below, but the shadowing is confusing
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = install.cached_url(url)
        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]
        pkg_path = install.is_fetched(dist)
        dir_path = install.is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                _, conflict = install.find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                # NOTE(review): `schannel` is only bound when the package
                # was NOT found via install.cached_url() above; a cached
                # package that still needs fetching would raise NameError
                # here -- confirm
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = install.name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))
    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n spec: %s\n repo: %s'
                % (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return `path` relative to `prefix` (which must be a parent),
    optionally normalizing backslashes to '/' on Windows."""
    relative = path[len(prefix) + 1:]
    if windows_forward_slashes and sys.platform == 'win32':
        relative = relative.replace('\\', '/')
    return relative
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are relative to `prefix`.  Conda's own control files and
    directories are skipped unless `ignore_predefined_files` is False.
    Symlinked directories are recorded as entries but not descended into
    (os.walk does not follow them by default).
    """
    res = set()
    prefix = abspath(prefix)
    # top-level entries that belong to conda itself, not to packages
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # conda's own entry points living in bin/
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # record the symlink itself; it is not walked into
                    res.add(relpath(path, prefix))
    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.

    Editor backups, macOS .DS_Store files, and .pyc files whose .py
    source is conda-managed are excluded.
    """
    managed = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - managed:
        if path.endswith('~'):
            continue
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue
        if path.endswith('.pyc') and path[:-1] in managed:
            continue
        result.add(path)
    return result
def which_prefix(path):
    """
    given the path (to a (presumably) conda installed file) return the
    environment prefix in which the file is located
    """
    current = abspath(path)
    while True:
        if isdir(join(current, 'conda-meta')):
            # a conda-meta directory marks an environment root
            return current
        parent = dirname(current)
        if parent == current:
            # reached the filesystem root without finding a prefix
            return None
        current = parent
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from. Usually the iteration yields
    only one package.
    """
    target = abspath(path)
    prefix = which_prefix(target)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % target)
    for dist in install.linked(prefix):
        meta = install.is_linked(prefix, dist)
        owns_file = any(abspath(join(prefix, f)) == target
                        for f in meta['files'])
        if owns_file:
            yield dist
def discard_conda(dists):
    """Return `dists` with any 'conda' package filtered out."""
    return [d for d in dists if install.name_dist(d) != 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if sys.platform != 'win32':
        return
    if not exists(join(root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    # an empty marker file is all that is needed
    with open(join(prefix, '.nonadmin'), 'w') as fo:
        fo.write('')
def append_env(prefix):
    """Record `prefix` in ~/.conda/environments.txt, ignoring I/O errors."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        with open(join(conda_dir, 'environments.txt'), 'a') as f:
            f.write('%s\n' % prefix)
    except IOError:
        # best effort only; e.g. a read-only home directory is not fatal
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Packages are re-installed from their original URLs; untracked files
    are copied with embedded prefix paths rewritten.  Returns
    (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)
    # Discard conda and any package that depends on it
    drecs = install.linked_data(prefix1)
    filter = {}
    found = True
    # fixed-point iteration: keep adding packages whose dependency chain
    # reaches 'conda' until a full pass adds nothing new
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print('The following packages cannot be cloned out of the root environment:')
        for pkg in itervalues(filter):
            print(' - ' + pkg)
    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url
    # order dists so that dependencies install before their dependents
    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]
    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # preserve symlinks as symlinks
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue
        try:
            # rewrite absolute paths pointing at the old prefix inside
            # text files; binary files are copied verbatim
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)
    actions = explicit(urls, prefix2, verbose=not quiet,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the given local package files into `prefix` via explicit()."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Return (binpath, env): the environment's bin/Scripts directory
    and a copy of os.environ with that directory prepended to PATH.
    """
    # prepend the bin directory to the path
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    # os.getenv('PATH') may be None in a stripped-down environment,
    # which used to crash pathsep.join; fall back to an empty string
    path = os.path.pathsep.join([binpath, os.environ.get('PATH', '')])
    env = {'PATH': path}
    # copy existing environment variables, except PATH itself which has
    # just been rebuilt above
    for k, v in iteritems(os.environ):
        if k != 'PATH':
            env[k] = v
    return binpath, env
def make_icon_url(info):
    """Build the URL of a package's icon from its repodata record.

    Returns '' when the record has no channel or no icon entry.
    """
    if 'channel' not in info or 'icon' not in info:
        return ''
    base_url = dirname(info['channel'])
    return '%s/icons/%s' % (base_url, info['icon'])
def list_prefixes():
    """Yield every environment prefix conda knows about, ending with the
    root environment."""
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for name in sorted(os.listdir(envs_dir)):
            # hidden entries are not environments
            if name.startswith('.'):
                continue
            candidate = join(envs_dir, name)
            if isdir(candidate):
                yield candidate
    yield root_dir
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -825,9 +825,11 @@ def load_linked_data(prefix, dist, rec=None):
else:
linked_data(prefix)
url = rec.get('url')
- channel, schannel = url_channel(url)
if 'fn' not in rec:
rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
+ if not url and 'channel' in rec:
+ url = rec['url'] = rec['channel'] + rec['fn']
+ channel, schannel = url_channel(url)
rec['channel'] = channel
rec['schannel'] = schannel
cprefix = '' if schannel == 'defaults' else schannel + '::'
conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -93,11 +93,9 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
_, conflict = install.find_new_location(dist)
if conflict:
actions[RM_FETCHED].append(conflict)
+ channels[url_p + '/'] = (schannel, 0)
actions[FETCH].append(dist)
- if md5:
- # Need to verify against the package index
- verifies.append((dist + '.tar.bz2', md5))
- channels[url_p + '/'] = (schannel, 0)
+ verifies.append((dist + '.tar.bz2', md5))
actions[EXTRACT].append(dist)
# unlink any installed package with that name
@@ -106,19 +104,21 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
actions[UNLINK].append(linked[name])
actions[LINK].append(dist)
+ # Pull the repodata for channels we are using
+ if channels:
+ index.update(fetch_index(channels, **fetch_args))
+
# Finish the MD5 verification
- if verifies:
- index = fetch_index(channels, **fetch_args)
- for fn, md5 in verifies:
- info = index.get(fn)
- if info is None:
- sys.exit("Error: no package '%s' in index" % fn)
- if 'md5' not in info:
- sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
- if info['md5'] != md5:
- sys.exit(
- 'MD5 mismatch for: %s\n spec: %s\n repo: %s'
- % (fn, md5, info['md5']))
+ for fn, md5 in verifies:
+ info = index.get(fn)
+ if info is None:
+ sys.exit("Error: no package '%s' in index" % fn)
+ if md5 and 'md5' not in info:
+ sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
+ if md5 and info['md5'] != md5:
+ sys.exit(
+ 'MD5 mismatch for: %s\n spec: %s\n repo: %s'
+ % (fn, md5, info['md5']))
execute_actions(actions, index=index, verbose=verbose)
return actions |
install.package_cache() broken when the cache directory does not exist
I just ran into this bug when building a package with `conda-build`. Before building the package, I removed the cache directory `pkgs`. The traceback is as follows:
```
Traceback (most recent call last):
File "/home/ilan/minonda/bin/conda-build", line 6, in <module>
exec(compile(open(__file__).read(), __file__, 'exec'))
File "/home/ilan/conda-build/bin/conda-build", line 5, in <module>
sys.exit(main())
File "/home/ilan/conda-build/conda_build/main_build.py", line 140, in main
args_func(args, p)
File "/home/ilan/conda-build/conda_build/main_build.py", line 380, in args_func
args.func(args, p)
File "/home/ilan/conda-build/conda_build/main_build.py", line 367, in execute
build.test(m)
File "/home/ilan/conda-build/conda_build/build.py", line 613, in test
rm_pkgs_cache(m.dist())
File "/home/ilan/conda-build/conda_build/build.py", line 386, in rm_pkgs_cache
plan.execute_plan(rmplan)
File "/home/ilan/conda/conda/plan.py", line 583, in execute_plan
inst.execute_instructions(plan, index, verbose)
File "/home/ilan/conda/conda/instructions.py", line 133, in execute_instructions
cmd(state, arg)
File "/home/ilan/conda/conda/instructions.py", line 65, in RM_FETCHED_CMD
install.rm_fetched(arg)
File "/home/ilan/conda/conda/install.py", line 749, in rm_fetched
rec = package_cache().get(dist)
File "/home/ilan/conda/conda/install.py", line 692, in package_cache
for fn in os.listdir(pdir):
OSError: [Errno 2] No such file or directory: '/home/ilan/minonda/pkgs'
```
The problem is that https://github.com/conda/conda/blob/master/conda/install.py#L692 assumes that the directory already exists, which might not be the case.
CC @mcg1969
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op stand-in for conda.lock.Locked; locking is unnecessary in
        # the single-process installer
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa
        def translation(found_path):
            # e.g. "C:\foo\bar" -> "/C/foo/bar"
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # file:// URL for a local path; Windows drive letters become /C|/...
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
on_win = bool(sys.platform == "win32")
if on_win:
    # ctypes bindings for the Win32 link primitives; os.link/os.symlink
    # are not usable here on Windows
    import ctypes
    from ctypes import wintypes
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW does not exist on Windows XP
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X (292 == 0o444)
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
log.addHandler(NullHandler())
# numeric codes for how a cached file is materialized into an
# environment, plus their human-readable names (used in conda-meta)
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize `dst` from `src` as a hard link, soft link, or copy,
    according to `linktype`."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
        return
    if linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
        return
    if linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """onerror callback for shutil.rmtree that downgrades failures to a
    warning with a reason-specific message."""
    err = exc_info[1]
    if err.errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err.errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # retry loop with linear backoff; each retry escalates through
            # Windows-specific fallbacks before sleeping
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # fallback 1: clear read-only bits and retry
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # fallback 2: shell out to `rd /s /q`
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        # fallback 3: move the directory to the trash
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): unreachable -- the raise above
                                # exits before msg is updated; confirm whether
                                # the raise was meant to be removed
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:
        # missing, not empty, or not a directory -- all silently ignored
        pass
def yield_lines(path):
    """Iterate over the meaningful lines of a text file, stripping
    whitespace and skipping blanks and '#' comments."""
    for raw in open(path):
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            yield stripped
# Hard-coded build-time prefix baked into package files; it is replaced
# with the real install prefix at link time (see update_prefix below).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    result = {}
    try:
        for line in yield_lines(path):
            try:
                # new-style entry: "<placeholder> <mode> <filename>"
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                result[f] = (placeholder, mode)
            except ValueError:
                # old-style entry is just a filename; assume the default
                # placeholder in text mode
                result[line] = (prefix_placeholder, 'text')
    except IOError:
        # no has_prefix file means nothing needs substitution
        pass
    return result
class PaddingError(Exception):
    """Raised by binary_replace when the replacement prefix is longer
    than the placeholder, so NUL padding is impossible."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def pad_replace(match):
        # pad each NUL-terminated run so the overall length is preserved
        count = match.group().count(a)
        padding = (len(a) - len(b)) * count
        if padding < 0:
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(pad_replace, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite a '#!' line longer than 127 bytes to use /usr/bin/env,
    since longer shebang lines are truncated by the kernel.  Only text
    mode is handled."""
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    shebang_match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if not shebang_match:
        return data
    whole_shebang, executable, options = shebang_match.groups()
    if len(whole_shebang) <= 127:
        return data
    executable_name = executable.decode('utf-8').split('/')[-1]
    new_shebang = '#!/usr/bin/env {0}{1}'.format(executable_name,
                                                 options.decode('utf-8'))
    return data.replace(whole_shebang, new_shebang.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute `placeholder` with `new_prefix` in `data` (bytes).

    Mode 'text' does a plain byte substitution; mode 'binary' pads with
    NULs so the data length is preserved (see binary_replace).  Any
    other mode aborts with an error message.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # Fix: the previous message was "Invalid mode:" % mode, which has
        # no conversion specifier and raised TypeError instead of
        # reporting the offending mode.
        sys.exit("Invalid mode: %r" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the embedded `placeholder` prefix inside the file at
    `path` to `new_prefix`, honoring the file's 'text'/'binary' mode.

    The file is rewritten in place only when its contents change, and
    its permission bits are preserved.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        # overly long shebang lines are truncated by the kernel
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into a (channel, dist-name) tuple.

    An optional '[features]' suffix and '.tar.bz2' extension are
    stripped; the channel defaults to 'defaults' when not given with
    the 'channel::name' syntax.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    head, sep, tail = dist.partition('::')
    return ('defaults', head) if not sep else (head, tail)
def dist2quad(dist):
    """Split a dist into a (name, version, build, channel) 4-tuple,
    padding missing fields with ''."""
    channel, name = dist2pair(dist)
    pieces = name.rsplit('-', 2) + ['', '']
    return (pieces[0], pieces[1], pieces[2], channel)
def dist2name(dist):
    """Return only the package name part of a dist."""
    return dist2quad(dist)[0]
def name_dist(dist):
    """Alias of dist2name, kept for backwards compatibility."""
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Return the dist's filename: channel stripped, `suffix` appended."""
    return dist2pair(dist)[1] + suffix
def dist2dirname(dist):
    """Return the dist's extraction directory name (no extension)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Merges the package's info/index.json with `extra_info`, writes the
    result to <prefix>/conda-meta/<dist>.json, and refreshes the in-memory
    linked-data cache when one exists for this prefix.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # keep the in-memory cache coherent with what we just wrote
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed env names are skipped on purpose
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional; menu creation is best-effort
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # one bad menu file must not abort the rest of the install
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    The script is looked up as <prefix>/bin/.<name>-<action>.sh (Scripts/
    and .bat on Windows); a missing script counts as success.  The child
    inherits os.environ extended with ROOT_PREFIX, PREFIX and the PKG_*
    variables derived from the dist string.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no command interpreter available to run the .bat
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # NOTE(review): this mutates os.environ of the current process, not a
    # copy -- the PKG_*/PREFIX variables leak into subsequent calls.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], env['PKG_CHANNEL'] = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first cached URL recorded for `dist`, or None."""
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """
    Return the package icon (<source_dir>/info/icon.png) encoded as a
    base64 str, or None when the file is missing or unreadable.
    """
    import base64
    icon_path = join(source_dir, 'info', 'icon.png')
    try:
        # Bug fix: the previous version used a bare open().read(), leaking
        # the file handle until garbage collection.
        with open(icon_path, 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    """
    Return the set of file names listed in the package's info/no_link and
    info/no_softlink files; missing files contribute nothing.
    """
    res = set()
    for fn in 'no_link', 'no_softlink':
        try:
            res.update(set(yield_lines(join(info_dir, fn))))
        except IOError:
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """
    Expose the root environment's conda/activate/deactivate scripts inside
    `prefix` (symlinks on unix, .bat redirect files on Windows).
    """
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """
    Link the conda/activate/deactivate scripts from <root_dir>/<where> into
    <prefix>/<where> using `symlink_fn`, replacing stale links when possible.
    """
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        # try to kill stale links if they exist
        if os.path.lexists(prefix_file):
            os.remove(prefix_file)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(prefix_file):
            symlink_fn(root_file, prefix_file)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether hard links work between `pkgs_dir` and `prefix` by
    linking the package's info/index.json into the prefix and checking the
    result; the probe file is always cleaned up.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # bare filename, no channel information available
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # only count the extracted copy when it looks complete
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # register under both the filesystem path and its file:// URL form
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Sentinel stops add_cached_package() from recursing back into this scan
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # reversed so the most recently recorded URL wins
            with open(join(pdir, 'urls.txt')) as fi:
                data = fi.read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # Bug fix: os.listdir() was previously outside any guard, so a
        # configured-but-missing package directory crashed the whole scan
        # instead of being skipped.
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for `url` in the cache, or None."""
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # second pass reports the conflicting cached package name
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return set(dist for dist, rec in package_cache().items() if rec['files'])
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    # returns the first recorded copy; falls through to None when absent
    for fn in package_cache().get(dist, {}).get('files', ()):
        return fn
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both path- and url-keyed entries from the filename table
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return set(dist for dist, rec in package_cache().items() if rec['dirs'])
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    # returns the first recorded directory; falls through to None when absent
    for fn in package_cache().get(dist, {}).get('dirs', ()):
        return fn
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # the tarball is still cached, so keep the entry but clear its dirs
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        # extract into a .tmp dir first so a failed extraction cannot leave
        # a half-populated package directory behind
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            # NOTE(review): extractall() trusts member paths; a hostile
            # archive could escape temp_path (tar path traversal).  Assumed
            # safe because packages come from configured channels.
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """
    Load (or accept) the conda-meta record for `dist` in `prefix`, normalize
    its fn/url/channel fields, store it in the linked_data_ cache, and
    return it.  Returns None when the meta file cannot be read.
    """
    schannel, dname = dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # make sure the per-prefix cache dict exists before we write to it
        linked_data(prefix)
    url = rec.get('url')
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        url = rec['url'] = rec['channel'] + rec['fn']
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    # cache key carries the channel prefix except for 'defaults'
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """
    Drop `dist` from the linked_data_ cache for `prefix`; when delete=True
    also remove its conda-meta .json file from disk.
    """
    recs = linked_data_.get(prefix)
    if recs and dist in recs:
        del recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    # walk upward until a cached prefix is found, remembering the last
    # path component so a dist-level path evicts just that record
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                delete_linked_data(path, dist)
                return True
            else:
                # the path itself is a prefix: drop its whole cache entry
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix).keys())
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def _safe_relpath(path, start_path):
"""
Used in the move_to_trash flow. Ensures that the result does not
start with any '..' which would allow to escape the trash folder
(and root prefix) and potentially ruin the user's system.
"""
result = normpath(relpath(path, start_path))
parts = result.rsplit(os.sep)
for idx, part in enumerate(parts):
if part != u'..':
return os.sep.join(parts[idx:])
return u''
def delete_trash(prefix=None):
    """
    Best-effort removal of the .trash directory in every package cache
    directory; failures are logged and ignored.  `prefix` is unused.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # trash=False prevents rm_rf from recursing back into the trash
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f) if f else prefix)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each package cache directory in turn; returns True on the first
    successful move (also evicting the path from linked_data_), False when
    every candidate fails.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                continue
        # unique subdir per move so names never collide inside the trash
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, _safe_relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links/copies every file listed in
    info/files, performs prefix replacement for has_prefix entries, creates
    menu items, runs post-link, and finally writes the conda-meta record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # files needing prefix replacement, no_link entries, and
            # symlinks must be real copies, not hard links
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            return
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # post-link scripts may leave an alternate file list behind
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs pre-unlink, removes menu items and every file in the package's
    meta record, deletes the conda-meta entry, then prunes now-empty
    directories bottom-up.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor dir up to (but excluding) the prefix
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest-first so children empty out before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """
    Print the contents of <prefix>/.messages.txt (if any) to stdout, then
    delete the file either way.
    """
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Return the sorted list of distributions to remove so that at most one
    distribution per package name remains.  Distributions in `keep_dists`
    are never removed.
    """
    from collections import defaultdict
    keep = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # a package name with a single distribution needs no pruning
        if len(group) == 1:
            continue
        if group & keep:
            # protect the kept ones; everything else in the group goes
            removals.update(group - keep)
        else:
            # nothing protected: keep only the highest-sorting distribution
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """
    Minimal CLI used by the self-extracting installer: link a list of
    distributions (or everything extracted in the cache) into a prefix.
    """
    from optparse import OptionParser

    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")

    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # Bug fix: the previous code did `pkgs_dirs[0] = [pkgs_dir]`, nesting a
    # list inside pkgs_dirs; every consumer iterates pkgs_dirs expecting
    # string paths, so replace the list contents in place instead.
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    # probe once with the first package; hard-link everything if it works
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)

    messages(prefix)

    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op stand-in for conda.lock.Locked (locking is unnecessary in
        # the single-process installer)
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass

    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")

    def url_path(path):
        # file:// URL form of an absolute path (drive letter mangled on win)
        path = abspath(path)
        if sys.platform == 'win32':
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path

    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url

    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'

    pkgs_dirs = [join(sys.prefix, 'pkgs')]
on_win = bool(sys.platform == "win32")
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            # API unavailable (e.g. pre-Vista kernel32)
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
            # Make the new file executable
            # http://stackoverflow.com/a/30463972/1170370
            mode = os.stat(dst).st_mode
            mode |= (mode & 292) >> 2  # copy R bits to X
            os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539

    A handler that silently discards every record.
    """
    def handle(self, record):
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """
    Create `dst` from `src` using the requested link type: LINK_HARD,
    LINK_SOFT, or LINK_COPY.  Raises on any other value.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit on `path`,
    then retry the failed operation `func`."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback that downgrades removal failures to
    warnings, with a specific message for the common errno cases."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # Windows fallbacks: retry clearing read-only bits,
                        # then shell out to `rd`, then move to the trash
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): unreachable -- the bare
                                # `raise` above exits first; the intent was
                                # probably to record the message and retry.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    # back off linearly before the next attempt
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:  # directory might not exist or not be empty
        pass
def yield_lines(path):
    """
    Generate the interesting lines of a text file: each line is stripped of
    surrounding whitespace, and blank lines and '#' comment lines are
    skipped.
    """
    # Bug fix: the previous version iterated a bare open(path), leaving the
    # file handle open until garbage collection; the context manager closes
    # it deterministically when the generator is exhausted or closed.
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    Each line is either "placeholder mode filename" (possibly quoted) or,
    in the legacy format, a bare filename that implies the default
    placeholder in text mode.  A missing file yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # legacy single-field line: just the filename
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # no has_prefix file -> nothing needs prefix replacement
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the replacement is longer than the
    placeholder, leaving no room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def padded_replacement(match):
        # pad with one null per byte saved so the blob keeps its length
        occurrences = match.group().count(a)
        padding = (len(a) - len(b)) * occurrences
        if padding < 0:
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(padded_replacement, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """
    Rewrite a text file's shebang when the whole line exceeds the kernel's
    127-byte limit, pointing it at '#!/usr/bin/env <basename> <options>'.
    Binary data is returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) <= 127:
        return data
    interpreter_name = interpreter.decode('utf-8').split('/')[-1]
    replacement = '#!/usr/bin/env {0}{1}'.format(interpreter_name,
                                                 options.decode('utf-8'))
    return data.replace(shebang, replacement.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """
    Replace every occurrence of `placeholder` with `new_prefix` in `data`.

    mode='text' performs a plain byte substitution (the data length may
    change); mode='binary' delegates to binary_replace(), which null-pads
    the result so the total length is preserved.  Any other mode aborts
    the process via sys.exit().
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    else:
        # Bug fix: '"Invalid mode:" % mode' had no conversion specifier, so
        # it raised "TypeError: not all arguments converted ..." instead of
        # exiting with a readable message.
        sys.exit("Invalid mode: %r" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """
    Rewrite the file at `path` in place, substituting `placeholder` with
    `new_prefix`; no-op when nothing changed.  The file is removed and
    recreated so hard-linked cache copies are not mutated.
    """
    if on_win and (placeholder != prefix_placeholder) and ('/' in placeholder):
        # original prefix uses unix-style path separators
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    # preserve the original permission bits across the rewrite
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """
    Split a dist string into (channel, package); '[features]' and
    '.tar.bz2' suffixes are stripped, and the channel defaults to
    'defaults' when no 'channel::' prefix is present.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    parts = dist.split('::', 1)
    return 'defaults' if len(parts) < 2 else parts[0], parts[-1]
def dist2quad(dist):
    """Return (name, version, build, channel); absent fields are ''."""
    channel, dist = dist2pair(dist)
    # the '+ ["", ""]' pads short splits so the indexing below is safe
    parts = dist.rsplit('-', 2) + ['', '']
    return (parts[0], parts[1], parts[2], channel)
def dist2name(dist):
    """Return only the package-name portion of a dist string."""
    return dist2quad(dist)[0]
def name_dist(dist):
    # Alias of dist2name() (reversed word order); both spellings are called
    # elsewhere in this file and in misc.py.
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Return the package filename (channel part dropped) plus *suffix*."""
    _channel, name = dist2pair(dist)
    return name + suffix
def dist2dirname(dist):
    """Return the directory name an extracted package would occupy."""
    return dist2filename(dist, suffix='')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Merges info/index.json with *extra_info*, writes the result to
    <prefix>/conda-meta/<dist>.json, and refreshes the in-memory
    linked-data cache when this prefix is already cached.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fh:
        record = json.load(fh)
    # add extra info, add to our internal cache
    record.update(extra_info)
    if 'url' not in record:
        record['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    dest = join(meta_dir, dist2filename(dist, '.json'))
    with open(dest, 'w') as fh:
        json.dump(record, fh, indent=2, sort_keys=True)
    if prefix in linked_data_:
        load_linked_data(prefix, dist, record)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/') and f.lower().endswith('.json')]
    if not menu_files:
        return
    if basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed env names are treated as private/internal
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for menu_file in menu_files:
        try:
            menuinst.install(join(prefix, menu_file), remove, prefix)
        except:
            # a single bad menu file should not abort the install
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    script = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(script):
        # no script shipped for this action: trivially successful
        return True
    if on_win:
        if 'COMSPEC' not in os.environ:
            return False
        args = [os.environ['COMSPEC'], '/c', script]
    else:
        interpreter = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [interpreter, script]
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    (env['PKG_NAME'], env['PKG_VERSION'],
     env['PKG_BUILDNUM'], env['PKG_CHANNEL']) = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def read_icondata(source_dir):
    """Return info/icon.png from *source_dir* as base64 text, or None."""
    import base64
    icon_path = join(source_dir, 'info', 'icon.png')
    try:
        with open(icon_path, 'rb') as fh:
            raw = fh.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Collect entries from info/no_link and info/no_softlink into one set."""
    entries = set()
    for name in ('no_link', 'no_softlink'):
        try:
            entries.update(yield_lines(join(info_dir, name)))
        except IOError:
            # missing file simply means "no restrictions of that kind"
            pass
    return entries
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Expose the root conda/activate/deactivate scripts inside *prefix*."""
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from *root_dir* into *prefix*."""
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    for name in ("conda", "activate", "deactivate"):
        src = join(root_dir, where, name)
        dst = join(target_dir, name)
        # try to kill stale links if they exist
        if os.path.lexists(dst):
            os.remove(dst)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(dst):
            symlink_fn(src, dst)
# ========================== begin API functions =========================

def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether *prefix* supports hard-links from the package cache.

    Creates (and always removes) a throwaway link to the package's
    index.json; returns True only if a genuine hard link was produced.
    """
    dname = dist2filename(dist, '')
    src = join(pkgs_dir, dname, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dname)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# Maps 'schannel::distname' (no prefix for defaults) -> dict(files, dirs, urls).
package_cache_ = {}
# Maps package paths and file:// URLs -> channel prefix ('' for defaults).
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.

    Mutates the module-level caches ``package_cache_`` and ``fname_table_``.
    """
    # Make sure the cache dictionaries have been populated at least once.
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename was passed (e.g. from os.listdir); no URL is known.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        # Already registered; first registration wins unless overwrite is set.
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # Only count a directory as "extracted" when its essential metadata exists.
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        # Neither a tarball nor an extracted copy present; nothing to record.
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # Record the channel prefix under both the filesystem path and its URL form.
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            # Best effort: remember this URL for future cache scans.
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Sentinel entry stops recursion, since add_cached_package() calls us back.
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            with open(join(pdir, 'urls.txt')) as fh:
                contents = fh.read()
            # walk the file bottom-up so the most recent URLs register first
            for url in contents.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # a package dir may not exist yet; skip scanning it in that case
        if isdir(pdir):
            for entry in os.listdir(pdir):
                add_cached_package(pdir, entry)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    # Look up the channel prefix recorded for *url*, initializing the
    # package cache first. Returns None when the URL is not cached.
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached somewhere: reuse that directory, no eviction needed.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # Pass 0 (p == 0): only accept a directory with no same-name
            # package (prefix is None).  Pass 1: accept the first directory
            # and report the conflicting package (prefix + dname) to evict.
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched

def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    paths = package_cache().get(dist, {}).get('files', ())
    return paths[0] if paths else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for path in rec['files']:
        # drop both the path-keyed and URL-keyed filename-table entries
        del fname_table_[path]
        del fname_table_[url_path(path)]
        with Locked(dirname(path)):
            rm_rf(path)
    for path in rec['dirs']:
        with Locked(dirname(path)):
            rm_rf(path)
    del package_cache_[dist]
# ------- package cache ----- extracted

def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for path in rec['dirs']:
        with Locked(dirname(path)):
            rm_rf(path)
    if rec['files']:
        # the tarball remains in the cache; just forget the extracted copies
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        # Extract into a temp dir first so a failed or partial extraction
        # never masquerades as a complete package.
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # Re-register so the cache records the new extracted directory.
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
# Maps prefix -> {'schannel::distname': metadata dict}.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load (or register) the conda-meta record for *dist* in *prefix*.

    When *rec* is None the record is read from conda-meta/<dist>.json;
    otherwise the supplied dict is registered directly.  Fills in the
    'fn', 'url', 'channel' and 'schannel' fields, stores the record in
    the linked_data_ cache, and returns it.  Returns None when the
    metadata file cannot be read.
    """
    schannel, dname = dist2pair(dist)
    if rec is None:
        meta_file = join(prefix, 'conda-meta', dname + '.json')
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # make sure the cache entry for this prefix exists before we add to it
        linked_data(prefix)
    url = rec.get('url')
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        # older records lack 'url'; reconstruct it from channel + filename
        url = rec['url'] = rec['channel'] + rec['fn']
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the linked-data cache for *prefix*; when *delete*
    is true, also remove its conda-meta JSON file."""
    cache = linked_data_.get(prefix)
    if cache and dist in cache:
        del cache[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    # Peel components off the end of *path* until we hit a cached prefix.
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # *path* is a prefix and *dist* the component we peeled off
                delete_linked_data(path, dist)
            else:
                # *path* itself is a cached prefix; forget it wholesale
                del linked_data_[path]
            return True
        path, dist = os.path.split(path)
        if not dist:
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.

    Thin lookup over the linked_data() cache.
    """
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is not None:
        return recs
    recs = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fname in os.listdir(meta_dir):
            if fname.endswith('.json'):
                load_linked_data(prefix, fname[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
unc_prefix = u'\\\\?\\' if sys.platform == 'win32' else ''
return unc_prefix + join(pkg_dir, '.trash')
def _safe_relpath(path, start_path):
"""
Used in the move_to_trash flow. Ensures that the result does not
start with any '..' which would allow to escape the trash folder
(and root prefix) and potentially ruin the user's system.
"""
result = normpath(relpath(path, start_path))
parts = result.rsplit(os.sep)
for idx, part in enumerate(parts):
if part != u'..':
return os.sep.join(parts[idx:])
return u''
def delete_trash(prefix=None):
    # Best-effort sweep of every package dir's .trash folder.  The *prefix*
    # parameter is accepted for API compatibility but is not used here.
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            # a busy/locked trash dir is fine; it will be retried next time
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    # Backward-compatible wrapper: join prefix and f (f may be empty).
    return move_path_to_trash(join(prefix, f) if f else prefix)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each package cache directory in turn; returns True on the first
    successful move (also purging the path from the linked-data cache),
    False when no trash location worked.
    """
    # Try deleting the trash every time we use it.
    delete_trash()

    from conda.config import root_dir

    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)

        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create this trash dir; try the next package dir
                continue

        # a fresh unique subdirectory avoids collisions between moves
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the original layout (relative to root) inside the trash
        trash_dir = join(trash_dir, _safe_relpath(os.path.dirname(path), root_dir))

        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue

        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            delete_linked_data_any(path)
            return True

    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs pre-link, links/copies every payload file, rewrites prefix
    placeholders, creates menus, runs post-link, and finally writes the
    conda-meta record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # files containing the build prefix placeholder, and files that must
    # never be hard/soft linked, are copied instead of linked below
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                # these files must be real copies, not links
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            # the special _cache package gets no prefix rewriting or metadata
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a post-link script may have written an alternate file list
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs pre-unlink, removes menus and every recorded file, deletes the
    conda-meta record, then prunes any directories left empty.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # collect every ancestor directory of removed files, deepest first,
        # so empty directories can be pruned bottom-up
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print .messages.txt from *prefix* (if any), then remove it."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fh:
            sys.stdout.write(fh.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    keep_dists = set(keep_dists)
    by_name = {}  # map names to the set of distributions with that name
    for dist in dist_metas:
        by_name.setdefault(name_dist(dist), set()).add(dist)

    removals = set()
    for group in by_name.values():
        if len(group) == 1:
            # a unique name needs no pruning
            continue
        if group & keep_dists:
            # keep the protected packages, remove the rest of the group
            removals.update(group - keep_dists)
        else:
            # nothing protected: keep only the highest-sorting package
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================

def main():
    """Entry point for the standalone link tool used by the installer.

    Links every extracted package (or those listed via --file) into the
    target prefix, then prunes duplicate distributions.
    """
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")

    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # Bug fix: pkgs_dirs is a list of directory *strings*; the original
    # 'pkgs_dirs[0] = [pkgs_dir]' stored a list as element 0, which would
    # break every later join(pdir, ...) over pkgs_dirs.  Replace the list
    # contents in place so all existing references see the new value.
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    # probe once whether hard-linking works for this prefix
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)

    messages(prefix)

    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)


if __name__ == '__main__':
    main()
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -689,8 +689,9 @@ def package_cache():
add_cached_package(pdir, url)
except IOError:
pass
- for fn in os.listdir(pdir):
- add_cached_package(pdir, fn)
+ if isdir(pdir):
+ for fn in os.listdir(pdir):
+ add_cached_package(pdir, fn)
del package_cache_['@']
return package_cache_
|
Installing packages from files broken in master
```
((p35)) Z:\msarahan\code\conda>conda install --force z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2
Could not parse explicit URL: z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2
((p35)) Z:\msarahan\code\conda>conda install --offline z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2
Could not parse explicit URL: z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2
((p35)) Z:\msarahan\code\conda>conda install --force ..\..\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2
Could not parse explicit URL: ..\..\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2
```
z: is a mapped network drive - not sure if that makes any difference.
| conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which eventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs
from .fetch import fetch_index
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            # records with a file_hash entry are skipped when the caller
            # asks to exclude self-built packages
            continue
        files.update(meta['files'])
    return files
# Matches "(path-or-url)(#md5)?".  The directory part is optional and may use
# either '/' or '\' separators, so bare filenames and Windows paths (e.g.
# z:\downloads\pkg-1.0-0.tar.bz2) both parse; previously a '/' was required.
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(:?#(?P<md5>[0-9a-f]{32}))?$')


def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    """Install an explicit list of package URLs or local paths into *prefix*.

    Builds a plan of fetch/extract/unlink/link actions, verifies MD5s when
    given, and executes the plan.  Returns the actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {name_dist(dist): dist for dist in install_linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []
    channels = {}
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            if url_p is None:
                # bare filename: interpret relative to the current directory
                url_p = os.curdir
            elif not isdir(url_p):
                sys.exit('Error: file not found: %s' % join(url_p, fn))
            url_p = utils_url_path(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)

        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = cached_url(url)

        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n spec: %s\n repo: %s'
                % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix* (forward slashes on Windows)."""
    rel = path[len(prefix) + 1:]
    if sys.platform == 'win32' and windows_forward_slashes:
        rel = rel.replace('\\', '/')
    return rel
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.
    """
    prefix = abspath(prefix)
    ignored_entries = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
                       'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
                       '.unionfs', '.nonadmin'}
    ignored_bin_files = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignored_entries.update({'python.app', 'Launcher.app'})

    found = set()
    for entry in os.listdir(prefix):
        if ignore_predefined_files and entry in ignored_entries:
            continue
        if isfile(join(prefix, entry)):
            found.add(entry)
            continue
        for root, dirs, files in os.walk(join(prefix, entry)):
            in_bin = ignore_predefined_files and root == join(prefix, 'bin')
            for name in files:
                if in_bin and name in ignored_bin_files:
                    continue
                found.add(relpath(join(root, name), prefix))
            # symlinked directories are recorded as entries themselves
            # (os.walk does not descend into them by default)
            for name in dirs:
                candidate = join(root, name)
                if islink(candidate):
                    found.add(relpath(candidate, prefix))

    if sys.platform == 'win32' and windows_forward_slashes:
        return {p.replace('\\', '/') for p in found}
    return found
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.
    """
    tracked = conda_installed_files(prefix, exclude_self_build)
    # drop editor backups, macOS metadata, and .pyc files whose .py is tracked
    return {path for path in walk_prefix(prefix) - tracked
            if not path.endswith('~')
            and not (sys.platform == 'darwin' and path.endswith('.DS_Store'))
            and not (path.endswith('.pyc') and path[:-1] in tracked)}
def which_prefix(path):
    """
    given the path (to a (presumably) conda installed file) return the
    environment prefix in which the file in located
    """
    candidate = abspath(path)
    while True:
        if isdir(join(candidate, 'conda-meta')):
            # a conda-meta directory marks an environment root
            return candidate
        parent = dirname(candidate)
        if parent == candidate:
            # reached the filesystem root without finding an environment
            return None
        candidate = parent
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from. Usually the iteration yields
    only one package.
    """
    target = abspath(path)
    prefix = which_prefix(target)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % target)
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        owned = (abspath(join(prefix, f)) for f in meta['files'])
        if target in owned:
            yield dist
def discard_conda(dists):
    """Return *dists* without any distribution named 'conda'."""
    return [dist for dist in dists if name_dist(dist) != 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if sys.platform != 'win32' or not exists(join(root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    with open(join(prefix, '.nonadmin'), 'w') as fo:
        fo.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        with open(join(conda_dir, 'environments.txt'), 'a') as fh:
            fh.write('%s\n' % prefix)
    except IOError:
        # an unwritable home directory is not fatal
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Copies untracked files directly (rewriting embedded prefix strings in
    text files) and reinstalls every conda package by URL via explicit().
    Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    # fixed-point iteration: keep adding packages whose dependencies were
    # already filtered, until nothing new is added
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if not quiet and filter:
        print('The following packages cannot be cloned out of the root environment:')
        for pkg in itervalues(filter):
            print(' - ' + pkg)

    # Assemble the URL and channel list
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filter:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url

    # install dependencies before dependents
    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # preserve symlinks as symlinks (target text copied verbatim)
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            # decodable files get their embedded prefix paths rewritten
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    # Thin wrapper: treat local package tarball paths as explicit specs.
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Return (bin directory, environ copy with it prepended to PATH)."""
    # prepend the bin directory to the path
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    path = os.path.pathsep.join([binpath, os.getenv('PATH')])
    # copy existing environment variables, but not anything with PATH in it
    env = {k: v for k, v in iteritems(os.environ) if k != 'PATH'}
    env['PATH'] = path
    return binpath, env
def make_icon_url(info):
    """Build the icon URL for a package record, or '' when it has none."""
    if 'channel' not in info or 'icon' not in info:
        return ''
    base_url = dirname(info['channel'])
    return '%s/icons/%s' % (base_url, info['icon'])
def list_prefixes():
    # Lists all the prefixes that conda knows about (root prefix last).
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for name in sorted(os.listdir(envs_dir)):
            if name.startswith('.'):
                continue
            prefix = join(envs_dir, name)
            if isdir(prefix):
                yield prefix
    yield root_dir
<|code_end|>
| conda/misc.py
# this module contains miscellaneous stuff which eventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs
from .fetch import fetch_index
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.  When *exclude_self_build* is true, packages built
    locally (those carrying a 'file_hash' entry) are skipped.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        # locally-built packages are recognised by their 'file_hash' key
        if exclude_self_build and 'file_hash' in meta:
            continue
        files |= set(meta['files'])
    return files
# Matches "(url|path)(/ or \)name.tar.bz2((:)?#md5)?".
# Groups: url_p  -> optional leading URL or directory (no trailing separator)
#         fn     -> the package filename (must end in .tar.bz2)
#         md5    -> optional 32-hex-digit digest after '#' (a stray ':' before
#                   the '#' is tolerated)
# BUGFIX: the md5 group used to start with "(:?" (a *capturing* group whose
# first char is an optional ':'), almost certainly a typo for a non-capturing
# group.  "(?::?" keeps the exact same matching behaviour without the
# accidental capture.
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(?::?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
    """Install an explicit list of package URLs/paths into *prefix*.

    Builds an action plan (fetch/extract/unlink/link) keyed by instruction
    name, verifies MD5 digests against the channel index when given, then
    executes the plan.  Returns the actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    # map package name -> currently linked dist, so upgrades unlink the old one
    linked = {name_dist(dist): dist for dist in install_linked(prefix)}
    fetch_args = fetch_args or {}
    index = {}
    verifies = []   # (filename, md5) pairs to check after the index is fetched
    channels = {}
    for spec in specs:
        # '@EXPLICIT' is the header line of `conda list --explicit` output
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            # local path: default to the current directory and convert to a
            # file:// URL
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                sys.exit('Error: file not found: %s' % join(url_p, fn))
            url_p = utils_url_path(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)
        # See if the URL refers to a package in our cache
        # NOTE(review): `prefix` (the function parameter) is shadowed here and
        # reused as the channel prefix for the rest of the loop — confirm no
        # later code in this function expects the original environment prefix.
        prefix = pkg_path = dir_path = None
        if url_p.startswith('file://'):
            prefix = cached_url(url)
        # If not, determine the channel name from the URL
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]   # strip the '.tar.bz2' suffix
        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                # an existing cache entry at the target location must go first
                _, conflict = find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                # NOTE(review): `schannel` may be unbound/stale on this branch
                # when the cached_url() path above was taken — verify.
                channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))
    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n spec: %s\n repo: %s'
                % (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix* (which must be a parent of it),
    optionally normalising Windows backslashes to forward slashes."""
    relative = path[len(prefix) + 1:]
    if windows_forward_slashes and sys.platform == 'win32':
        relative = relative.replace('\\', '/')
    return relative
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are returned relative to *prefix*.  When *ignore_predefined_files*
    is true, conda's own bookkeeping directories/files (and the conda entry
    points in bin/) are excluded.  Symlinked directories are recorded as
    entries themselves rather than being descended into for extra files.
    """
    res = set()
    prefix = abspath(prefix)
    # top-level names that belong to conda itself, not to the environment
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # conda's own entry points inside bin/
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # the binignore filter only applies directly inside bin/
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # record the symlink itself; os.walk does not follow it
                    res.add(relpath(path, prefix))
    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix, i.e. files
    present on disk but not recorded by any conda package.  Editor backups,
    macOS .DS_Store files and .pyc files of tracked sources are ignored.
    """
    tracked = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - tracked:
        if path.endswith('~'):
            continue
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue
        if path.endswith('.pyc') and path[:-1] in tracked:
            continue
        result.add(path)
    return result
def which_prefix(path):
    """
    given the path (to a (presumably) conda installed file) return the
    environment prefix in which the file is located, or None if no
    enclosing directory contains a 'conda-meta' subdirectory
    """
    prefix = abspath(path)
    while not isdir(join(prefix, 'conda-meta')):
        parent = dirname(prefix)
        if parent == prefix:
            # reached the filesystem root without finding conda-meta
            return None
        prefix = parent
    return prefix
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from.  Usually the iteration yields
    only one package.

    Raises RuntimeError when no enclosing conda prefix can be found.
    """
    path = abspath(path)
    prefix = which_prefix(path)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % path)
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        # a package owns the file if any of its tracked files resolves to *path*
        if any(abspath(join(prefix, f)) == path for f in meta['files']):
            yield dist
def discard_conda(dists):
    """Return *dists* as a list with any 'conda' package removed."""
    return [dist for dist in dists if name_dist(dist) != 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)

    The marker records that conda was installed without administrator
    rights; it is propagated from the root install into new environments.
    """
    if sys.platform == 'win32' and exists(join(root_dir, '.nonadmin')):
        if not isdir(prefix):
            os.makedirs(prefix)
        # an empty file is enough — only its existence is checked
        with open(join(prefix, '.nonadmin'), 'w') as fo:
            fo.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt.

    Any IOError (unwritable home directory, read-only filesystem, ...) is
    silently ignored — this registry is purely informational.
    """
    try:
        conda_dir = abspath(expanduser('~/.conda'))
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        registry = join(conda_dir, 'environments.txt')
        with open(registry, 'a') as fh:
            fh.write('%s\n' % prefix)
    except IOError:
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Conda itself (and anything depending on it) is excluded, since it can
    only live in the root environment.  Returns ``(actions, untracked_files)``
    where *actions* is the plan executed by :func:`explicit` and
    *untracked_files* is the set of files copied by hand.
    """
    untracked_files = untracked(prefix1)
    # Discard conda and any package that depends on it.  Iterate to a fixed
    # point: each pass may add packages whose dependencies were excluded in
    # an earlier pass.
    drecs = linked_data(prefix1)
    # renamed from `filter` to stop shadowing the builtin
    filtered = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filtered:
                continue
            if name == 'conda':
                filtered['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filtered:
                    filtered[name] = dist
                    found = True
                    break  # one excluded dependency is enough
    if not quiet and filtered:
        print('The following packages cannot be cloned out of the root environment:')
        for pkg in itervalues(filtered):
            print(' - ' + pkg)
    # Assemble the URL and channel list for the packages that remain
    urls = {}
    index = {}
    for dist, info in iteritems(drecs):
        if info['name'] in filtered:
            continue
        url = info.get('url')
        if url is None:
            sys.exit('Error: no URL found for package: %s' % dist)
        _, schannel = url_channel(url)
        index[dist + '.tar.bz2'] = info
        urls[dist] = url
    # Link in dependency order so each package's requirements exist first
    r = Resolve(index)
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]
    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))
    # Copy untracked files by hand, rewriting embedded prefix strings in
    # text files; binary files are copied verbatim
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            # unreadable source file: best effort, skip it
            continue
        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)
    actions = explicit(urls, prefix2, verbose=not quiet,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the local package files *paths* (.tar.bz2 archives) into the
    environment at *prefix*; thin wrapper over :func:`explicit`."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Return ``(binpath, env)`` for running programs out of *prefix*.

    *binpath* is the platform-specific scripts directory of the environment
    (``Scripts`` on Windows, ``bin`` elsewhere) and *env* is a copy of
    ``os.environ`` whose PATH starts with *binpath*.
    """
    # prepend the bin directory to the path
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    # BUGFIX: os.getenv('PATH') is None in a scrubbed environment, and
    # joining None used to raise TypeError; fall back to binpath alone.
    current_path = os.getenv('PATH')
    if current_path is None:
        path = binpath
    else:
        path = os.path.pathsep.join([binpath, current_path])
    env = {'PATH': path}
    # copy every other environment variable verbatim; only the exact key
    # 'PATH' is replaced (e.g. PYTHONPATH is kept as-is)
    for k, v in iteritems(os.environ):
        if k != 'PATH':
            env[k] = v
    return binpath, env
def make_icon_url(info):
    """Build the icon URL for a package record, or '' when the record has
    no channel/icon metadata.  Icons live under ``<channel base>/icons/``."""
    if 'channel' not in info or 'icon' not in info:
        return ''
    channel_base = dirname(info['channel'])
    return '%s/icons/%s' % (channel_base, info['icon'])
def list_prefixes():
    """Yield every environment prefix conda knows about: each non-hidden
    directory inside the configured envs_dirs, then the root prefix."""
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for entry in sorted(os.listdir(envs_dir)):
            if entry.startswith('.'):
                continue
            candidate = join(envs_dir, entry)
            if isdir(candidate):
                yield candidate
    yield root_dir
| conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -9,7 +9,7 @@
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
- isdir, isfile, islink, join, relpath)
+ isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url)
@@ -35,8 +35,8 @@ def conda_installed_files(prefix, exclude_self_build=False):
res.update(set(meta['files']))
return res
-
-url_pat = re.compile(r'(?P<url>.+)/(?P<fn>[^/#]+\.tar\.bz2)'
+url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
+ r'(?P<fn>[^/\\#]+\.tar\.bz2)'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
actions = defaultdict(list)
@@ -55,12 +55,14 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None):
m = url_pat.match(spec)
if m is None:
sys.exit('Could not parse explicit URL: %s' % spec)
- url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5')
- if not is_url(url):
- if not isfile(url):
- sys.exit('Error: file not found: %s' % url)
- url = utils_url_path(url)
- url_p, fn = url.rsplit('/', 1)
+ url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
+ if not is_url(url_p):
+ if url_p is None:
+ url_p = curdir
+ elif not isdir(url_p):
+ sys.exit('Error: file not found: %s' % join(url_p, fn))
+ url_p = utils_url_path(url_p).rstrip('/')
+ url = "{0}/{1}".format(url_p, fn)
# See if the URL refers to a package in our cache
prefix = pkg_path = dir_path = None |
Having trouble downloading from osx-64 on Travis CI
_From @jakirkham on June 14, 2016 20:46_
Seems we are now having issues downloading on packages on Travis CI. See the snippet below and this [log](https://travis-ci.org/conda-forge/apache-libcloud-feedstock/jobs/137608834#L369-L376). Note none of the packages below are on conda-forge (there is `python`, but not 2.7). So, this seems to be a `defaults` channel issue. Log snippet below.
```
BUILD START: apache-libcloud-0.18.0-py27_1
Using Anaconda Cloud api site https://api.anaconda.org
Fetching package metadata .......
WARNING: Could not find any versions of conda-build in the channels
Solving package specifications ....
Error: Packages missing in current osx-64 channels:
- python 2.7*
- setuptools
```
Also, see these PRs for examples
- https://github.com/conda-forge/apache-libcloud-feedstock/pull/3
- https://github.com/conda-forge/colorlog-feedstock/pull/4
- https://github.com/conda-forge/python-feedstock/pull/20
cc @msarahan @danielfrg @frol @jjhelmus @vamega
_Copied from original issue: Anaconda-Platform/support#45_
| conda/cli/main_config.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, error_and_exit, exception_and_exit,
stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class CouldntParse(NotImplementedError):
    # Raised when the existing .condarc cannot be modified while preserving
    # its structure/comments; the message steers the user toward `-f`.
    def __init__(self, reason):
        self.args = ["""Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s""" % reason]
class SingleValueKey(Completer):
    # Tab-completion choices for `conda config --set`: every scalar rc key
    # plus the YAML spellings of booleans.
    def _get_items(self):
        return rc_bool_keys + \
            rc_string_keys + \
            ['yes', 'no', 'on', 'off', 'true', 'false']
class ListKey(Completer):
    # Tab-completion choices for the list-valued rc keys (`--add`/`--remove`).
    def _get_items(self):
        return rc_list_keys
class BoolOrListKey(Completer):
    # Tab-completion choices for `--get`: any list-valued or boolean rc key.
    def __contains__(self, other):
        # argparse tests membership on `choices`; delegate to the items
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Register the `conda config` sub-command and its arguments.

    One location option group (--system / --file) selects which .condarc is
    edited; one mutually-exclusive action group selects exactly one of
    --get / --add / --set / --remove / --remove-key.
    """
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # TODO: use argparse.FileType
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
        config file ({user}).""".format(system=sys_rc_path,
                                        user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar=('KEY'),
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to a list key. The default
        behavior is to prepend.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
    all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
        remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for `conda config`.

    In --json mode known configuration errors are reported as a JSON error
    document; otherwise the exception propagates normally.
    """
    try:
        execute_config(args, parser)
    except (CouldntParse, NotImplementedError) as err:
        if not args.json:
            raise
        exception_and_exit(err, json=True)
def execute_config(args, parser):
    """Apply a `conda config` action (--get/--add/--set/--remove/--remove-key)
    to the selected .condarc file and write the result back.

    Raises CouldntParse / NotImplementedError for structural problems; other
    validation failures exit via error_and_exit.
    """
    json_warnings = []
    json_get = {}

    # pick the target rc file: system-wide, explicit path, or per-user
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # Create the file if it doesn't exist
    if not os.path.exists(rc_path):
        has_defaults = ['channels', 'defaults'] in args.add
        if args.add and 'channels' in list(zip(*args.add))[0] and not has_defaults:
            # If someone adds a channel and their .condarc doesn't exist, make
            # sure it includes the defaults channel, or else they will end up
            # with a broken conda.
            rc_text = """\
channels:
  - defaults
"""
        else:
            rc_text = ""
    else:
        with open(rc_path, 'r') as rc:
            rc_text = rc.read()
    rc_config = yaml_load(rc_text)
    if rc_config is None:
        # empty file parses to None; normalise to an empty mapping
        rc_config = {}

    # Get
    if args.get is not None:
        # bare --get means "show everything"
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue
            if args.json:
                json_get[key] = rc_config[key]
                continue
            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # Add
    for key, item in args.add:
        if key not in rc_list_keys:
            error_and_exit("key must be one of %s, not %r" %
                           (', '.join(rc_list_keys), key), json=args.json,
                           error_type="ValueError")
        if not isinstance(rc_config.get(key, []), list):
            bad = rc_config[key].__class__.__name__
            raise CouldntParse("key %r should be a list, not %s." % (key, bad))
        if key == 'default_channels' and rc_path != sys_rc_path:
            msg = "'default_channels' is only configurable for system installs"
            raise NotImplementedError(msg)
        if item in rc_config.get(key, []):
            # Right now, all list keys should not contain duplicates
            message = "Skipping %s: %s, item already exists" % (key, item)
            if not args.json:
                print(message, file=sys.stderr)
            else:
                json_warnings.append(message)
            continue
        # prepend: the newest entry takes highest priority
        rc_config.setdefault(key, []).insert(0, item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value; the value is parsed as YAML so boolean
        # spellings like "yes"/"off" work
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
                               json=args.json, error_type="TypeError")
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            error_and_exit("Error key must be one of %s, not %s" %
                           (', '.join(set_bools | set_strings), key), json=args.json,
                           error_type="ValueError")

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        if item not in rc_config[key]:
            error_and_exit("%r is not in the %r key of the config file" %
                           (item, key), json=args.json, error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        del rc_config[key]

    # config.rc_keys
    # write the (possibly restructured) config back out
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if args.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
<|code_end|>
| conda/cli/main_config.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, error_and_exit, exception_and_exit,
stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class CouldntParse(NotImplementedError):
def __init__(self, reason):
self.args = ["""Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s""" % reason]
class SingleValueKey(Completer):
def _get_items(self):
return rc_bool_keys + \
rc_string_keys + \
['yes', 'no', 'on', 'off', 'true', 'false']
class ListKey(Completer):
def _get_items(self):
return rc_list_keys
class BoolOrListKey(Completer):
def __contains__(self, other):
return other in self.get_items()
def _get_items(self):
return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'config',
description=descr,
help=descr,
epilog=additional_descr + example,
)
add_parser_json(p)
# TODO: use argparse.FileType
location = p.add_mutually_exclusive_group()
location.add_argument(
"--system",
action="store_true",
help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
user=user_rc_path),
)
location.add_argument(
"--file",
action="store",
help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
default=os.environ.get('CONDARC', user_rc_path)
)
# XXX: Does this really have to be mutually exclusive. I think the below
# code will work even if it is a regular group (although combination of
# --add and --remove with the same keys will not be well-defined).
action = p.add_mutually_exclusive_group(required=True)
action.add_argument(
"--get",
nargs='*',
action="store",
help="Get a configuration value.",
default=None,
metavar=('KEY'),
choices=BoolOrListKey()
)
action.add_argument(
"--add",
nargs=2,
action="append",
help="""Add one configuration value to a list key. The default
behavior is to prepend.""",
default=[],
choices=ListKey(),
metavar=('KEY', 'VALUE'),
)
action.add_argument(
"--set",
nargs=2,
action="append",
help="""Set a boolean or string key""",
default=[],
choices=SingleValueKey(),
metavar=('KEY', 'VALUE'),
)
action.add_argument(
"--remove",
nargs=2,
action="append",
help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
default=[],
metavar=('KEY', 'VALUE'),
)
action.add_argument(
"--remove-key",
nargs=1,
action="append",
help="""Remove a configuration key (and all its values).""",
default=[],
metavar="KEY",
)
p.add_argument(
"-f", "--force",
action="store_true",
help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
)
p.set_defaults(func=execute)
def execute(args, parser):
try:
execute_config(args, parser)
except (CouldntParse, NotImplementedError) as e:
if args.json:
exception_and_exit(e, json=True)
else:
raise
def execute_config(args, parser):
json_warnings = []
json_get = {}
if args.system:
rc_path = sys_rc_path
elif args.file:
rc_path = args.file
else:
rc_path = user_rc_path
# read existing condarc
if os.path.exists(rc_path):
with open(rc_path, 'r') as fh:
rc_config = yaml_load(fh)
else:
rc_config = {}
# add `defaults` channel if creating new condarc file or channel key doesn't exist currently
if 'channels' not in rc_config:
# now check to see if user wants to modify channels at all
if any('channels' in item[0] for item in args.add):
# don't need to insert defaults if it's already in args
if not ['channels', 'defaults'] in args.add:
args.add.insert(0, ['channels', 'defaults'])
# Get
if args.get is not None:
if args.get == []:
args.get = sorted(rc_config.keys())
for key in args.get:
if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
if key not in rc_other:
message = "unknown key %s" % key
if not args.json:
print(message, file=sys.stderr)
else:
json_warnings.append(message)
continue
if key not in rc_config:
continue
if args.json:
json_get[key] = rc_config[key]
continue
if isinstance(rc_config[key], (bool, string_types)):
print("--set", key, rc_config[key])
else:
# Note, since conda config --add prepends, these are printed in
# the reverse order so that entering them in this order will
# recreate the same file
for item in reversed(rc_config.get(key, [])):
# Use repr so that it can be pasted back in to conda config --add
print("--add", key, repr(item))
# Add
for key, item in args.add:
if key not in rc_list_keys:
error_and_exit("key must be one of %s, not %r" %
(', '.join(rc_list_keys), key), json=args.json,
error_type="ValueError")
if not isinstance(rc_config.get(key, []), list):
bad = rc_config[key].__class__.__name__
raise CouldntParse("key %r should be a list, not %s." % (key, bad))
if key == 'default_channels' and rc_path != sys_rc_path:
msg = "'default_channels' is only configurable for system installs"
raise NotImplementedError(msg)
if item in rc_config.get(key, []):
# Right now, all list keys should not contain duplicates
message = "Skipping %s: %s, item already exists" % (key, item)
if not args.json:
print(message, file=sys.stderr)
else:
json_warnings.append(message)
continue
rc_config.setdefault(key, []).insert(0, item)
# Set
set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
for key, item in args.set:
# Check key and value
yamlitem = yaml_load(item)
if key in set_bools:
if not isinstance(yamlitem, bool):
error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
json=args.json, error_type="TypeError")
rc_config[key] = yamlitem
elif key in set_strings:
rc_config[key] = yamlitem
else:
error_and_exit("Error key must be one of %s, not %s" %
(', '.join(set_bools | set_strings), key), json=args.json,
error_type="ValueError")
# Remove
for key, item in args.remove:
if key not in rc_config:
error_and_exit("key %r is not in the config file" % key, json=args.json,
error_type="KeyError")
if item not in rc_config[key]:
error_and_exit("%r is not in the %r key of the config file" %
(item, key), json=args.json, error_type="KeyError")
rc_config[key] = [i for i in rc_config[key] if i != item]
# Remove Key
for key, in args.remove_key:
if key not in rc_config:
error_and_exit("key %r is not in the config file" % key, json=args.json,
error_type="KeyError")
del rc_config[key]
# config.rc_keys
with open(rc_path, 'w') as rc:
rc.write(yaml_dump(rc_config))
if args.json:
stdout_json_success(
rc_path=rc_path,
warnings=json_warnings,
get=json_get
)
return
| conda/cli/main_config.py
--- a/conda/cli/main_config.py
+++ b/conda/cli/main_config.py
@@ -218,26 +218,21 @@ def execute_config(args, parser):
else:
rc_path = user_rc_path
- # Create the file if it doesn't exist
- if not os.path.exists(rc_path):
- has_defaults = ['channels', 'defaults'] in args.add
- if args.add and 'channels' in list(zip(*args.add))[0] and not has_defaults:
- # If someone adds a channel and their .condarc doesn't exist, make
- # sure it includes the defaults channel, or else they will end up
- # with a broken conda.
- rc_text = """\
-channels:
- - defaults
-"""
- else:
- rc_text = ""
+ # read existing condarc
+ if os.path.exists(rc_path):
+ with open(rc_path, 'r') as fh:
+ rc_config = yaml_load(fh)
else:
- with open(rc_path, 'r') as rc:
- rc_text = rc.read()
- rc_config = yaml_load(rc_text)
- if rc_config is None:
rc_config = {}
+ # add `defaults` channel if creating new condarc file or channel key doesn't exist currently
+ if 'channels' not in rc_config:
+ # now check to see if user wants to modify channels at all
+ if any('channels' in item[0] for item in args.add):
+ # don't need to insert defaults if it's already in args
+ if not ['channels', 'defaults'] in args.add:
+ args.add.insert(0, ['channels', 'defaults'])
+
# Get
if args.get is not None:
if args.get == []: |
conda 4.1 breaks fish integrations
The new version does not work with fish at all. While the .fish script is finally there in the bin directory, it does not seem to kick in, since the new activate.py command is messing with it. How is it supposed to work?
This is the error I get:
```
$ conda activate myenv
Traceback (most recent call last):
File "/home/jrodriguez/.local/anaconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/home/jrodriguez/.local/anaconda/lib/python3.5/site-packages/conda/cli/main.py", line 48, in main
activate.main()
File "/home/jrodriguez/.local/anaconda/lib/python3.5/site-packages/conda/cli/activate.py", line 119, in main
shelldict = shells[shell]
KeyError: 'myenv'
```
Meanwhile, I had to downgrade to conda 4.0 with `conda install conda=4.0`. Thank you!
| conda/cli/activate.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (shells, run_in)
on_win = sys.platform == "win32"
def help(command, shell):
# sys.argv[1] will be ..checkenv in activate if an environment is already
# activated
# get grandparent process name to see which shell we're using
if command in ('..activate', '..checkenv'):
if shell in ["cmd.exe", "powershell.exe"]:
sys.exit("""Usage: activate ENV
Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
else:
sys.exit("""Usage: source activate ENV
Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
elif command == '..deactivate':
if shell in ["cmd.exe", "powershell.exe"]:
sys.exit("""Usage: deactivate
Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH.""")
else:
sys.exit("""Usage: source deactivate
Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
else:
sys.exit("No help available for command %s" % sys.argv[1])
def prefix_from_arg(arg, shelldict):
'Returns a platform-native path'
# MSYS2 converts Unix paths to Windows paths with unix seps
# so we must check for the drive identifier too.
if shelldict['sep'] in arg and not re.match('[a-zA-Z]:', arg):
# strip is removing " marks, not \ - look carefully
native_path = shelldict['path_from'](arg)
if isdir(abspath(native_path.strip("\""))):
prefix = abspath(native_path.strip("\""))
else:
raise ValueError('could not find environment: %s' % native_path)
else:
prefix = find_prefix_name(arg.replace('/', os.path.sep))
if prefix is None:
raise ValueError('could not find environment: %s' % arg)
return prefix
def binpath_from_arg(arg, shelldict):
# prefix comes back as platform-native path
prefix = prefix_from_arg(arg, shelldict=shelldict)
if sys.platform == 'win32':
paths = [
prefix.rstrip("\\"),
os.path.join(prefix, 'Library', 'mingw-w64', 'bin'),
os.path.join(prefix, 'Library', 'usr', 'bin'),
os.path.join(prefix, 'Library', 'bin'),
os.path.join(prefix, 'Scripts'),
]
else:
paths = [
os.path.join(prefix, 'bin'),
]
# convert paths to shell-native paths
return [shelldict['path_to'](path) for path in paths]
def pathlist_to_str(paths, escape_backslashes=True):
"""
Format a path list, e.g., of bin paths to be added or removed,
for user-friendly output.
"""
path = ' and '.join(paths)
if on_win and escape_backslashes:
# escape for printing to console - ends up as single \
path = re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', path)
else:
path = path.replace("\\\\", "\\")
return path
def get_path(shelldict):
"""Get path using a subprocess call.
os.getenv path isn't good for us, since bash on windows has a wildly different
path from Windows.
This returns PATH in the native representation of the shell - not necessarily
the native representation of the platform
"""
return run_in(shelldict["printpath"], shelldict)[0]
def main():
from conda.config import root_env_name, root_dir, changeps1
import conda.install
if '-h' in sys.argv or '--help' in sys.argv:
# all execution paths sys.exit at end.
help(sys.argv[1], sys.argv[2])
shell = sys.argv[2]
shelldict = shells[shell]
if sys.argv[1] == '..activate':
path = get_path(shelldict)
if len(sys.argv) == 3 or sys.argv[3].lower() == root_env_name.lower():
binpath = binpath_from_arg(root_env_name, shelldict=shelldict)
rootpath = None
elif len(sys.argv) == 4:
binpath = binpath_from_arg(sys.argv[3], shelldict=shelldict)
rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
else:
sys.exit("Error: did not expect more than one argument")
pathlist_str = pathlist_to_str(binpath)
sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))
# Clear the root path if it is present
if rootpath:
path = path.replace(shelldict['pathsep'].join(rootpath), "")
# prepend our new entries onto the existing path and make sure that the separator is native
path = shelldict['pathsep'].join(binpath + [path, ])
# deactivation is handled completely in shell scripts - it restores backups of env variables.
# It is done in shell scripts because they handle state much better than we can here.
elif sys.argv[1] == '..checkenv':
if len(sys.argv) < 4:
sys.argv.append(root_env_name)
if len(sys.argv) > 4:
sys.exit("Error: did not expect more than one argument.")
if sys.argv[3].lower() == root_env_name.lower():
# no need to check root env and try to install a symlink there
sys.exit(0)
# this should throw an error and exit if the env or path can't be found.
try:
prefix = prefix_from_arg(sys.argv[3], shelldict=shelldict)
except ValueError as e:
sys.exit(getattr(e, 'message', e))
# Make sure an env always has the conda symlink
try:
conda.install.symlink_conda(prefix, root_dir, shell)
except (IOError, OSError) as e:
if e.errno == errno.EPERM or e.errno == errno.EACCES:
msg = ("Cannot activate environment {0}, not have write access to conda symlink"
.format(sys.argv[2]))
sys.exit(msg)
raise
sys.exit(0)
elif sys.argv[1] == '..setps1':
# path is a bit of a misnomer here. It is the prompt setting. However, it is returned
# below by printing. That is why it is named "path"
# DO NOT use os.getenv for this. One Windows especially, it shows cmd.exe settings
# for bash shells. This method uses the shell directly.
path = os.getenv(shelldict['promptvar'], '')
# failsafes
if not path:
if shelldict['exe'] == 'cmd.exe':
path = '$P$G'
# strip off previous prefix, if any:
path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
env_path = sys.argv[3]
if changeps1 and env_path:
path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)
else:
# This means there is a bug in main.py
raise ValueError("unexpected command")
# This print is actually what sets the PATH or PROMPT variable. The shell
# script gets this value, and finishes the job.
print(path)
if __name__ == '__main__':
main()
<|code_end|>
conda/utils.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir, join
import os
import re
import subprocess
import tempfile
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
class memoized(object):
"""Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args, **kw):
newargs = []
for arg in args:
if isinstance(arg, list):
newargs.append(tuple(arg))
elif not isinstance(arg, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args, **kw)
else:
newargs.append(arg)
newargs = tuple(newargs)
key = (newargs, frozenset(sorted(kw.items())))
if key in self.cache:
return self.cache[key]
else:
value = self.func(*args, **kw)
self.cache[key] = value
return value
# For instance methods only
class memoize(object): # 577452
def __init__(self, func):
self.func = func
def __get__(self, obj, objtype=None):
if obj is None:
return self.func
return partial(self, obj)
def __call__(self, *args, **kw):
obj = args[0]
try:
cache = obj.__cache
except AttributeError:
cache = obj.__cache = {}
key = (self.func, args[1:], frozenset(sorted(kw.items())))
try:
res = cache[key]
except KeyError:
res = cache[key] = self.func(*args, **kw)
return res
@memoized
def gnu_get_libc_version():
"""
If on linux, get installed version of glibc, otherwise return None
"""
if not sys.platform.startswith('linux'):
return None
from ctypes import CDLL, cdll, c_char_p
cdll.LoadLibrary('libc.so.6')
libc = CDLL('libc.so.6')
f = libc.gnu_get_libc_version
f.restype = c_char_p
return f()
def can_open(file):
"""
Return True if the given ``file`` can be opened for writing
"""
try:
fp = open(file, "ab")
fp.close()
return True
except IOError:
stderrlog.info("Unable to open %s\n" % file)
return False
def can_open_all(files):
"""
Return True if all of the provided ``files`` can be opened
"""
for f in files:
if not can_open(f):
return False
return True
def can_open_all_files_in_prefix(prefix, files):
"""
Returns True if all ``files`` at a given ``prefix`` can be opened
"""
return can_open_all((os.path.join(prefix, f) for f in files))
def try_write(dir_path):
if not isdir(dir_path):
return False
# try to create a file to see if `dir_path` is writable, see #2151
temp_filename = join(dir_path, '.conda-try-write-%d' % os.getpid())
try:
with open(temp_filename, mode='wb') as fo:
fo.write(b'This is a test file.\n')
os.unlink(temp_filename)
return True
except (IOError, OSError):
return False
def hashsum_file(path, mode='md5'):
h = hashlib.new(mode)
with open(path, 'rb') as fi:
while True:
chunk = fi.read(262144) # process chunks of 256KB
if not chunk:
break
h.update(chunk)
return h.hexdigest()
def md5_file(path):
return hashsum_file(path, 'md5')
def url_path(path):
path = abspath(path)
if sys.platform == 'win32':
path = '/' + path.replace(':', '|').replace('\\', '/')
return 'file://%s' % path
def run_in(command, shell, cwd=None, env=None):
if hasattr(shell, "keys"):
shell = shell["exe"]
if shell == 'cmd.exe':
cmd_script = tempfile.NamedTemporaryFile(suffix='.bat', mode='wt', delete=False)
cmd_script.write(command)
cmd_script.close()
cmd_bits = [shells[shell]["exe"]] + shells[shell]["shell_args"] + [cmd_script.name]
try:
p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=cwd, env=env)
stdout, stderr = p.communicate()
finally:
os.unlink(cmd_script.name)
elif shell == 'powershell':
raise NotImplementedError
else:
cmd_bits = ([shells[shell]["exe"]] + shells[shell]["shell_args"] +
[translate_stream(command, shells[shell]["path_to"])])
p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
streams = [u"%s" % stream.decode('utf-8').replace('\r\n', '\n').rstrip("\n")
for stream in (stdout, stderr)]
return streams
def path_identity(path):
"""Used as a dummy path converter where no conversion necessary"""
return path
def win_path_to_unix(path, root_prefix=""):
"""Convert a path or ;-separated string of paths into a unix representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
def _translation(found_path):
found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/")
return root_prefix + "/" + found
path = re.sub(path_re, _translation, path).replace(";/", ":/")
return path
def unix_path_to_win(path, root_prefix=""):
"""Convert a path or :-separated string of paths into a Windows representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
if len(path) > 1 and (";" in path or (path[1] == ":" and path.count(":") == 1)):
# already a windows path
return path.replace("/", "\\")
path_re = root_prefix + r'(/[a-zA-Z]/(?:(?![:\s]/)[^:*?"<>])*)'
def _translation(found_path):
group = found_path.group(0)
return "{0}:{1}".format(group[len(root_prefix)+1],
group[len(root_prefix)+2:].replace("/", "\\"))
translation = re.sub(path_re, _translation, path)
translation = re.sub(":([a-zA-Z]):\\\\",
lambda match: ";" + match.group(0)[1] + ":\\",
translation)
return translation
# curry cygwin functions
def win_path_to_cygwin(path):
return win_path_to_unix(path, "/cygdrive")
def cygwin_path_to_win(path):
return unix_path_to_win(path, "/cygdrive")
def translate_stream(stream, translator):
return "\n".join(translator(line) for line in stream.split("\n"))
def human_bytes(n):
"""
Return the number of bytes n in more human readable form.
"""
if n < 1024:
return '%d B' % n
k = n/1024
if k < 1024:
return '%d KB' % round(k)
m = k/1024
if m < 1024:
return '%.1f MB' % m
g = m/1024
return '%.2f GB' % g
# This is necessary for Windows, for linking the environment, and for printing the correct
# activation instructions on Windows, depending on the shell type. It would be great to
# get rid of it, but I don't know how to otherwise detect which shell is used to create
# or install conda packages.
def find_parent_shell(path=False):
"""return process name or path of parent. Default is to return only name of process."""
try:
import psutil
except ImportError:
stderrlog.warn("No psutil available.\n"
"To proceed, please conda install psutil")
return None
process = psutil.Process()
pname = process.parent().name().lower()
while any(proc in pname for proc in ["conda", "python", "py.test"]):
if process:
process = process.parent()
else:
# fallback defaults to system default
if sys.platform == 'win32':
return 'cmd.exe'
else:
return 'bash'
if path:
return process.parent().exe()
return process.parent().name()
@memoized
def get_yaml():
try:
import ruamel_yaml as yaml
except ImportError:
try:
import ruamel.yaml as yaml
except ImportError:
try:
import yaml
except ImportError:
sys.exit("No yaml library available.\n"
"To proceed, please conda install ruamel_yaml")
return yaml
def yaml_load(filehandle):
yaml = get_yaml()
try:
return yaml.load(filehandle, Loader=yaml.RoundTripLoader, version="1.2")
except AttributeError:
return yaml.load(filehandle)
def yaml_dump(string):
yaml = get_yaml()
try:
return yaml.dump(string, Dumper=yaml.RoundTripDumper,
block_seq_indent=2, default_flow_style=False,
indent=4)
except AttributeError:
return yaml.dump(string, default_flow_style=False)
# TODO: this should be done in a more extensible way
# (like files for each shell, with some registration mechanism.)
# defaults for unix shells. Note: missing "exe" entry, which should be set to
# either an executable on PATH, or a full path to an executable for a shell
unix_shell_base = dict(
binpath="/bin/", # mind the trailing slash.
echo="echo",
env_script_suffix=".sh",
nul='2>/dev/null',
path_from=path_identity,
path_to=path_identity,
pathsep=":",
printdefaultenv='echo $CONDA_DEFAULT_ENV',
printpath="echo $PATH",
printps1='echo $PS1',
promptvar='PS1',
sep="/",
set_var='export ',
shell_args=["-l", "-c"],
shell_suffix="",
slash_convert=("\\", "/"),
source_setup="source",
test_echo_extra="",
var_format="${}",
)
msys2_shell_base = dict(
unix_shell_base,
path_from=unix_path_to_win,
path_to=win_path_to_unix,
binpath="/Scripts/", # mind the trailing slash.
)
if sys.platform == "win32":
shells = {
# "powershell.exe": dict(
# echo="echo",
# test_echo_extra=" .",
# var_format="${var}",
# binpath="/bin/", # mind the trailing slash.
# source_setup="source",
# nul='2>/dev/null',
# set_var='export ',
# shell_suffix=".ps",
# env_script_suffix=".ps",
# printps1='echo $PS1',
# printdefaultenv='echo $CONDA_DEFAULT_ENV',
# printpath="echo %PATH%",
# exe="powershell.exe",
# path_from=path_identity,
# path_to=path_identity,
# slash_convert = ("/", "\\"),
# ),
"cmd.exe": dict(
echo="@echo",
var_format="%{}%",
binpath="\\Scripts\\", # mind the trailing slash.
source_setup="call",
test_echo_extra="",
nul='1>NUL 2>&1',
set_var='set ',
shell_suffix=".bat",
env_script_suffix=".bat",
printps1="@echo %PROMPT%",
promptvar="PROMPT",
# parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file # NOQA
printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
'echo()',
printpath="@echo %PATH%",
exe="cmd.exe",
shell_args=["/d", "/c"],
path_from=path_identity,
path_to=path_identity,
slash_convert=("/", "\\"),
sep="\\",
pathsep=";",
),
"cygwin": dict(
unix_shell_base,
exe="bash.exe",
binpath="/Scripts/", # mind the trailing slash.
path_from=cygwin_path_to_win,
path_to=win_path_to_cygwin
),
# bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
# entry instead. The only major difference is that it handle's cygwin's /cygdrive
# filesystem root.
"bash.exe": dict(
msys2_shell_base, exe="bash.exe",
),
"sh.exe": dict(
msys2_shell_base, exe="sh.exe",
),
}
else:
shells = {
"bash": dict(
unix_shell_base, exe="bash",
),
"zsh": dict(
unix_shell_base, exe="zsh",
),
# "fish": dict(unix_shell_base, exe="fish",
# shell_suffix=".fish",
# source_setup=""),
}
<|code_end|>
setup.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
import os
import sys
if 'develop' in sys.argv:
from setuptools import setup
else:
from distutils.core import setup
if not (sys.version_info[:2] == (2, 7) or sys.version_info[:2] >= (3, 3)):
sys.exit("conda is only meant for Python 2.7 or 3.3 and up. "
"current version: %d.%d" % sys.version_info[:2])
if os.environ.get('CONDA_DEFAULT_ENV'):
# Try to prevent accidentally installing conda into a non-root conda environment
sys.exit("""
You appear to be in a non-root conda environment. Conda is only supported in
the root environment. Deactivate and try again. If you believe this message
is in error, run CONDA_DEFAULT_ENV='' python setup.py.
""")
# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src directory to the sys.path.
here = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.join(here, "conda")
sys.path.insert(0, src_dir)
import auxlib # noqa -- build-time dependency only
import conda # NOQA
with open(os.path.join(here, "README.rst")) as f:
long_description = f.read()
scripts = ['bin/activate',
'bin/deactivate', ]
if sys.platform == 'win32':
# Powershell scripts should go here
scripts.extend(['bin/activate.bat',
'bin/deactivate.bat'])
setup(
name=conda.__name__,
version=conda.__version__,
author=conda.__author__,
author_email=conda.__email__,
url=conda.__url__,
license=conda.__license__,
description=conda.__summary__,
long_description=long_description,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
packages=[
'conda',
'conda.cli',
'conda.progressbar'
],
cmdclass={
'build_py': auxlib.BuildPyCommand,
'sdist': auxlib.SDistCommand,
},
install_requires=[
'pycosat >=0.6.1',
'pyyaml',
'requests',
],
entry_points={
'console_scripts': [
"conda = conda.cli.main:main"
],
},
scripts=scripts,
zip_safe=False,
)
<|code_end|>
| conda/cli/activate.py
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (shells, run_in)
on_win = sys.platform == "win32"
def help(command, shell):
# sys.argv[1] will be ..checkenv in activate if an environment is already
# activated
# get grandparent process name to see which shell we're using
if command in ('..activate', '..checkenv'):
if shell in ["cmd.exe", "powershell.exe"]:
sys.exit("""Usage: activate ENV
Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
else:
sys.exit("""Usage: source activate ENV
Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
elif command == '..deactivate':
if shell in ["cmd.exe", "powershell.exe"]:
sys.exit("""Usage: deactivate
Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH.""")
else:
sys.exit("""Usage: source deactivate
Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
else:
sys.exit("No help available for command %s" % sys.argv[1])
def prefix_from_arg(arg, shelldict):
'Returns a platform-native path'
# MSYS2 converts Unix paths to Windows paths with unix seps
# so we must check for the drive identifier too.
if shelldict['sep'] in arg and not re.match('[a-zA-Z]:', arg):
# strip is removing " marks, not \ - look carefully
native_path = shelldict['path_from'](arg)
if isdir(abspath(native_path.strip("\""))):
prefix = abspath(native_path.strip("\""))
else:
raise ValueError('could not find environment: %s' % native_path)
else:
prefix = find_prefix_name(arg.replace('/', os.path.sep))
if prefix is None:
raise ValueError('could not find environment: %s' % arg)
return prefix
def binpath_from_arg(arg, shelldict):
# prefix comes back as platform-native path
prefix = prefix_from_arg(arg, shelldict=shelldict)
if sys.platform == 'win32':
paths = [
prefix.rstrip("\\"),
os.path.join(prefix, 'Library', 'mingw-w64', 'bin'),
os.path.join(prefix, 'Library', 'usr', 'bin'),
os.path.join(prefix, 'Library', 'bin'),
os.path.join(prefix, 'Scripts'),
]
else:
paths = [
os.path.join(prefix, 'bin'),
]
# convert paths to shell-native paths
return [shelldict['path_to'](path) for path in paths]
def pathlist_to_str(paths, escape_backslashes=True):
"""
Format a path list, e.g., of bin paths to be added or removed,
for user-friendly output.
"""
path = ' and '.join(paths)
if on_win and escape_backslashes:
# escape for printing to console - ends up as single \
path = re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', path)
else:
path = path.replace("\\\\", "\\")
return path
def get_path(shelldict):
"""Get path using a subprocess call.
os.getenv path isn't good for us, since bash on windows has a wildly different
path from Windows.
This returns PATH in the native representation of the shell - not necessarily
the native representation of the platform
"""
return run_in(shelldict["printpath"], shelldict)[0]
def main():
from conda.config import root_env_name, root_dir, changeps1
import conda.install
if '-h' in sys.argv or '--help' in sys.argv:
# all execution paths sys.exit at end.
help(sys.argv[1], sys.argv[2])
shell = sys.argv[2]
shelldict = shells[shell]
if sys.argv[1] == '..activate':
path = get_path(shelldict)
if len(sys.argv) == 3 or sys.argv[3].lower() == root_env_name.lower():
binpath = binpath_from_arg(root_env_name, shelldict=shelldict)
rootpath = None
elif len(sys.argv) == 4:
binpath = binpath_from_arg(sys.argv[3], shelldict=shelldict)
rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
else:
sys.exit("Error: did not expect more than one argument")
pathlist_str = pathlist_to_str(binpath)
sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))
# Clear the root path if it is present
if rootpath:
path = path.replace(shelldict['pathsep'].join(rootpath), "")
path = path.lstrip()
# prepend our new entries onto the existing path and make sure that the separator is native
path = shelldict['pathsep'].join(binpath + [path, ])
# Clean up any doubled-up path separators
path = path.replace(shelldict['pathsep'] * 2, shelldict['pathsep'])
# deactivation is handled completely in shell scripts - it restores backups of env variables.
# It is done in shell scripts because they handle state much better than we can here.
elif sys.argv[1] == '..checkenv':
if len(sys.argv) < 4:
sys.argv.append(root_env_name)
if len(sys.argv) > 4:
sys.exit("Error: did not expect more than one argument.")
if sys.argv[3].lower() == root_env_name.lower():
# no need to check root env and try to install a symlink there
sys.exit(0)
# this should throw an error and exit if the env or path can't be found.
try:
prefix = prefix_from_arg(sys.argv[3], shelldict=shelldict)
except ValueError as e:
sys.exit(getattr(e, 'message', e))
# Make sure an env always has the conda symlink
try:
conda.install.symlink_conda(prefix, root_dir, shell)
except (IOError, OSError) as e:
if e.errno == errno.EPERM or e.errno == errno.EACCES:
msg = ("Cannot activate environment {0}, not have write access to conda symlink"
.format(sys.argv[2]))
sys.exit(msg)
raise
sys.exit(0)
elif sys.argv[1] == '..setps1':
# path is a bit of a misnomer here. It is the prompt setting. However, it is returned
# below by printing. That is why it is named "path"
# DO NOT use os.getenv for this. One Windows especially, it shows cmd.exe settings
# for bash shells. This method uses the shell directly.
path = os.getenv(shelldict['promptvar'], '')
# failsafes
if not path:
if shelldict['exe'] == 'cmd.exe':
path = '$P$G'
# strip off previous prefix, if any:
path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
env_path = sys.argv[3]
if changeps1 and env_path:
path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)
else:
# This means there is a bug in main.py
raise ValueError("unexpected command")
# This print is actually what sets the PATH or PROMPT variable. The shell
# script gets this value, and finishes the job.
print(path)
if __name__ == '__main__':
main()
conda/utils.py
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir, join
import os
import re
import subprocess
import tempfile
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
class memoized(object):
"""Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args, **kw):
newargs = []
for arg in args:
if isinstance(arg, list):
newargs.append(tuple(arg))
elif not isinstance(arg, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args, **kw)
else:
newargs.append(arg)
newargs = tuple(newargs)
key = (newargs, frozenset(sorted(kw.items())))
if key in self.cache:
return self.cache[key]
else:
value = self.func(*args, **kw)
self.cache[key] = value
return value
# For instance methods only
class memoize(object): # 577452
def __init__(self, func):
self.func = func
def __get__(self, obj, objtype=None):
if obj is None:
return self.func
return partial(self, obj)
def __call__(self, *args, **kw):
obj = args[0]
try:
cache = obj.__cache
except AttributeError:
cache = obj.__cache = {}
key = (self.func, args[1:], frozenset(sorted(kw.items())))
try:
res = cache[key]
except KeyError:
res = cache[key] = self.func(*args, **kw)
return res
@memoized
def gnu_get_libc_version():
"""
If on linux, get installed version of glibc, otherwise return None
"""
if not sys.platform.startswith('linux'):
return None
from ctypes import CDLL, cdll, c_char_p
cdll.LoadLibrary('libc.so.6')
libc = CDLL('libc.so.6')
f = libc.gnu_get_libc_version
f.restype = c_char_p
return f()
def can_open(file):
"""
Return True if the given ``file`` can be opened for writing
"""
try:
fp = open(file, "ab")
fp.close()
return True
except IOError:
stderrlog.info("Unable to open %s\n" % file)
return False
def can_open_all(files):
"""
Return True if all of the provided ``files`` can be opened
"""
for f in files:
if not can_open(f):
return False
return True
def can_open_all_files_in_prefix(prefix, files):
"""
Returns True if all ``files`` at a given ``prefix`` can be opened
"""
return can_open_all((os.path.join(prefix, f) for f in files))
def try_write(dir_path):
if not isdir(dir_path):
return False
# try to create a file to see if `dir_path` is writable, see #2151
temp_filename = join(dir_path, '.conda-try-write-%d' % os.getpid())
try:
with open(temp_filename, mode='wb') as fo:
fo.write(b'This is a test file.\n')
os.unlink(temp_filename)
return True
except (IOError, OSError):
return False
def hashsum_file(path, mode='md5'):
h = hashlib.new(mode)
with open(path, 'rb') as fi:
while True:
chunk = fi.read(262144) # process chunks of 256KB
if not chunk:
break
h.update(chunk)
return h.hexdigest()
def md5_file(path):
return hashsum_file(path, 'md5')
def url_path(path):
path = abspath(path)
if sys.platform == 'win32':
path = '/' + path.replace(':', '|').replace('\\', '/')
return 'file://%s' % path
def run_in(command, shell, cwd=None, env=None):
if hasattr(shell, "keys"):
shell = shell["exe"]
if shell == 'cmd.exe':
cmd_script = tempfile.NamedTemporaryFile(suffix='.bat', mode='wt', delete=False)
cmd_script.write(command)
cmd_script.close()
cmd_bits = [shells[shell]["exe"]] + shells[shell]["shell_args"] + [cmd_script.name]
try:
p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=cwd, env=env)
stdout, stderr = p.communicate()
finally:
os.unlink(cmd_script.name)
elif shell == 'powershell':
raise NotImplementedError
else:
cmd_bits = ([shells[shell]["exe"]] + shells[shell]["shell_args"] +
[translate_stream(command, shells[shell]["path_to"])])
p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
streams = [u"%s" % stream.decode('utf-8').replace('\r\n', '\n').rstrip("\n")
for stream in (stdout, stderr)]
return streams
def path_identity(path):
"""Used as a dummy path converter where no conversion necessary"""
return path
def win_path_to_unix(path, root_prefix=""):
"""Convert a path or ;-separated string of paths into a unix representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
def _translation(found_path):
found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/")
return root_prefix + "/" + found
path = re.sub(path_re, _translation, path).replace(";/", ":/")
return path
def unix_path_to_win(path, root_prefix=""):
"""Convert a path or :-separated string of paths into a Windows representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
if len(path) > 1 and (";" in path or (path[1] == ":" and path.count(":") == 1)):
# already a windows path
return path.replace("/", "\\")
path_re = root_prefix + r'(/[a-zA-Z]/(?:(?![:\s]/)[^:*?"<>])*)'
def _translation(found_path):
group = found_path.group(0)
return "{0}:{1}".format(group[len(root_prefix)+1],
group[len(root_prefix)+2:].replace("/", "\\"))
translation = re.sub(path_re, _translation, path)
translation = re.sub(":([a-zA-Z]):\\\\",
lambda match: ";" + match.group(0)[1] + ":\\",
translation)
return translation
# curry cygwin functions
def win_path_to_cygwin(path):
return win_path_to_unix(path, "/cygdrive")
def cygwin_path_to_win(path):
return unix_path_to_win(path, "/cygdrive")
def translate_stream(stream, translator):
return "\n".join(translator(line) for line in stream.split("\n"))
def human_bytes(n):
"""
Return the number of bytes n in more human readable form.
"""
if n < 1024:
return '%d B' % n
k = n/1024
if k < 1024:
return '%d KB' % round(k)
m = k/1024
if m < 1024:
return '%.1f MB' % m
g = m/1024
return '%.2f GB' % g
# This is necessary for Windows, for linking the environment, and for printing the correct
# activation instructions on Windows, depending on the shell type. It would be great to
# get rid of it, but I don't know how to otherwise detect which shell is used to create
# or install conda packages.
def find_parent_shell(path=False):
"""return process name or path of parent. Default is to return only name of process."""
try:
import psutil
except ImportError:
stderrlog.warn("No psutil available.\n"
"To proceed, please conda install psutil")
return None
process = psutil.Process()
pname = process.parent().name().lower()
while any(proc in pname for proc in ["conda", "python", "py.test"]):
if process:
process = process.parent()
else:
# fallback defaults to system default
if sys.platform == 'win32':
return 'cmd.exe'
else:
return 'bash'
if path:
return process.parent().exe()
return process.parent().name()
@memoized
def get_yaml():
    """Return the best available YAML module.

    Prefers the round-trip-capable ruamel variants over plain PyYAML;
    memoized so the import probing runs only once per process.  Exits the
    interpreter with an instructive message when no YAML library exists.
    """
    try:
        import ruamel_yaml as yaml
    except ImportError:
        try:
            import ruamel.yaml as yaml
        except ImportError:
            try:
                import yaml
            except ImportError:
                sys.exit("No yaml library available.\n"
                         "To proceed, please conda install ruamel_yaml")
    return yaml
def yaml_load(filehandle):
    """Parse YAML from *filehandle*, round-trip-preserving when supported."""
    yaml = get_yaml()
    try:
        return yaml.load(filehandle, Loader=yaml.RoundTripLoader, version="1.2")
    except AttributeError:
        # fall back for yaml modules without RoundTripLoader (e.g. plain PyYAML)
        return yaml.load(filehandle)
def yaml_dump(string):
    """Serialize *string* (any YAML-serializable object) to a YAML document."""
    yaml = get_yaml()
    try:
        return yaml.dump(string, Dumper=yaml.RoundTripDumper,
                         block_seq_indent=2, default_flow_style=False,
                         indent=4)
    except AttributeError:
        # fall back for yaml modules without RoundTripDumper (e.g. plain PyYAML)
        return yaml.dump(string, default_flow_style=False)
# TODO: this should be done in a more extensible way
# (like files for each shell, with some registration mechanism.)
# defaults for unix shells. Note: missing "exe" entry, which should be set to
# either an executable on PATH, or a full path to an executable for a shell
unix_shell_base = dict(
    # shell-syntax fragments and path converters shared by unix-like shells;
    # per-shell dicts below override individual entries as needed
    binpath="/bin/",  # mind the trailing slash.
    echo="echo",
    env_script_suffix=".sh",
    nul='2>/dev/null',
    path_from=path_identity,
    path_to=path_identity,
    pathsep=":",
    printdefaultenv='echo $CONDA_DEFAULT_ENV',
    printpath="echo $PATH",
    printps1='echo $PS1',
    promptvar='PS1',
    sep="/",
    set_var='export ',
    shell_args=["-l", "-c"],
    shell_suffix="",
    slash_convert=("\\", "/"),
    source_setup="source",
    test_echo_extra="",
    var_format="${}",
)
msys2_shell_base = dict(
    unix_shell_base,
    # unlike plain unix shells, these shells live on Windows, so paths must be
    # converted between unix-style and native Windows forms
    path_from=unix_path_to_win,
    path_to=win_path_to_unix,
    binpath="/Scripts/",  # mind the trailing slash.
)
# Per-shell configuration table: maps a shell name to the syntax fragments and
# path-conversion callables conda needs to drive that shell.
if sys.platform == "win32":
    shells = {
        # "powershell.exe": dict(
        #    echo="echo",
        #    test_echo_extra=" .",
        #    var_format="${var}",
        #    binpath="/bin/",  # mind the trailing slash.
        #    source_setup="source",
        #    nul='2>/dev/null',
        #    set_var='export ',
        #    shell_suffix=".ps",
        #    env_script_suffix=".ps",
        #    printps1='echo $PS1',
        #    printdefaultenv='echo $CONDA_DEFAULT_ENV',
        #    printpath="echo %PATH%",
        #    exe="powershell.exe",
        #    path_from=path_identity,
        #    path_to=path_identity,
        #    slash_convert = ("/", "\\"),
        # ),
        "cmd.exe": dict(
            echo="@echo",
            var_format="%{}%",
            binpath="\\Scripts\\",  # mind the trailing slash.
            source_setup="call",
            test_echo_extra="",
            nul='1>NUL 2>&1',
            set_var='set ',
            shell_suffix=".bat",
            env_script_suffix=".bat",
            printps1="@echo %PROMPT%",
            promptvar="PROMPT",
            # parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file  # NOQA
            printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
                            'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
                            'echo()',
            printpath="@echo %PATH%",
            exe="cmd.exe",
            shell_args=["/d", "/c"],
            path_from=path_identity,
            path_to=path_identity,
            slash_convert=("/", "\\"),
            sep="\\",
            pathsep=";",
        ),
        "cygwin": dict(
            unix_shell_base,
            exe="bash.exe",
            binpath="/Scripts/",  # mind the trailing slash.
            path_from=cygwin_path_to_win,
            path_to=win_path_to_cygwin
        ),
        # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
        # entry instead. The only major difference is that it handle's cygwin's /cygdrive
        # filesystem root.
        "bash.exe": dict(
            msys2_shell_base, exe="bash.exe",
        ),
        "sh.exe": dict(
            msys2_shell_base, exe="sh.exe",
        ),
    }
else:
    shells = {
        "bash": dict(
            unix_shell_base, exe="bash",
        ),
        "zsh": dict(
            unix_shell_base, exe="zsh",
        ),
        "fish": dict(
            unix_shell_base, exe="fish",
            pathsep=" ",  # fish's PATH is space-separated, not colon-separated
        ),
    }
setup.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.

import os
import sys

# setuptools is only needed for `setup.py develop`; plain installs use distutils
if 'develop' in sys.argv:
    from setuptools import setup
else:
    from distutils.core import setup

# conda supports exactly Python 2.7 and 3.3+
if not (sys.version_info[:2] == (2, 7) or sys.version_info[:2] >= (3, 3)):
    sys.exit("conda is only meant for Python 2.7 or 3.3 and up. "
             "current version: %d.%d" % sys.version_info[:2])

if os.environ.get('CONDA_DEFAULT_ENV'):
    # Try to prevent accidentally installing conda into a non-root conda environment
    sys.exit("""
You appear to be in a non-root conda environment. Conda is only supported in
the root environment. Deactivate and try again. If you believe this message
is in error, run CONDA_DEFAULT_ENV='' python setup.py.
""")

# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src directory to the sys.path.
here = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.join(here, "conda")
sys.path.insert(0, src_dir)
import auxlib  # noqa -- build-time dependency only
import conda  # NOQA

with open(os.path.join(here, "README.rst")) as f:
    long_description = f.read()

scripts = ['bin/activate',
           'bin/deactivate',
           'bin/conda.fish',
           ]
if sys.platform == 'win32':
    # Powershell scripts should go here
    scripts.extend(['bin/activate.bat',
                    'bin/deactivate.bat'])

setup(
    name=conda.__name__,
    version=conda.__version__,
    author=conda.__author__,
    author_email=conda.__email__,
    url=conda.__url__,
    license=conda.__license__,
    description=conda.__summary__,
    long_description=long_description,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
    ],
    packages=[
        'conda',
        'conda.cli',
        'conda.progressbar'
    ],
    cmdclass={
        'build_py': auxlib.BuildPyCommand,
        'sdist': auxlib.SDistCommand,
    },
    install_requires=[
        'pycosat >=0.6.1',
        'pyyaml',
        'requests',
    ],
    entry_points={
        'console_scripts': [
            "conda = conda.cli.main:main"
        ],
    },
    scripts=scripts,
    zip_safe=False,
)
| conda/cli/activate.py
--- a/conda/cli/activate.py
+++ b/conda/cli/activate.py
@@ -134,8 +134,11 @@ def main():
if rootpath:
path = path.replace(shelldict['pathsep'].join(rootpath), "")
+ path = path.lstrip()
# prepend our new entries onto the existing path and make sure that the separator is native
path = shelldict['pathsep'].join(binpath + [path, ])
+ # Clean up any doubled-up path separators
+ path = path.replace(shelldict['pathsep'] * 2, shelldict['pathsep'])
# deactivation is handled completely in shell scripts - it restores backups of env variables.
# It is done in shell scripts because they handle state much better than we can here.
conda/utils.py
--- a/conda/utils.py
+++ b/conda/utils.py
@@ -410,7 +410,8 @@ def yaml_dump(string):
"zsh": dict(
unix_shell_base, exe="zsh",
),
- # "fish": dict(unix_shell_base, exe="fish",
- # shell_suffix=".fish",
- # source_setup=""),
+ "fish": dict(
+ unix_shell_base, exe="fish",
+ pathsep=" ",
+ ),
}
setup.py
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,9 @@
long_description = f.read()
scripts = ['bin/activate',
- 'bin/deactivate', ]
+ 'bin/deactivate',
+ 'bin/conda.fish',
+ ]
if sys.platform == 'win32':
# Powershell scripts should go here
scripts.extend(['bin/activate.bat', |
conda=4.1 no longer respects HTTP_PROXY
My proxy variables are properly set:
```
$ set https_proxy
HTTPS_PROXY=http://myproxy:8080
```
`conda=4.0.8` successfully picks up my `%HTTPS_PROXY%` and installs `conda=4.1`:
```
$ conda install conda=4.1
Fetching package metadata: ..........
Solving package specifications: .........
The following packages will be UPDATED:
conda: 4.0.8-py27_0 defaults --> 4.1.0-py27_0 defaults
```
Now trying to revert to `conda=4.0`:
```
$ conda install conda=4.0
Fetching package metadata .........
Could not connect to https://repo.continuum.io/pkgs/free/win-64/
Connection error: HTTPSConnectionPool(host='repo.continuum.io', port=443): Max retries exceeded with url...
```
proxy settings no longer work. Configuring `proxy_servers` in `.condarc` restores connectivity.
| conda/connection.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import requests
import tempfile
from io import BytesIO
from logging import getLogger
from . import __version__ as VERSION
from .compat import urlparse, StringIO
from .config import platform as config_platform, ssl_verify, get_proxy_servers
from .utils import gnu_get_libc_version
# default number of HTTP retries per request
RETRIES = 3

log = getLogger(__name__)
stderrlog = getLogger('stderrlog')

# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
               "requests/{requests_ver} "
               "{python}/{py_ver} "
               "{system}/{kernel} {dist}/{ver}")

glibc_ver = gnu_get_libc_version()
# distribution name/version reported in the User-Agent, chosen per platform
if config_platform == 'linux':
    distinfo = platform.linux_distribution()
    dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
    dist = 'OSX'
    ver = platform.mac_ver()[0]
else:
    dist = platform.system()
    ver = platform.version()

user_agent = _user_agent.format(conda_ver=VERSION,
                                requests_ver=requests.__version__,
                                python=platform.python_implementation(),
                                py_ver=platform.python_version(),
                                system=platform.system(), kernel=platform.release(),
                                dist=dist, ver=ver)
if glibc_ver:
    user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """requests Session preconfigured for conda.

    Applies proxies from conda configuration, retrying HTTP(S) adapters,
    conda's User-Agent, SSL verification, and extra adapters for file://,
    ftp:// and s3:// channel URLs.
    """

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies

        self.trust_env = False  # disable .netrc file
        # also disables REQUESTS_CA_BUNDLE, CURL_CA_BUNDLE env variables
        # NOTE(review): trust_env=False additionally stops requests from
        # honoring HTTP_PROXY/HTTPS_PROXY environment variables (see the
        # proxy regression described alongside this file) -- confirm intended.

        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())

        # Enable s3:// urls
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = user_agent

        self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter serving s3:// URLs via boto.

    Downloads the requested key to a temporary file and exposes it through a
    standard requests Response object.
    """

    def __init__(self):
        super(S3Adapter, self).__init__()
        # path of the downloaded key; removed again in close()
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Fetch the S3 key named by request.url.

        Responds 404 when boto is missing, the bucket/key is unreachable, or
        the key does not exist.
        """
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto

            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser

            def get(self, section, name, default=None, **kw):
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except:
                    val = default
                return val
            Config.get = get
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_S3_info(request.url)

        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)

        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp

        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        """Remove the temporary download file, if one was created."""
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """Split an s3:// url into its (bucket, key) pair."""
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter serving file:// URLs from the local filesystem."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Open the local file named by request.url.

        Responds 404, with the OSError as the body, when the file cannot be
        stat'd.
        """
        pathname = url_to_path(request.url)

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    path = url[len('file:'):].lstrip('/')
    path = urlparse.unquote(path)
    if _url_drive_re.match(path):
        # 'c|rest' or 'c:rest' -> 'c:' + rest (Windows drive letter)
        path = path[0] + ':' + path[2:]
    elif not path.startswith(r'\\'):
        # if not a Windows UNC path
        path = '/' + path
    return path

# matches a leading Windows drive letter written as 'c:' or 'c|'
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''

    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Get the method and attempt to find the function to call.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()
        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        # NOTE(review): retrbinary delivers bytes chunks; writing them into a
        # StringIO would fail on Python 3 -- confirm this path's Python support.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()
        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()
        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close
        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()
        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''

        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)
        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()
        # Alias the close method.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()
        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

            # Decode the base64 encoded string.
            decoded = base64.b64decode(encoded)
            # NOTE(review): on Python 3, b64decode returns bytes, so the str
            # split below would raise TypeError -- confirm this path is only
            # exercised on Python 2, or decode to str first.

            # The string is of the form 'username:password'. Split on the
            # colon.
            components = decoded.split(':')
            username = components[0]
            password = components[1]
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)

        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    '''Return a callback suitable for use by the FTP library. The callback
    appends every chunk it receives to *variable*, which should be a
    file-like object.'''
    def _append(chunk):
        variable.write(chunk)
    return _append
class AuthError(Exception):
    '''Denotes an error with authentication.

    Raised by FTPAdapter when an Authorization header is not HTTP Basic.
    '''
    pass
def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    # ascii encoding: used for line-oriented listings (LIST/NLST)
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    # encoding None leaves the payload as raw bytes
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.

    *data* is a file-like object holding the transferred payload; *code* is
    the ftplib status string (e.g. '226 Transfer complete.').
    '''
    response = requests.Response()

    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # the leading integer of the ftplib status string becomes the HTTP-style code
    response.status_code = int(code.split()[0])

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # NOTE(review): ''.join(filedata) assumes str chunks; cgi.parse_multipart
    # yields bytes values on Python 3, which would make this join raise
    # TypeError -- confirm this path is Python 2 only.
    buf.write(''.join(filedata))
    buf.seek(0)

    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ... '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U

    # Assemble the url from whichever components are present; a component of
    # None is skipped, while empty strings (and port 0) still contribute.
    pieces = []
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
<|code_end|>
| conda/connection.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import requests
import tempfile
from io import BytesIO
from logging import getLogger
from . import __version__ as VERSION
from .compat import urlparse, StringIO
from .config import platform as config_platform, ssl_verify, get_proxy_servers
from .utils import gnu_get_libc_version
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
"requests/{requests_ver} "
"{python}/{py_ver} "
"{system}/{kernel} {dist}/{ver}")
glibc_ver = gnu_get_libc_version()
if config_platform == 'linux':
distinfo = platform.linux_distribution()
dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
dist = 'OSX'
ver = platform.mac_ver()[0]
else:
dist = platform.system()
ver = platform.version()
user_agent = _user_agent.format(conda_ver=VERSION,
requests_ver=requests.__version__,
python=platform.python_implementation(),
py_ver=platform.python_version(),
system=platform.system(), kernel=platform.release(),
dist=dist, ver=ver)
if glibc_ver:
user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """requests Session preconfigured for conda.

    Applies proxies from conda configuration, retrying HTTP(S) adapters,
    conda's User-Agent, SSL verification, and extra adapters for file://,
    ftp:// and s3:// channel URLs.  Uses NullAuth (instead of
    trust_env=False) to ignore .netrc, which keeps requests' environment
    handling -- including HTTP_PROXY/HTTPS_PROXY -- in effect.
    """

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies

        self.auth = NullAuth()  # disable .netrc file. for reference, see
        # https://github.com/Anaconda-Platform/anaconda-client/pull/298

        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())

        # Enable s3:// urls
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = user_agent

        self.verify = ssl_verify
class NullAuth(requests.auth.AuthBase):
    '''force requests to ignore the ``.netrc``

    Some sites do not support regular authentication, but we still
    want to store credentials in the ``.netrc`` file and submit them
    as form elements. Without this, requests would otherwise use the
    .netrc which leads, on some sites, to a 401 error.

    https://github.com/kennethreitz/requests/issues/2773

    Use with::

        requests.get(url, auth=NullAuth())
    '''

    def __call__(self, r):
        # no-op auth hook: return the request unmodified so requests never
        # injects credentials from .netrc
        return r
class S3Adapter(requests.adapters.BaseAdapter):
def __init__(self):
super(S3Adapter, self).__init__()
self._temp_file = None
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
resp = requests.models.Response()
resp.status_code = 200
resp.url = request.url
try:
import boto
# silly patch for AWS because
# TODO: remove or change to warning once boto >2.39.0 is released
# https://github.com/boto/boto/issues/2617
from boto.pyami.config import Config, ConfigParser
def get(self, section, name, default=None, **kw):
try:
val = ConfigParser.get(self, section, name, **kw)
except:
val = default
return val
Config.get = get
except ImportError:
stderrlog.info('\nError: boto is required for S3 channels. '
'Please install it with `conda install boto`\n'
'Make sure to run `source deactivate` if you '
'are in a conda environment.\n')
resp.status_code = 404
return resp
conn = boto.connect_s3()
bucket_name, key_string = url_to_S3_info(request.url)
# Get the bucket without validation that it exists and that we have
# permissions to list its contents.
bucket = conn.get_bucket(bucket_name, validate=False)
try:
key = bucket.get_key(key_string)
except boto.exception.S3ResponseError as exc:
# This exception will occur if the bucket does not exist or if the
# user does not have permission to list its contents.
resp.status_code = 404
resp.raw = exc
return resp
if key and key.exists:
modified = key.last_modified
content_type = key.content_type or "text/plain"
resp.headers = requests.structures.CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": key.size,
"Last-Modified": modified,
})
_, self._temp_file = tempfile.mkstemp()
key.get_contents_to_filename(self._temp_file)
f = open(self._temp_file, 'rb')
resp.raw = f
resp.close = resp.raw.close
else:
resp.status_code = 404
return resp
def close(self):
if self._temp_file:
os.remove(self._temp_file)
def url_to_S3_info(url):
"""
Convert a S3 url to a tuple of bucket and key
"""
parsed_url = requests.packages.urllib3.util.url.parse_url(url)
assert parsed_url.scheme == 's3', (
"You can only use s3: urls (not %r)" % url)
bucket, key = parsed_url.host, parsed_url.path
return bucket, key
class LocalFSAdapter(requests.adapters.BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
pathname = url_to_path(request.url)
resp = requests.models.Response()
resp.status_code = 200
resp.url = request.url
try:
stats = os.stat(pathname)
except OSError as exc:
resp.status_code = 404
resp.raw = exc
else:
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
resp.headers = requests.structures.CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = open(pathname, "rb")
resp.close = resp.raw.close
return resp
def close(self):
pass
def url_to_path(url):
"""
Convert a file: URL to a path.
"""
assert url.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % url)
path = url[len('file:'):].lstrip('/')
path = urlparse.unquote(path)
if _url_drive_re.match(path):
path = path[0] + ':' + path[2:]
elif not path.startswith(r'\\'):
# if not a Windows UNC path
path = '/' + path
return path
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.

    Vendored from requests-ftp.  Each call to :meth:`send` opens a fresh
    FTP connection; the per-command helper closes it again once the whole
    response body has been buffered in memory.
    '''
    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.  'GET' is aliased to RETR so a plain
        # requests.get() against an ftp:// url works.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            # Anonymous login when no credentials were supplied.
            self.conn.login()

        # Get the method and attempt to find the function to call.
        # NOTE(review): an HTTP verb outside func_table raises KeyError here.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

            # Decode the base64 encoded string.
            decoded = base64.b64decode(encoded)

            # The string is of the form 'username:password'. Split on the
            # colon.
            # NOTE(review): on Python 3 b64decode returns bytes, so this
            # split(':') assumes a str -- confirm against the callers.
            components = decoded.split(':')
            username = components[0]
            password = components[1]
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    '''Return a callback suitable for ftplib's retrbinary/retrlines.

    Each chunk handed to the callback is appended to *variable*, which
    must be a file-like object exposing ``write``.
    '''
    def _append_chunk(chunk):
        variable.write(chunk)
    return _append_chunk
class AuthError(Exception):
    '''Denotes an error with authentication.

    Raised when an Authorization header is present but is not HTTP Basic.
    '''
    pass
def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    # LIST/NLST output is ASCII text per the FTP spec.
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    # encoding=None lets the caller decide how to decode the raw bytes.
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()

    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status strings look like '226 Transfer complete.'; the leading
    # integer becomes the HTTP-style status code.
    response.status_code = int(code.split()[0])

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # The parsed parts are bytes; join with a bytes separator so this works
    # on Python 3 as well as Python 2 (where b'' == '').  The previous
    # ''.join(filedata) raised TypeError on Python 3.
    buf.write(b''.join(filedata))
    buf.seek(0)
    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ...              '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U
    pieces = []

    # We test "is not None" (rather than truthiness) so that empty strings
    # and a 0 port still show up in the output.
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)

    return ''.join(pieces)
| conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -90,8 +90,8 @@ def __init__(self, *args, **kwargs):
proxies = get_proxy_servers()
if proxies:
self.proxies = proxies
- self.trust_env = False # disable .netrc file
- # also disables REQUESTS_CA_BUNDLE, CURL_CA_BUNDLE env variables
+ self.auth = NullAuth() # disable .netrc file. for reference, see
+ # https://github.com/Anaconda-Platform/anaconda-client/pull/298
# Configure retries
if retries:
@@ -112,6 +112,22 @@ def __init__(self, *args, **kwargs):
self.verify = ssl_verify
+
+class NullAuth(requests.auth.AuthBase):
+ '''force requests to ignore the ``.netrc``
+ Some sites do not support regular authentication, but we still
+ want to store credentials in the ``.netrc`` file and submit them
+ as form elements. Without this, requests would otherwise use the
+ .netrc which leads, on some sites, to a 401 error.
+ https://github.com/kennethreitz/requests/issues/2773
+ Use with::
+ requests.get(url, auth=NullAuth())
+ '''
+
+ def __call__(self, r):
+ return r
+
+
class S3Adapter(requests.adapters.BaseAdapter):
def __init__(self): |
Bizarre removal of python
Full report at https://ci.appveyor.com/project/ContinuumAnalytics/conda-build/build/1.0.163#L18
```
Package plan for installation in environment C:\Miniconda3-x64:
The following packages will be downloaded:
package | build
---------------------------|-----------------
git-2.6.4 | 0 61.2 MB
colorama-0.3.7 | py34_0 20 KB
coverage-4.1 | py34_0 235 KB
py-1.4.31 | py34_0 127 KB
pytz-2016.4 | py34_0 171 KB
six-1.10.0 | py34_0 17 KB
clyent-1.2.2 | py34_0 15 KB
pytest-2.9.2 | py34_0 283 KB
python-dateutil-2.5.3 | py34_0 238 KB
anaconda-client-1.4.0 | py34_0 156 KB
pytest-cov-2.2.1 | py34_0 17 KB
------------------------------------------------------------
Total: 62.5 MB
The following NEW packages will be INSTALLED:
anaconda-client: 1.4.0-py34_0
clyent: 1.2.2-py34_0
colorama: 0.3.7-py34_0
coverage: 4.1-py34_0
git: 2.6.4-0
py: 1.4.31-py34_0
pytest: 2.9.2-py34_0
pytest-cov: 2.2.1-py34_0
python-dateutil: 2.5.3-py34_0
pytz: 2016.4-py34_0
six: 1.10.0-py34_0
The following packages will be REMOVED:
python: 3.4.4-4
```
CC @mcg1969 @kalefranz
| conda/api.py
<|code_start|>
from __future__ import print_function, division, absolute_import
from . import install
from .compat import iteritems, itervalues
from .config import normalize_urls, prioritize_channels, get_channel_urls
from .fetch import fetch_index
from .resolve import Resolve
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=False,
              offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        # Map each named channel to its priority; packages from channels not
        # in the map sort after every configured one.
        priorities = {c: p for c, p in itervalues(channel_urls)}
        maxp = max(itervalues(priorities)) + 1 if priorities else 1
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # 'defaults' packages are keyed by bare filename; everything else
            # as '<channel>::<filename>'.  (Renamed from 'prefix' -- the old
            # name shadowed and clobbered the function parameter.)
            key_prefix = '' if schannel == 'defaults' else schannel + '::'
            priority = priorities.get(schannel, maxp)
            key = key_prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key]['link'] = info.get('link')
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
def get_package_versions(package, offline=False):
    # Build a fresh index and return every available package record whose
    # name matches *package*; emptyok=True yields [] instead of raising
    # when nothing matches.
    index = get_index(offline=offline)
    r = Resolve(index)
    return r.get_pkgs(package, emptyok=True)
<|code_end|>
conda/fetch.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import sys
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel)
from .connection import CondaSession, unparse_url, RETRIES
from .install import add_cached_package, find_new_location, package_cache, dist2pair
from .lock import Locked
from .utils import memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Return the directory used for cached repodata, creating it on demand."""
    path = join(pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        pass  # already present, or not creatable -- callers cope either way
    return path
def cache_fn_url(url):
    # Cache filenames are the first 8 hex digits of the md5 of the channel
    # url, plus a .json extension.
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Store the *http_key* response header, if present and non-empty,
    into ``d[dict_key]``."""
    header_value = resp.headers.get(http_key)
    if not header_value:
        return
    d[dict_key] = header_value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator: after the wrapped function returns, emit *msg* plus the
    call's arguments on the 'dotupdate' logger (which renders progress dots).
    """
    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def func(*args, **kwargs):
            res = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return res
        return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (or revalidate) the repodata for a channel *url*.

    Returns the parsed repodata dict, or None for missing noarch
    directories.  Uses an on-disk JSON cache keyed by the channel url and
    HTTP conditional requests (ETag / Last-Modified) to avoid re-downloads.
    """
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # No usable cache file -- start from an empty index.
        cache = {'packages': {}}

    if use_cache:
        return cache

    # Conditional-request headers from the previous fetch, if any.
    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        # 304 Not Modified -> keep the cached copy as-is.
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (remove_binstar_tokens(url), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                    .split(DEFAULT_CHANNEL_ALIAS)[1] \
                    .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)

        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None

        elif (e.response.status_code == 401 and
                rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry without the (possibly stale) token in the url.
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.SSLError as e:
        # Note: no raise here -- we fall through and serve the cached copy.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    # Persist the (possibly refreshed) cache, tokens stripped.  Failure to
    # write is non-fatal -- we just lose the cache for next time.
    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        # No configured proxy for this scheme -> nothing we can patch; bail.
        sys.exit("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    username, passwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with its auth component replaced by
    ``username:passwd`` (password url-quoted)."""
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    parts[1] = username + ':' + urllib_quote(passwd, '')
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    # memoized so the user is only prompted once per scheme per process
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """Augment *index* with records synthesized from the local package
    cache, for packages that are extracted on disk but absent from the
    channel repodata."""
    # Map schannel -> priority; anything not mentioned sorts after all
    # configured channels.
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        # Skip packages already indexed or not extracted anywhere on disk.
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        # Best-effort reconstruction of the package url, most reliable
        # source first.
        if info['urls']:
            url = info['urls'][0]
        elif 'url' in meta:
            url = meta['url']
        elif 'channel' in meta:
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        # Sanity checks: the url must actually point at this filename and
        # agree with the channel derived from the dist name.
        if url.rsplit('/', 1)[-1] != fname:
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Make every python 2.x/3.x record in *index* depend on pip
    (mutates the records in place)."""
    for info in itervalues(index):
        if info['name'] != 'python':
            continue
        if info['version'].startswith(('2.', '3.')):
            info.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """Fetch repodata for every channel in *channel_urls* (in parallel when
    concurrent.futures is available) and merge the package records into
    *index*, keyed as '<schannel>::<fn>' (bare '<fn>' for defaults)."""
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.

Allowed channels are:
  - %s
""" % (url, '\n  - '.join(allowed_channels)))

    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in iterkeys(channel_urls)]
    else:
        try:
            urls = tuple(channel_urls)
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)

    for channel, repodata in repodatas:
        if repodata is None:
            # e.g. missing noarch directory -- nothing to merge.
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        # Stamp each record with its origin so the resolver can prioritize.
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info

    stdoutlog.info('\n')

    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    session = session or CondaSession()

    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        # fn[:-8] strips the '.tar.bz2' extension.
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        # Verify a detached signature; a sig value of '.' means it lives
        # next to the package in its channel.
        from .signature import verify, SignatureError

        fn2 = fn + '.sig'
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """Download *url* to *dst_path*, streaming through a '.part' file that
    is renamed into place only after an (optional) md5 check passes.
    Retries on connection resets and md5 mismatches up to *retries* times.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            # Progress-bar start/update/stop events go to dedicated loggers.
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """
    Context manager to handle downloads to a tempfile

    Yields a local path: the url is downloaded into a temporary directory
    (removed on exit); a plain filesystem path is passed through untouched.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # if we provide the file itself, no tmp dir is created
            self.tmp_dir = None
            return self.url
        else:
            if self.verbose:
                from .console import setup_handlers
                setup_handlers()
            self.tmp_dir = tempfile.mkdtemp()
            dst = join(self.tmp_dir, basename(self.url))
            download(self.url, dst)
            return dst

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
on_win = bool(sys.platform == "win32")

try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.  These are simplified stand-ins for the real conda
    # implementations imported above.
    class Locked(object):
        # No-op lock: the installer runs single-process.
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass

    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive.  If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")

    def url_path(path):
        # file:// url for a local path (drive letter escaped on Windows).
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path

    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url

    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'

    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    # Windows-only link primitives via the win32 API (ctypes).
    import ctypes
    from ctypes import wintypes

    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # Symlinks unavailable (e.g. Windows XP).
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise

        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
            # Make the new file executable
            # http://stackoverflow.com/a/30463972/1170370
            mode = os.stat(dst).st_mode
            mode |= (mode & 292) >> 2  # copy R bits to X
            os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """No-op logging handler (backport of the Python 2.7 NullHandler).

    Attaching one prevents the "No handlers could be found for logger"
    warning (http://bugs.python.org/issue16539).
    """
    def handle(self, record):
        return None

    def emit(self, record):
        return None

    def createLock(self):
        # Nothing is ever written, so no lock is required.
        self.lock = None
log.addHandler(NullHandler())
# Link types accepted by _link(); link_name_map supplies the human-readable
# names used in logs and metadata.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    # Materialize dst from src using the requested link type; hard and soft
    # links go through the win32 API on Windows.
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror callback: clear the read-only bit (common cause
    # of delete failures on Windows) and retry the failed operation.
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror handler: log why *path* could not be removed."""
    reasons = {errno.EACCES: "permission denied",
               errno.ENOTEMPTY: "not empty"}
    reason = reasons.get(exc_info[1].errno, "unknown reason")
    log.warn("Cannot remove, {0}: {1}".format(reason, path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))

    elif isdir(path):
        try:
            # Retry with increasing back-off; Windows in particular fails
            # transiently while AV scanners / indexers hold file handles.
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            # Second chance: clear read-only bits and retry.
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1

                        # Third chance: shell out to 'rd /s /q'.
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return

                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): the line below is unreachable --
                                # the bare raise above exits first.
                                msg += "Retry with onerror failed (%s)\n" % e2

                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:
        # directory might not exist, or might not be empty -- both are fine
        pass
def yield_lines(path):
    """
    Generator over the meaningful lines of the text file at *path*:
    each line is yielded stripped of surrounding whitespace; blank lines
    and '#' comment lines are skipped.

    Raises IOError if the file cannot be opened.
    """
    # Use a context manager so the handle is closed deterministically;
    # the original left the file object to the garbage collector.
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """Parse an info/has_prefix file.

    Returns a dict mapping file names to (placeholder, mode) tuples.
    Modern lines carry three shlex-split fields; old-style lines hold just
    a file name and get the default placeholder with 'text' mode.  A
    missing file yields an empty dict.
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                fields = [x.strip('"\'') for x in shlex.split(line, posix=False)]
                placeholder, mode, fn = fields
                res[fn] = (placeholder, mode)
            except ValueError:
                # legacy single-field line
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return res
class PaddingError(Exception):
    """Raised by binary_replace() when the new prefix is longer than the
    placeholder it replaces, so the result cannot be null-padded back to
    the original length."""
    pass
def binary_replace(data, a, b):
    """Replace every occurrence of placeholder *a* in *data* with *b*,
    null-padding each affected null-terminated run so the overall length
    of *data* is preserved.  All arguments are bytes objects.

    Raises PaddingError when *b* is longer than *a* (no room to pad).
    """
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')

    def _pad_replace(match):
        chunk = match.group()
        count = chunk.count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return chunk.replace(a, b) + b'\0' * pad

    out = pattern.sub(_pad_replace, data)
    assert len(out) == len(data)
    return out
def replace_long_shebang(mode, data):
    """Rewrite a ``#!`` line longer than the 127-byte kernel limit to use
    ``/usr/bin/env <name>`` instead of the full interpreter path.

    Applies only in 'text' mode; other modes return *data* untouched.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if not match:
        return data
    shebang, interpreter, opts = match.groups()
    if len(shebang) <= 127:
        return data
    name = interpreter.decode('utf-8').split('/')[-1]
    replacement = '#!/usr/bin/env {0}{1}'.format(name, opts.decode('utf-8'))
    return data.replace(shebang, replacement.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *new_prefix* for *placeholder* inside *data* (bytes).

    'text' mode does a plain byte substitution; 'binary' mode uses the
    length-preserving null-padded replacement.  Any other mode aborts.
    """
    if mode == 'text':
        return data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    if mode == 'binary':
        if on_win:
            # Skip binary replacement in Windows. Some files do have prefix
            # information embedded, but this should not matter, as it is not
            # used for things like RPATH.
            logging.debug("Skipping prefix replacement in binary on Windows")
            return data
        return binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    sys.exit("Invalid mode: %s" % mode)
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the file at *path*, replacing *placeholder* with
    *new_prefix* and (on Unix) shortening over-long shebang lines.

    The file is unlinked before rewriting so hard-linked package-cache
    copies are not clobbered; its permission bits are restored afterwards.
    """
    if on_win:
        # force all prefix replacements to forward slashes to simplify need
        # to escape backslashes: replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = fi.read()
    data = replace_prefix(mode, original_data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into (channel, package-spec).

    Strips any trailing '[features]' suffix and a '.tar.bz2' extension;
    a spec without an explicit 'channel::' prefix belongs to 'defaults'.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    pieces = dist.split('::', 1)
    if len(pieces) == 2:
        return pieces[0], pieces[1]
    return 'defaults', pieces[-1]


def dist2quad(dist):
    """Return (name, version, build, channel) for a dist string; missing
    trailing fields come back as ''."""
    channel, spec = dist2pair(dist)
    fields = spec.rsplit('-', 2) + ['', '']
    return fields[0], fields[1], fields[2], channel


def dist2name(dist):
    """Package name of a dist string."""
    return dist2quad(dist)[0]


def name_dist(dist):
    """Alias of dist2name() kept for historical callers."""
    return dist2name(dist)


def dist2filename(dist, suffix='.tar.bz2'):
    """Channel-less file name for a dist, with the given suffix."""
    return dist2pair(dist)[1] + suffix


def dist2dirname(dist):
    """Extraction directory name for a dist (file name, no extension)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Merges the package's info/index.json with *extra_info*, writes the
    result to <prefix>/conda-meta/<dist>.json, and refreshes the in-memory
    linked-data cache when one already exists for *prefix*.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # keep the in-memory cache consistent with the file just written
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except Exception:
        # menuinst is optional; a missing/broken install must not abort the
        # transaction.  (BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit and KeyboardInterrupt.)
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # best-effort per menu file; log and continue with the rest
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        # no script shipped for this action: trivially successful
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # BUG FIX: work on a copy -- the original did `env = os.environ`, which
    # permanently leaked PKG_NAME/PKG_VERSION/... into conda's own process
    # environment between calls.
    env = os.environ.copy()
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or
    None when no URL is known."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) base64-encoded as text, or
    None if the package ships no icon."""
    import base64
    try:
        # BUG FIX: use a context manager so the handle is closed promptly;
        # the original relied on garbage collection.
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    """Return the set of file names listed in info/no_link and
    info/no_softlink -- files that must be copied rather than linked."""
    names = set()
    for fname in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, fname)))
        except IOError:
            # either file is optional
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Expose the root environment's conda/activate/deactivate entry
    points inside *prefix*: symlinks on Unix, generated .bat redirect
    scripts on Windows."""
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from *root_dir*/<where> into
    *prefix*/<where> using *symlink_fn*, replacing any stale links."""
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    for name in ("conda", "activate", "deactivate"):
        src = join(root_dir, where, name)
        dst = join(target_dir, name)
        # try to kill stale links if they exist
        if os.path.lexists(dst):
            os.remove(dst)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(dst):
            symlink_fn(src, dst)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard links work from *pkgs_dir* into *prefix* by
    linking one small file (the package's info/index.json); return True
    only if a genuine hard link was created."""
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # always clean up the probe file (and the prefix dir if we just
        # created it and it stayed empty)
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed instead of a URL
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        # already registered from a previous scan
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # an extraction dir only counts when its info files are intact
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # record the channel prefix under both the path and path-as-URL keys
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # best-effort: remember the URL for future runs
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        # already populated (manual memoization)
        return package_cache_
    # Stops recursion: add_cached_package() calls back into this function,
    # so a sentinel entry keeps the dict truthy during the initial scan.
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # read urls.txt newest-first so the most recent URL wins
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            # pick up packages on disk that urls.txt does not mention
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url* in the package cache,
    or None when the URL is not associated with any cached package."""
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # already cached: reuse its directory; nothing needs evicting
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # pass 0 accepts only conflict-free dirs (prefix is None);
            # pass 1 accepts the first dir and reports the conflicting
            # dist name (prefix is non-None here, or pass 0 returned)
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for path in rec['files']:
        # drop both lookup keys before deleting the tarball itself
        del fname_table_[path]
        del fname_table_[url_path(path)]
        with Locked(dirname(path)):
            rm_rf(path)
    for path in rec['dirs']:
        with Locked(dirname(path)):
            rm_rf(path)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for path in rec['dirs']:
        with Locked(dirname(path)):
            rm_rf(path)
    if rec['files']:
        # the tarball is still cached: keep the entry, just clear the dirs
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        # extract into a .tmp dir first, then atomically rename into place
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            # NOTE(review): extractall() performs no member-path
            # sanitization; a malicious tarball could write outside
            # temp_path.  Presumably safe because packages come from
            # configured channels -- confirm before exposing to untrusted
            # archives.
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load (or install) the conda-meta record for *dist* into the
    in-memory linked-data cache for *prefix*.

    When *rec* is None the record is read from conda-meta/<dist>.json;
    otherwise the supplied record is stored.  Returns the record, or None
    if the metadata file is missing or inconsistent.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # make sure the per-prefix cache dict exists before inserting
        linked_data(prefix)
    url = rec.get('url')
    # older records may lack 'fn'; derive it from the URL or the dist name
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        url = rec['url'] = rec['channel'] + rec['fn']
    if rec['fn'][:-8] != dname:
        # the recorded filename disagrees with the dist -- treat as corrupt
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked-data cache for *prefix*;
    when *delete* is true, also remove its conda-meta JSON file."""
    recs = linked_data_.get(prefix)
    if recs and dist in recs:
        del recs[dist]
    if not delete:
        return
    meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
    if isfile(meta_path):
        os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    # Walk upwards until a known prefix is found; the part split off so
    # far identifies the dist inside that prefix (empty for the prefix
    # itself).
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                delete_linked_data(path, dist)
            else:
                del linked_data_[path]
            return True
        path, dist = os.path.split(path)
        if not dist:
            # reached the filesystem root without finding a prefix
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is not None:
        return recs
    recs = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fn in os.listdir(meta_dir):
            if fn.endswith('.json'):
                load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
    """Path of the .trash folder inside *pkg_dir*, with the long-path
    UNC prefix prepended on Windows."""
    unc_prefix = u'\\\\?\\' if on_win else ''
    return unc_prefix + join(pkg_dir, '.trash')
def _safe_relpath(path, start_path):
"""
Used in the move_to_trash flow. Ensures that the result does not
start with any '..' which would allow to escape the trash folder
(and root prefix) and potentially ruin the user's system.
"""
result = normpath(relpath(path, start_path))
parts = result.rsplit(os.sep)
for idx, part in enumerate(parts):
if part != u'..':
return os.sep.join(parts[idx:])
return u''
def delete_trash(prefix=None):
    """Best-effort removal of the .trash folder in every packages dir.
    The *prefix* argument is accepted for API compatibility but unused."""
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    path = join(prefix, f) if f else prefix
    return move_path_to_trash(path)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each packages dir in turn; mirrors the path's layout (relative
    to the root prefix) under <pkgs_dir>/.trash/<tmp>/ so later deletion
    cannot escape the trash folder.  Returns True on success.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # this packages dir is unusable; try the next one
                continue
        # unique subfolder so repeated moves of same-named paths don't clash
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, _safe_relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # the path is gone from its prefix; drop it from the cache too
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the pre-link script, links/copies every payload file, rewrites
    embedded prefixes, optionally installs menu shortcuts, runs the
    post-link script, and finally writes the conda-meta record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-rewritten, no-link, and symlinked files must be copies
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            # the special _cache package carries no metadata of its own
            return

        # rewrite embedded placeholder prefixes to point at this prefix
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # assemble and persist the conda-meta record for this package
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a .files sidecar (if present) overrides the packaged list
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes menu items and every recorded
    file, deletes the conda-meta record, then prunes any directories the
    removal left empty.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # collect every ancestor dir of the removed files (up to prefix)
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        # deepest-first so children empty out before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Emit the contents of <prefix>/.messages.txt (accumulated by
    package scripts) to stdout, then delete the file."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # no messages were written
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Return the sorted list of distributions to remove so that at most one
    distribution per package name remains.  Distributions listed in
    `keep_dists` are never removed.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # map names to set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # `group` holds all distributions sharing one package name
        if len(group) == 1:
            # only one candidate -- nothing to remove
            continue
        if group & keep_dists:
            # protected members exist: remove everything unprotected
            removals.update(group - keep_dists)
        else:
            # no protected members: keep only the highest-sorting one
            removals.update(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """Entry point for the installer's link tool: link every extracted
    (or listed) package into a prefix, then prune duplicate dists."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    # BUG FIX: this option was added with argparse's p.add_argument() on an
    # OptionParser (AttributeError on Windows), and opts.shortcuts did not
    # exist at all on other platforms.  Declare it everywhere with a default.
    p.add_option('--shortcuts',
                 action="store_true",
                 default=False,
                 help="Install start menu shortcuts")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # BUG FIX: was `pkgs_dirs[0] = [pkgs_dir]`, which nested a list inside
    # the directory list and broke every join(pkg_dir, ...) downstream.
    # Restrict the search path to this installer's pkgs dir instead.
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        # BUG FIX: opts.shortcuts was previously passed positionally into
        # link()'s `index` parameter; pass it by keyword.
        link(prefix, dist, linktype, shortcuts=opts.shortcuts)

    messages(prefix)

    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
main()
<|code_end|>
| conda/api.py
from __future__ import print_function, division, absolute_import
from . import install
from .compat import iteritems, itervalues
from .config import normalize_urls, prioritize_channels, get_channel_urls
from .fetch import fetch_index
from .resolve import Resolve
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=False,
              offline=False, prefix=None):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    channel_urls = normalize_urls(channel_urls, platform, offline)
    if prepend:
        channel_urls.extend(get_channel_urls(platform, offline))
    channel_urls = prioritize_channels(channel_urls)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    if prefix:
        priorities = {c: p for c, p in itervalues(channel_urls)}
        maxp = max(itervalues(priorities)) + 1 if priorities else 1
        for dist, info in iteritems(install.linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # BUG FIX: this loop used to rebind the `prefix` parameter as
            # its channel-prefix scratch variable, clobbering the argument;
            # use a distinct local name.
            key_prefix = '' if schannel == 'defaults' else schannel + '::'
            priority = priorities.get(schannel, maxp)
            key = key_prefix + fn
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key] = index[key].copy()
                index[key]['link'] = info.get('link') or True
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                info['priority'] = priority
                index[key] = info
    return index
def get_package_versions(package, offline=False):
    """Return every available package record for *package* from the
    channel index (empty result allowed)."""
    resolver = Resolve(get_index(offline=offline))
    return resolver.get_pkgs(package, emptyok=True)
conda/fetch.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import sys
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel)
from .connection import CondaSession, unparse_url, RETRIES
from .install import add_cached_package, find_new_location, package_cache, dist2pair
from .lock import Locked
from .utils import memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Ensure the repodata cache directory inside the first packages dir
    exists and return its path."""
    cache_dir = join(pkgs_dirs[0], 'cache')
    try:
        os.makedirs(cache_dir)
    except OSError:
        # already exists, or not creatable -- callers treat this path as
        # best-effort
        pass
    return cache_dir
def cache_fn_url(url):
    """Local cache file name for a channel URL: the first 8 hex digits of
    the URL's MD5, with a .json extension."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy header *http_key* from response *resp* into d[dict_key];
    absent or empty headers are skipped."""
    value = resp.headers.get(http_key)
    if value:
        d[dict_key] = value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator factory: after the wrapped function returns, log *msg*
    together with the call arguments to the dotupdate logger."""

    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def inner(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return inner
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (or revalidate) repodata.json.bz2 for the channel *url*.

    Uses an on-disk JSON cache keyed on the URL's MD5, revalidated with
    HTTP ETag / Last-Modified headers.  Returns the repodata dict, or
    None for missing noarch directories and (best-effort) unreachable
    hosts; raises RuntimeError for hard HTTP failures.
    """
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # no cache yet, or a corrupt one: start fresh
        cache = {'packages': {}}

    if use_cache:
        return cache

    # conditional-request headers so an unchanged index returns 304
    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    try:
        resp = session.get(url + 'repodata.json.bz2',
                           headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
                           (remove_binstar_tokens(url), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                    .split(DEFAULT_CHANNEL_ALIAS)[1] \
                    .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None
        elif (e.response.status_code == 401 and
                rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # retry without the (possibly stale) token
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.SSLError as e:
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    # persist whatever we have (fresh or revalidated) back to the cache
    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    # No proxy configured for this scheme: nothing to add credentials to,
    # so bail out with a pointer to the proxy-configuration docs.
    if scheme not in session.proxies:
        sys.exit("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    # get_proxy_username_and_pass is memoized, so the user is only
    # prompted once per scheme even across retries.
    username, passwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return `url` with 'username:passwd' spliced into its auth slot.

    The password is URL-quoted so special characters survive; `unparse_url`
    reassembles the parsed components back into a URL string.
    """
    urlparts = list(requests.packages.urllib3.util.url.parse_url(url))
    # quote with safe='' so that '/' etc. inside the password get escaped too
    passwd = urllib_quote(passwd, '')
    urlparts[1] = username + ':' + passwd
    return unparse_url(urlparts)
@memoized
def get_proxy_username_and_pass(scheme):
    """Interactively ask for proxy credentials for `scheme`.

    Memoized, so the user is prompted at most once per scheme per run.
    """
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """Augment `index` with metadata for locally cached packages that are
    not present in any channel's repodata (e.g. installed from files or
    since removed from their channel).

    `priorities` maps channel keys to (schannel, priority) pairs; packages
    from channels without a known priority get one above the current
    maximum, so known channels always win.
    """
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        # skip packages already covered by repodata, or with no extracted copy
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        # best-effort reconstruction of the package URL, in order of trust
        if info['urls']:
            url = info['urls'][0]
        elif 'url' in meta:
            url = meta['url']
        elif 'channel' in meta:
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        # sanity checks: URL must name this package and agree on the channel
        if url.rsplit('/', 1)[-1] != fname:
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        # 'link' is install-time metadata; it does not belong in an index
        if 'link' in meta:
            del meta['link']
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Make every python 2.x / 3.x record in `index` depend on pip.

    Records are mutated in place; a missing 'depends' list is created
    on demand.
    """
    for rec in index.values():
        if rec['name'] != 'python':
            continue
        if rec['version'].startswith(('2.', '3.')):
            rec.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """Build a package index (key -> metadata dict) from the repodata of
    all `channel_urls`.

    channel_urls : iterable of URLs, or dict url -> (schannel, priority)
    use_cache    : only use locally cached repodata
    unknown      : also add locally cached packages missing from the
                   channels (see add_unknown)
    index        : optional existing index dict to update in place
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    # enforce the allowed_channels whitelist, if configured
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
- %s
""" % (url, '\n - '.join(allowed_channels)))
    # fetch repodata concurrently when concurrent.futures is available
    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in iterkeys(channel_urls)]
    else:
        try:
            urls = tuple(channel_urls)
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)
    # merge per-channel repodata into the flat index, stamping each record
    # with its channel, priority and full URL
    for channel, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            # non-default channels are keyed as 'schannel::fn'
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info
    stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`

    The download is md5-verified and recorded in the cache's urls.txt.
    If `info` carries a 'sig' entry, the detached signature is fetched
    and verified too; failure aborts the process via sys.exit.
    '''
    session = session or CondaSession()
    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        # pick a cache directory (possibly evicting a colliding package)
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)
    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from .signature import verify, SignatureError
        fn2 = fn + '.sig'
        # a sig value of '.' means the signature lives next to the package
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """Download `url` to `dst_path`, streaming into a '.part' file and
    renaming it into place on success.

    md5     : if given, verify the checksum (retrying on mismatch)
    urlstxt : if True, record the URL via add_cached_package
    retries : remaining retry budget (defaults to the module's RETRIES)
    Raises RuntimeError on unrecoverable failures.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()
    if not ssl_verify:
        # the user opted out of SSL verification; silence urllib3's warning
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)
    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))
        # Content-Length drives the progress-bar loggers below
        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))
        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by pee
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))
        if size:
            getLogger('fetch.stop').info(None)
        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))
        # atomically publish the finished download
        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))
        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """
    Context manager to handle downloads to a tempfile

    If `url` is actually a local path (no '://'), it is returned as-is and
    nothing is downloaded or cleaned up.
    """
    def __init__(self, url, verbose=True):
        # url: remote URL or local file path; verbose enables progress output
        self.url = url
        self.verbose = verbose
    def __enter__(self):
        if '://' not in self.url:
            # if we provide the file itself, no tmp dir is created
            self.tmp_dir = None
            return self.url
        else:
            if self.verbose:
                from .console import setup_handlers
                setup_handlers()
            self.tmp_dir = tempfile.mkdtemp()
            dst = join(self.tmp_dir, basename(self.url))
            download(self.url, dst)
            return dst
    def __exit__(self, exc_type, exc_value, traceback):
        # remove the temp dir (and the downloaded file) if one was created
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
on_win = bool(sys.platform == "win32")
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        # no-op stand-in for conda.lock.Locked
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # file:// URL for a local path (drive letters become '|' on Windows)
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    import ctypes
    from ctypes import wintypes
    # Win32 bindings for creating hard and symbolic links
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is unavailable (e.g. Windows XP)
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.
        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.
        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc: # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2 # copy R bits to X
        os.chmod(dst, mode)
# module logger, plus the dedicated logger used for console-facing output
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """A logging handler that silently discards every record.

    Backport of logging.NullHandler (added in Python 2.7) so that older
    interpreters do not print
    `No handlers could be found for logger "patch"`;
    see http://bugs.python.org/issue16539
    """
    def handle(self, record):
        """Discard the record."""
    def emit(self, record):
        """Discard the record."""
    def createLock(self):
        # nothing is ever written, so no lock is required
        self.lock = None
log.addHandler(NullHandler())
# Link-type codes used throughout this module
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names for the codes above (used in messages/metadata)
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` using the requested link type.

    LINK_HARD / LINK_SOFT use the platform's native link mechanism;
    LINK_COPY copies the file, preserving relative symlinks on Unix.
    Raises for an unknown linktype.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror hook that logs failures instead of raising."""
    code = exc_info[1].errno
    if code == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif code == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # Retry loop: deletions can fail transiently, especially on
            # Windows where files may be held open briefly (AV, indexing).
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            # clear read-only bits and retry the removal
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # last resort: let cmd.exe remove the tree
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): unreachable after the raise
                                # above -- msg is never extended here
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove `path` if it is an empty directory; otherwise do nothing.

    Missing paths, files, and non-empty directories are all silently
    ignored.
    """
    try:
        os.rmdir(path)
    except OSError:
        # not there, not a directory, or not empty -- all fine
        pass
def yield_lines(path):
    """Iterate over the meaningful lines of the text file at `path`.

    Yields each line stripped of surrounding whitespace, skipping blank
    lines and '#' comment lines. Uses a context manager so the file
    handle is closed deterministically (the previous implementation
    leaked it until garbage collection).
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Dummy prefix recorded into packages at build time; it is substituted with
# the real install prefix at link time (see read_has_prefix/update_prefix).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    Lines that cannot be parsed as 'placeholder mode filename' are taken
    to be old-style entries: a bare filename with the default placeholder
    and text mode.
    """
    entries = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, fname = [tok.strip('"\'') for tok in
                                            shlex.split(line, posix=False)]
            except ValueError:
                # old-style (or unparsable) line: filename only
                entries[line] = (prefix_placeholder, 'text')
            else:
                entries[fname] = (placeholder, mode)
    except IOError:
        pass
    return entries
class PaddingError(Exception):
    """Raised by binary_replace when the replacement prefix is longer than
    the placeholder, leaving no room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Replace every occurrence of the bytes `a` with the bytes `b` inside
    `data`, null-padding each replaced, null-terminated region so the
    total length of `data` is preserved.

    Raises PaddingError when `b` is longer than `a` (no room to pad).
    """
    def _pad_sub(match):
        chunk = match.group()
        count = chunk.count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return chunk.replace(a, b) + b'\0' * pad
    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_sub, data)
    # the whole point is length preservation -- verify it
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """
    Rewrite an over-long (>127 byte) shebang line in text-mode `data`
    to the portable `#!/usr/bin/env <name>` form, since kernels truncate
    long shebangs. Non-text modes and short shebangs pass through
    unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if not match:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) > 127:
        name = interpreter.decode('utf-8').split('/')[-1]
        replacement = '#!/usr/bin/env {0}{1}'.format(name,
                                                     options.decode('utf-8'))
        data = data.replace(shebang, replacement.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """
    Substitute `placeholder` with `new_prefix` in the bytes `data`.

    'text' mode is a plain byte substitution; 'binary' mode preserves the
    data length via null padding and is skipped on Windows (embedded
    prefixes are not used for things like RPATH there). Any other mode
    aborts the process.
    """
    old = placeholder.encode('utf-8')
    new = new_prefix.encode('utf-8')
    if mode == 'text':
        return data.replace(old, new)
    if mode == 'binary':
        if on_win:
            # Skip binary replacement on Windows -- some files embed prefix
            # information, but it is not used for things like RPATH.
            logging.debug("Skipping prefix replacement in binary on Windows")
            return data
        return binary_replace(data, old, new)
    sys.exit("Invalid mode: %s" % mode)
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the embedded build-time `placeholder` in the file at `path`
    with `new_prefix` using the given replacement `mode` ('text' or
    'binary'); on Unix, over-long shebangs are also shortened. The file
    is only rewritten if its contents actually change."""
    if on_win:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    # restore the original permission bits on the rewritten file
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into (schannel, dist-name).

    Tolerates an optional '[features]' suffix, a '.tar.bz2' extension and
    a 'channel::' prefix; the channel defaults to 'defaults'.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    pieces = dist.split('::', 1)
    if len(pieces) == 2:
        return pieces[0], pieces[1]
    return 'defaults', pieces[0]
def dist2quad(dist):
    """Split a dist into (name, version, build, schannel); missing
    version/build components come back as empty strings."""
    channel, dname = dist2pair(dist)
    pieces = dname.rsplit('-', 2) + ['', '']
    return (pieces[0], pieces[1], pieces[2], channel)
def dist2name(dist):
    """Return just the package name of a dist."""
    return dist2quad(dist)[0]
def name_dist(dist):
    """Alias for dist2name."""
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Return the dist's filename: its channel-less name plus `suffix`."""
    return dist2pair(dist)[1] + suffix
def dist2dirname(dist):
    """Return the directory name the dist extracts to (no extension)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our intenral cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # keep the in-memory linked-data cache in sync with the new file
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.

    Menu creation is strictly best-effort: any failure is logged and the
    installation continues.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:  # noqa -- deliberately broad: never break the install over menus
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:  # noqa -- best-effort, see above
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    The script (if shipped by the package) runs with ROOT_PREFIX, PREFIX,
    PKG_NAME, PKG_VERSION and PKG_BUILDNUM in its environment; pre-link
    scripts additionally get SOURCE_DIR.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # no script shipped for this action: trivially successful
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for `dist` in the package cache, or
    None when the package has no recorded URL."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png under `source_dir`) encoded
    as a base64 str, or None when the icon is missing or unreadable.

    Uses a context manager so the file handle is closed deterministically
    (the previous implementation leaked it until garbage collection).
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        return None
def read_no_link(info_dir):
    """Return the union of entries listed in info/no_link and
    info/no_softlink as a set; a missing file contributes nothing."""
    entries = set()
    for name in ('no_link', 'no_softlink'):
        try:
            entries.update(set(yield_lines(join(info_dir, name))))
        except IOError:
            pass
    return entries
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
# do not symlink root env - this clobbers activate incorrectly.
if normpath(prefix) == normpath(sys.prefix):
return
if on_win:
where = 'Scripts'
symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
else:
where = 'bin'
symlink_fn = os.symlink
if not isdir(join(prefix, where)):
os.makedirs(join(prefix, where))
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda/activate/deactivate entry points from the root
    environment's `where` directory into `prefix`, using `symlink_fn` to
    create each link. Stale existing links are removed first; links that
    survive removal (in use) are left alone."""
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    for name in ("conda", "activate", "deactivate"):
        source = join(root_dir, where, name)
        target = join(target_dir, name)
        # try to kill stale links if they exist
        if os.path.lexists(target):
            os.remove(target)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(target):
            symlink_fn(source, target)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
dist = dist2filename(dist, '')
src = join(pkgs_dir, dist, 'info', 'index.json')
dst = join(prefix, '.tmp-%s' % dist)
assert isfile(src), src
assert not isfile(dst), dst
try:
if not isdir(prefix):
os.makedirs(prefix)
_link(src, dst, LINK_HARD)
# Some file systems (at least BeeGFS) do not support hard-links
# between files in different directories. Depending on the
# file system configuration, a symbolic link may be created
# instead. If a symbolic link is created instead of a hard link,
# return False.
return not os.path.islink(dst)
except OSError:
return False
finally:
rm_rf(dst)
rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# dist key -> {'urls': [...], 'files': [...], 'dirs': [...]}; see package_cache()
package_cache_ = {}
# package path / file:// URL -> 'schannel::' prefix ('' for defaults)
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed instead of a URL
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # an extracted copy only counts if its essential metadata is intact
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # index the package under both its path and its file:// URL form
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # record the URL so future runs can reconstruct the channel
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # urls.txt is processed newest-first, so later downloads win
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # pick up packages present on disk but missing from urls.txt
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for `url`, or None when the URL
    is unknown to the package cache."""
    package_cache()  # make sure the lookup tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # pass 0: only accept a directory without a same-name package;
            # pass 1: accept the first directory and report the conflict
            # (prefix is never None here, since pass 0 would have returned)
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
"""
Returns the (set of canonical names) of all fetched packages
"""
return set(dist for dist, rec in package_cache().items() if rec['files'])
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both lookup-table entries (path form and file:// URL form)
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
"""
return the (set of canonical names) of all extracted packages
"""
return set(dist for dist, rec in package_cache().items() if rec['dirs'])
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # the fetched tarball remains; just forget the extracted copies
        rec['dirs'] = []
    else:
        # nothing left at all -- drop the cache entry entirely
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        # extract into a .tmp dir first, then rename, for atomicity
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            # NOTE(review): extractall trusts member paths inside the
            # archive; packages are assumed to come from trusted channels
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load (or install, when `rec` is given) the conda-meta record for
    `dist` in `prefix` into the in-memory linked-data cache, normalizing
    its fn/url/channel fields. Returns the record, or None when the
    metadata file is missing or inconsistent."""
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # make sure the per-prefix cache dict exists before writing into it
        linked_data(prefix)
    url = rec.get('url')
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        url = rec['url'] = rec['channel'] + rec['fn']
    # the filename recorded in the metadata must match the file's own name
    if rec['fn'][:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop `dist` from the in-memory linked-data cache for `prefix`;
    when `delete` is true, also remove its conda-meta JSON file."""
    cache = linked_data_.get(prefix)
    if cache and dist in cache:
        del cache[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    """Purge cached records for *path*, which may be either a complete
    prefix or a dist directory/file inside a prefix.

    Returns True when something was found in the cache, else False.
    """
    dist = ''
    while True:
        if path in linked_data_:
            if not dist:
                # *path* itself is a cached prefix: forget it entirely
                del linked_data_[path]
            else:
                # *path* was <prefix>/<dist>: drop just that one record
                delete_linked_data(path, dist)
            return True
        path, dist = os.path.split(path)
        if not dist:
            return False
def load_meta(prefix, dist):
    """Fetch the install metadata record for *dist* inside *prefix*.

    Returns None when the package is not linked there.
    """
    records = linked_data(prefix)
    return records.get(dist)
def linked_data(prefix):
    """Return {dist: record} for every package linked into *prefix*.

    Hand-rolled memoization (rather than a decorator) so that other
    functions can mutate the cached dict in place.
    """
    cached = linked_data_.get(prefix)
    if cached is not None:
        return cached
    cached = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        json_names = (fn for fn in os.listdir(meta_dir) if fn.endswith('.json'))
        for fn in json_names:
            load_linked_data(prefix, fn[:-5])
    return cached
def linked(prefix):
    """Return the set of canonical names of packages linked into *prefix*."""
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """Look up the install metadata for *dist* in *prefix*.

    Returns the metadata record, or None when the package is not linked.
    """
    # FIXME: despite the `is_` name this returns a record/None, not a bool.
    meta = load_meta(prefix, dist)
    return meta
def _get_trash_dir(pkg_dir):
    """Return the path of the .trash folder inside *pkg_dir*, using the
    extended-length ``\\\\?\\`` prefix on Windows."""
    trash = join(pkg_dir, '.trash')
    if on_win:
        # the \\?\ prefix lifts the MAX_PATH limit for Win32 path APIs
        trash = u'\\\\?\\' + trash
    return trash
def _safe_relpath(path, start_path):
    """Relative path from *start_path* to *path* with every leading '..'
    component stripped.

    Used in the move_to_trash flow.  Ensures that the result cannot
    escape the trash folder (and root prefix) and potentially ruin the
    user's system.
    """
    pieces = normpath(relpath(path, start_path)).rsplit(os.sep)
    idx = 0
    while idx < len(pieces) and pieces[idx] == u'..':
        idx += 1
    if idx == len(pieces):
        # nothing but '..' components -- refuse to escape
        return u''
    return os.sep.join(pieces[idx:])
def delete_trash(prefix=None):
    """Best-effort removal of the .trash folder in every package cache.

    *prefix* is accepted for interface compatibility but is unused.
    """
    for cache_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(cache_dir)
        log.debug("Trying to delete the trash dir %s" % trash_dir)
        try:
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """Deprecated shim around `move_path_to_trash`.

    Moves file/folder *f* (relative to *prefix*) to the trash; when *f*
    is empty the whole *prefix* is moved.  *tempdir* is ignored.
    """
    if f:
        target = join(prefix, f)
    else:
        target = prefix
    return move_path_to_trash(target)
def move_path_to_trash(path):
    """
    Move a path to the trash.

    Tries each package cache directory in turn; returns True on the first
    successful move and False if every cache failed.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            # EEXIST is fine; any other error means this cache is unusable
            if e1.errno != errno.EEXIST:
                continue
        # unique subfolder so repeated moves of same-named paths cannot clash
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # _safe_relpath strips leading '..' so the move cannot escape the trash
        trash_dir = join(trash_dir, _safe_relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # keep the in-memory linked-package cache consistent
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    linktype   -- LINK_HARD, LINK_SOFT or LINK_COPY
    index      -- optional repodata index used to seed the conda-meta record
    shortcuts  -- when true, install menu shortcuts via mk_menus()
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    # pre-link scripts may veto the whole installation
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            # on Windows an in-use file cannot be unlinked;
                            # moving it to the trash usually still works
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # files that embed the prefix, or are explicitly no-link, or are
            # themselves symlinks, must be copied rather than linked
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            # the special _cache package gets no metadata/scripts
            return
        # rewrite the recorded placeholder prefix inside each flagged file
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # assemble the conda-meta record for this install
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a noarch-style .files listing, when present, overrides info/files
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            # remember every parent directory so empty ones can be pruned below
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        # on Windows an in-use file cannot be unlinked, but it
                        # can usually still be moved to the trash
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # expand the directory set to include all ancestors up to the prefix
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest directories first, so children empty out before parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt (if any) to stdout,
    then delete the file."""
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as fh:
            text = fh.read()
        sys.stdout.write(text)
    except IOError:
        # no messages were produced -- nothing to show
        pass
    finally:
        rm_rf(msg_file)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Return the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains.  `keep_dists` is an
    iterable of distributions which are not allowed to be removed.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for dists in by_name.values():
        # `dists` is the group of packages with the same name
        if len(dists) < 2:
            # a single build of this package -- nothing to prune
            continue
        if dists & keep_dists:
            # some builds are protected: drop everything not protected
            removals |= dists - keep_dists
        else:
            # nothing protected: keep only the highest-sorting build
            removals |= set(sorted(dists)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """CLI entry point used by the self-extracting installer to link all
    extracted packages into *prefix*."""
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    if sys.platform == "win32":
        # BUG FIX: this is an optparse.OptionParser, which provides
        # add_option(); the original called argparse's add_argument()
        # and would raise AttributeError on Windows.
        p.add_option('--shortcuts',
                     action="store_true",
                     help="Install start menu shortcuts")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # BUG FIX: the original did `pkgs_dirs[0] = [pkgs_dir]`, nesting a list
    # inside the global list of cache directories.
    pkgs_dirs[0] = pkgs_dir
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    # --shortcuts only exists on Windows; default to False elsewhere
    shortcuts = bool(getattr(opts, 'shortcuts', False))
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        # BUG FIX: pass shortcuts by keyword -- positionally it landed in
        # link()'s `index` parameter and shortcuts were never installed.
        link(prefix, dist, linktype, shortcuts=shortcuts)
    messages(prefix)
    # prune stale duplicate conda-meta entries left from earlier runs
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
    main()
| conda/api.py
--- a/conda/api.py
+++ b/conda/api.py
@@ -35,7 +35,8 @@ def get_index(channel_urls=(), prepend=True, platform=None,
key = prefix + fn
if key in index:
# Copy the link information so the resolver knows this is installed
- index[key]['link'] = info.get('link')
+ index[key] = index[key].copy()
+ index[key]['link'] = info.get('link') or True
else:
# only if the package in not in the repodata, use local
# conda-meta (with 'depends' defaulting to [])
conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -235,6 +235,8 @@ def add_unknown(index, priorities):
if schannel2 != schannel:
continue
priority = priorities.get(schannel, maxp)
+ if 'link' in meta:
+ del meta['link']
meta.update({'fn': fname, 'url': url, 'channel': channel,
'schannel': schannel, 'priority': priority})
meta.setdefault('depends', [])
conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -861,6 +861,7 @@ def load_linked_data(prefix, dist, rec=None):
channel, schannel = url_channel(url)
rec['channel'] = channel
rec['schannel'] = schannel
+ rec['link'] = rec.get('link') or True
cprefix = '' if schannel == 'defaults' else schannel + '::'
linked_data_[prefix][str(cprefix + dname)] = rec
return rec |
activate symlink error
When using `source activate`, users get this symlink error:
> While trying to use the nfs installation with a user account that lacks the write permissions on the installation and the preconfigured environments, I'm getting an error:
>
> Cannot activate environment bash, not have write access to conda symlink
>
> Is this new, or I simply didn't notice the problem in the earlier versions?
https://github.com/conda/conda/blob/4.1.x/conda/cli/activate.py#L158-L167 seems to be testing that it can create the symlink even when it already exists. Can anyone explain this functionality, and why it would require permission to write to the root environment?
| conda/cli/activate.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (shells, run_in)
on_win = sys.platform == "win32"
def help(command, shell):
    """Print usage for the hidden ..activate/..deactivate commands and exit."""
    # sys.argv[1] will be ..checkenv in activate if an environment is already
    # activated
    # get grandparent process name to see which shell we're using
    windows_style = shell in ["cmd.exe", "powershell.exe"]
    if command in ('..activate', '..checkenv'):
        if windows_style:
            usage = """Usage: activate ENV
Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path."""
        else:
            usage = """Usage: source activate ENV
Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path."""
    elif command == '..deactivate':
        if windows_style:
            usage = """Usage: deactivate
Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH."""
        else:
            usage = """Usage: source deactivate
Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """
    else:
        usage = "No help available for command %s" % sys.argv[1]
    sys.exit(usage)
def prefix_from_arg(arg, shelldict):
    """Resolve *arg* (an env name or a path) to a platform-native prefix.

    Raises ValueError when no matching environment can be found.
    """
    # MSYS2 converts Unix paths to Windows paths with unix seps
    # so we must check for the drive identifier too.
    looks_like_path = shelldict['sep'] in arg and not re.match('[a-zA-Z]:', arg)
    if not looks_like_path:
        # treat the argument as an environment name
        named = find_prefix_name(arg.replace('/', os.path.sep))
        if named is None:
            raise ValueError('could not find environment: %s' % arg)
        return named
    native_path = shelldict['path_from'](arg)
    # strip is removing " marks, not \ - look carefully
    unquoted = abspath(native_path.strip("\""))
    if not isdir(unquoted):
        raise ValueError('could not find environment: %s' % native_path)
    return unquoted
def binpath_from_arg(arg, shelldict):
    """Return the list of shell-native binary directories for the env *arg*."""
    # prefix comes back as a platform-native path
    prefix = prefix_from_arg(arg, shelldict=shelldict)
    if sys.platform == 'win32':
        native_paths = [
            prefix.rstrip("\\"),
            os.path.join(prefix, 'Library', 'mingw-w64', 'bin'),
            os.path.join(prefix, 'Library', 'usr', 'bin'),
            os.path.join(prefix, 'Library', 'bin'),
            os.path.join(prefix, 'Scripts'),
        ]
    else:
        native_paths = [os.path.join(prefix, 'bin')]
    # convert each path to the shell's own representation
    convert = shelldict['path_to']
    return [convert(p) for p in native_paths]
def pathlist_to_str(paths, escape_backslashes=True):
    """
    Format a path list, e.g., of bin paths to be added or removed,
    for user-friendly output.
    """
    joined = ' and '.join(paths)
    if not (on_win and escape_backslashes):
        # collapse doubled backslashes for display
        return joined.replace("\\\\", "\\")
    # escape for printing to console - ends up as single \
    return re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', joined)
def get_path(shelldict):
    """Ask the shell itself for its PATH via a subprocess.

    os.getenv is not good enough here: bash on Windows has a wildly
    different PATH from the Windows one, so the value must come from the
    target shell.  The result stays in that shell's native representation,
    not necessarily the platform's.
    """
    output = run_in(shelldict["printpath"], shelldict)
    return output[0]
def main():
    """Dispatch the hidden ..activate / ..checkenv / ..setps1 subcommands.

    Prints the new PATH (or prompt string) on stdout for the wrapper
    shell scripts to consume; exits via sys.exit on errors.
    """
    from conda.config import root_env_name, root_dir, changeps1
    import conda.install

    if '-h' in sys.argv or '--help' in sys.argv:
        # all execution paths sys.exit at end.
        help(sys.argv[1], sys.argv[2])

    shell = sys.argv[2]
    shelldict = shells[shell]
    if sys.argv[1] == '..activate':
        path = get_path(shelldict)
        if len(sys.argv) == 3 or sys.argv[3].lower() == root_env_name.lower():
            binpath = binpath_from_arg(root_env_name, shelldict=shelldict)
            rootpath = None
        elif len(sys.argv) == 4:
            binpath = binpath_from_arg(sys.argv[3], shelldict=shelldict)
            rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
        else:
            sys.exit("Error: did not expect more than one argument")
        pathlist_str = pathlist_to_str(binpath)
        sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))
        # Clear the root path if it is present
        if rootpath:
            path = path.replace(shelldict['pathsep'].join(rootpath), "")
        path = path.lstrip()
        # prepend our new entries onto the existing path and make sure that the separator is native
        path = shelldict['pathsep'].join(binpath + [path, ])
        # Clean up any doubled-up path separators
        path = path.replace(shelldict['pathsep'] * 2, shelldict['pathsep'])
    # deactivation is handled completely in shell scripts - it restores backups of env variables.
    # It is done in shell scripts because they handle state much better than we can here.
    elif sys.argv[1] == '..checkenv':
        if len(sys.argv) < 4:
            sys.argv.append(root_env_name)
        if len(sys.argv) > 4:
            sys.exit("Error: did not expect more than one argument.")
        if sys.argv[3].lower() == root_env_name.lower():
            # no need to check root env and try to install a symlink there
            sys.exit(0)
        # this should throw an error and exit if the env or path can't be found.
        try:
            prefix = prefix_from_arg(sys.argv[3], shelldict=shelldict)
        except ValueError as e:
            sys.exit(getattr(e, 'message', e))
        # Make sure an env always has the conda symlink
        try:
            conda.install.symlink_conda(prefix, root_dir, shell)
        except (IOError, OSError) as e:
            if e.errno == errno.EPERM or e.errno == errno.EACCES:
                # BUG FIX: a read-only environment (e.g. a shared NFS
                # install) used to abort activation here even when the
                # symlinks already existed.  Only fail when the conda
                # entry point is genuinely missing.
                bin_dir = 'Scripts' if on_win else 'bin'
                conda_script = os.path.join(prefix, bin_dir, 'conda')
                if os.path.exists(conda_script) or os.path.exists(conda_script + '.bat'):
                    sys.exit(0)
                # BUG FIX: report the environment (sys.argv[3]); the old
                # message showed the shell name (sys.argv[2]) instead.
                msg = ("Cannot activate environment {0}: no write access to "
                       "create the conda symlink".format(sys.argv[3]))
                sys.exit(msg)
            raise
        sys.exit(0)
    elif sys.argv[1] == '..setps1':
        # path is a bit of a misnomer here. It is the prompt setting. However, it is returned
        # below by printing. That is why it is named "path"
        # DO NOT use os.getenv for this. One Windows especially, it shows cmd.exe settings
        # for bash shells. This method uses the shell directly.
        path = os.getenv(shelldict['promptvar'], '')
        # failsafes
        if not path:
            if shelldict['exe'] == 'cmd.exe':
                path = '$P$G'
        # strip off previous prefix, if any:
        path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
        env_path = sys.argv[3]
        if changeps1 and env_path:
            path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)
    else:
        # This means there is a bug in main.py
        raise ValueError("unexpected command")
    # This print is actually what sets the PATH or PROMPT variable. The shell
    # script gets this value, and finishes the job.
    print(path)
<|code_end|>
conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
on_win = bool(sys.platform == "win32")
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    class Locked(object):
        """No-op stand-in for conda.lock.Locked; the installer runs
        single-process, so no locking is needed."""
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa
        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # file:// URL for a local path (drive letter escaped on Windows)
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    import ctypes
    from ctypes import wintypes
    # Win32 API bindings: hard/soft links are created through kernel32
    # because os.link/os.symlink are not reliably available on Windows.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is missing on old Windows (e.g. XP)
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.
        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.
        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X (292 == 0o444)
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
log.addHandler(NullHandler())
# Link strategies, consumed by _link()/link(); link_name_map translates them
# into the human-readable names stored in conda-meta records.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* from *src* using the requested strategy: hard link,
    soft link, or plain copy."""
    if linktype == LINK_HARD:
        hard = win_hard_link if on_win else os.link
        hard(src, dst)
    elif linktype == LINK_SOFT:
        soft = win_soft_link if on_win else os.symlink
        soft(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror hook: make *path* writable, then retry *func*."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror hook that downgrades failures to log warnings."""
    err = exc_info[1]
    if err.errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err.errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # escalating retry loop: plain rmtree, then (on Windows) the
            # read-only fixup, `rd /s /q`, and finally the trash fallback
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                # NOTE(review): the line below is unreachable
                                # because of the bare `raise` above -- the
                                # trash failure aborts instead of being
                                # appended to msg.  Looks unintentional;
                                # confirm before changing.
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """Delete *path* if it is an empty directory; otherwise do nothing.

    Missing paths, non-directories and non-empty directories are all
    silently ignored.
    """
    try:
        os.rmdir(path)
    except OSError:
        # directory might not exist or not be empty -- leave it alone
        pass
def yield_lines(path):
    """Yield the stripped, non-empty, non-comment lines of the file *path*.

    BUG FIX: the original iterated over a bare open() call and never
    closed the file handle; a context manager closes it deterministically
    once the generator is exhausted (or closed).
    """
    with open(path) as fh:
        for line in fh:
            line = line.strip()
            # skip blank lines and '#' comment lines
            if not line or line.startswith('#'):
                continue
            yield line
# Default placeholder prefix baked into relocatable package files; it is
# rewritten to the real install prefix by update_prefix().
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """Parse an info/has_prefix file.

    Returns {filename: (placeholder, mode)}.  A missing file yields {};
    lines without the full three fields fall back to the default
    placeholder in 'text' mode.
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                fields = [x.strip('"\'') for x in shlex.split(line, posix=False)]
                placeholder, mode, fname = fields
                mapping[fname] = (placeholder, mode)
            except ValueError:
                # old-style line: just a filename, no placeholder/mode
                mapping[line] = (prefix_placeholder, 'text')
    except IOError:
        pass
    return mapping
class PaddingError(Exception):
    # Raised by binary_replace() when the replacement prefix is longer than
    # the placeholder it must overwrite (no room to null-pad).
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def _pad_replace(match):
        occurrences = match.group().count(a)
        padding = (len(a) - len(b)) * occurrences
        if padding < 0:
            # the replacement is longer than the placeholder: cannot pad
            raise PaddingError(a, b, padding)
        return match.group().replace(a, b) + b'\0' * padding

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_replace, data)
    # total length must be preserved for binary patching to be safe
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite a shebang line longer than 127 bytes to use /usr/bin/env
    (text mode only); other modes and short shebangs pass through."""
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    whole_shebang, executable, options = match.groups()
    if len(whole_shebang) <= 127:
        return data
    executable_name = executable.decode('utf-8').split('/')[-1]
    new_shebang = '#!/usr/bin/env {0}{1}'.format(executable_name,
                                                 options.decode('utf-8'))
    return data.replace(whole_shebang, new_shebang.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* in *data* per *mode*
    ('text' or 'binary'); exits the process on an unknown mode."""
    old = placeholder.encode('utf-8')
    new = new_prefix.encode('utf-8')
    if mode == 'text':
        return data.replace(old, new)
    if mode == 'binary':
        # Skip binary replacement in Windows. Some files do have prefix information embedded, but
        # this should not matter, as it is not used for things like RPATH.
        if on_win:
            logging.debug("Skipping prefix replacement in binary on Windows")
            return data
        return binary_replace(data, old, new)
    sys.exit("Invalid mode: %s" % mode)
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the embedded *placeholder* prefix in the file at *path* so
    it points at *new_prefix*, preserving the file's permission bits."""
    if on_win:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        # nothing changed -- avoid touching the file at all
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist spec into (schannel, dist-name).

    Strips any trailing '[features]' selector and '.tar.bz2' suffix; a
    missing 'channel::' prefix maps to the 'defaults' channel.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    pieces = dist.split('::', 1)
    if len(pieces) == 2:
        return pieces[0], pieces[1]
    return 'defaults', pieces[0]


def dist2quad(dist):
    """Return (name, version, build, schannel) for a dist spec."""
    channel, dist = dist2pair(dist)
    # pad so short (malformed) names still unpack into four fields
    pieces = dist.rsplit('-', 2) + ['', '']
    return (pieces[0], pieces[1], pieces[2], channel)


def dist2name(dist):
    """Return just the package name of a dist spec."""
    return dist2quad(dist)[0]


def name_dist(dist):
    """Alias of dist2name(), kept for backwards compatibility."""
    return dist2name(dist)


def dist2filename(dist, suffix='.tar.bz2'):
    """Return the package filename (channel stripped) plus *suffix*."""
    return dist2pair(dist)[1] + suffix


def dist2dirname(dist):
    """Return the extracted-directory name for a dist spec."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info; extra_info wins over index.json on key collisions
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # keep the internal linked-data cache in sync with what we just wrote
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional; a missing/broken install must not break linking
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # a bad menu spec should not abort the whole package operation
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # scripts live at bin/.<name>-<action>.sh (Scripts\...bat on Windows)
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # package ships no script for this action -- treat as success
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no cmd.exe available -- the script cannot be run
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # expose the conventional conda script environment variables
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first recorded URL for *dist* from the package cache,
    or None when no URL is known."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    if not urls:
        return None
    return urls[0]
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) as a base64 string.

    Returns None when the package ships no icon.

    BUG FIX: the original read via a bare open() call and never closed
    the file handle; a context manager closes it deterministically.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    """Return the union of filenames listed in info/no_link and
    info/no_softlink; missing listing files are treated as empty."""
    no_link = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            no_link.update(yield_lines(join(info_dir, listing)))
        except IOError:
            pass
    return no_link
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """Link the root installation's conda entry scripts into *prefix*
    (symlinks on Unix, .bat/script redirects on Windows)."""
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda/activate/deactivate scripts from
    ``root_dir/where`` into ``prefix/where`` using *symlink_fn*,
    replacing stale links when possible."""
    target_dir = join(prefix, where)
    if not isdir(target_dir):
        os.makedirs(target_dir)
    for script in ("conda", "activate", "deactivate"):
        source = join(root_dir, where, script)
        target = join(target_dir, script)
        # try to kill stale links if they exist
        if os.path.lexists(target):
            os.remove(target)
        # if they're in use, they won't be killed. Skip making new symlink.
        if not os.path.lexists(target):
            symlink_fn(source, target)
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether hard links work between the package cache and the
    target prefix by hard-linking one small metadata file and checking
    the result.  Returns True only if a genuine hard link was created.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # always clean up the probe file, and remove the prefix again
        # if this probe was the only thing in it
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# Maps '<schannel>::<dist>' keys ('' prefix for defaults) to dicts with
# 'files', 'dirs' and 'urls' lists (see add_cached_package).
package_cache_ = {}
# Maps tarball paths and their file:// URLs to the channel prefix string.
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    # make sure the cache tables have been built before touching them
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed: no channel information available
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    # an existing entry wins unless the caller explicitly overwrites
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # an extracted copy counts only if its essential metadata is intact
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    # register both the filesystem path and the file:// URL of the tarball
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # persist the URL so future runs can re-derive the channel
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    # (add_cached_package calls back into package_cache; a placeholder
    # entry makes the cache look non-empty while it is being built)
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # iterate newest-first so the most recent URL for a given
            # filename is the one that gets registered
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            # pick up any packages present on disk but absent from urls.txt
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url* in the cache tables,
    or None when the URL is unknown."""
    package_cache()  # ensure the tables are populated
    try:
        return fname_table_[url]
    except KeyError:
        return None
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # p == 1 implies every pkg_dir had a conflict on the first
            # pass, so prefix is guaranteed non-None here
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both lookup keys (path and file:// URL) before deleting
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    # finally forget the cache record itself
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    If no fetched tarball remains either, the whole cache record is
    dropped; otherwise only its list of extracted copies is cleared.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for extracted_dir in rec['dirs']:
        with Locked(dirname(extracted_dir)):
            rm_rf(extracted_dir)
    if not rec['files']:
        del package_cache_[dist]
    else:
        rec['dirs'] = []
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        # extract into a .tmp sibling first, then rename into place, so a
        # partially extracted package never masquerades as a complete one
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        # NOTE(review): extractall trusts archive member paths; tarballs
        # here come from the local package cache
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # re-register so the new extracted dir shows up in the cache
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}  # maps prefix -> {dist: metadata record}
def load_linked_data(prefix, dist, rec=None):
    """
    Load (or accept) the conda-meta record for *dist* in *prefix*,
    normalize its fn/url/channel fields, store it in the linked-data
    cache and return it.  Returns None when the metadata file is
    missing or inconsistent with the dist name.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # caller supplied the record; make sure the prefix cache exists
        linked_data(prefix)
    url = rec.get('url')
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        url = rec['url'] = rec['channel'] + rec['fn']
    if rec['fn'][:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    # defaults-channel packages get no prefix, others get 'schannel::'
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked-data cache of *prefix*;
    when *delete* is true, also remove its conda-meta JSON file."""
    cache = linked_data_.get(prefix)
    if cache and dist in cache:
        del cache[dist]
    if not delete:
        return
    meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
    if isfile(meta_path):
        os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # path turned out to be a prefix; drop just this dist
                delete_linked_data(path, dist)
            else:
                # path itself is a cached prefix; forget it entirely
                del linked_data_[path]
            return True
        path, dist = os.path.split(path)
        if not dist:
            # walked all the way up without finding a cached prefix
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    cached = linked_data_.get(prefix)
    if cached is not None:
        return cached
    cached = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fn in os.listdir(meta_dir):
            if fn.endswith('.json'):
                # load_linked_data fills linked_data_[prefix] in place
                load_linked_data(prefix, fn[:-5])
    return cached
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    meta = load_meta(prefix, dist)
    return meta
def _get_trash_dir(pkg_dir):
    """Return the .trash directory for *pkg_dir*; on Windows the path is
    given the \\\\?\\ long-path prefix."""
    trash = join(pkg_dir, '.trash')
    if on_win:
        return u'\\\\?\\' + trash
    return trash
def _safe_relpath(path, start_path):
"""
Used in the move_to_trash flow. Ensures that the result does not
start with any '..' which would allow to escape the trash folder
(and root prefix) and potentially ruin the user's system.
"""
result = normpath(relpath(path, start_path))
parts = result.rsplit(os.sep)
for idx, part in enumerate(parts):
if part != u'..':
return os.sep.join(parts[idx:])
return u''
def delete_trash(prefix=None):
    """
    Try to empty the .trash directory of every package cache.
    `prefix` is accepted for backward compatibility but is unused.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            # best effort only: trash=False prevents recursing back here
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path):
    """
    Move a path to the trash
    Tries each package cache's trash directory in turn and returns True
    on the first success, False when every attempt failed.
    """
    # Try deleting the trash every time we use it.
    delete_trash()
    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # cannot create a trash dir here: try the next cache
                continue
        # unique subdirectory so repeated moves of the same name don't clash
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        # mirror the original layout (escape-proofed by _safe_relpath)
        trash_dir = join(trash_dir, _safe_relpath(os.path.dirname(path), root_dir))
        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue
        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # the path is gone from its prefix; drop it from the cache too
            delete_linked_data_any(path)
            return True
    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links/copies each file listed in
    info/files, rewrites embedded placeholder prefixes, optionally
    installs menu shortcuts, runs post-link, and finally records the
    install metadata via create_meta().
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    # hold both the environment and the package cache while linking
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # files needing prefix rewriting (or flagged no_link, or that
            # are themselves symlinks) must be real copies
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        if name_dist(dist) == '_cache':
            return
        # rewrite build-time placeholder prefixes to the real prefix
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a post-link script may leave an alternate file list behind
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs pre-unlink, removes menu items and every installed file, deletes
    the conda-meta record, and prunes any directories left empty.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor directory (inside the prefix) of removed files
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest-first so children are emptied before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt to stdout, if it
    exists, then delete the file."""
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as fh:
            sys.stdout.write(fh.read())
    except IOError:
        # no messages file -> nothing to show
        pass
    finally:
        rm_rf(msg_file)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    interable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # map names to set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # `group` holds all packages sharing one name
        if len(group) == 1:
            # a unique package: nothing to prune
            continue
        if group & keep_dists:
            # protected members exist: drop only the unprotected ones
            removals |= group - keep_dists
        else:
            # nothing protected: keep only the highest-sorting dist
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """
    Minimal CLI used by the installer: link a set of extracted packages
    into a prefix, then warn about and unlink duplicate distributions.
    """
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    if sys.platform == "win32":
        # BUGFIX: OptionParser has no add_argument (that is argparse's
        # API); the original p.add_argument call raised AttributeError.
        p.add_option('--shortcuts',
                     action="store_true",
                     help="Install start menu shortcuts")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # BUGFIX: keep pkgs_dirs a flat list of directory paths; the original
    # `pkgs_dirs[0] = [pkgs_dir]` nested a list inside the list, breaking
    # every later `join(pkg_dir, ...)` over pkgs_dirs.
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    # --shortcuts only exists on Windows; default to False elsewhere
    shortcuts = getattr(opts, 'shortcuts', False)
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        # BUGFIX: pass shortcuts by keyword; passed positionally it landed
        # in link()'s `index` parameter.
        link(prefix, dist, linktype, shortcuts=shortcuts)
    messages(prefix)
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
    main()
<|code_end|>
| conda/cli/activate.py
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (shells, run_in)
on_win = sys.platform == "win32"
def help(command, shell):
    """Print usage help for the given hidden activate/deactivate command
    and exit.  Always raises SystemExit."""
    # sys.argv[1] will be ..checkenv in activate if an environment is already
    # activated
    # get grandparent process name to see which shell we're using
    if command in ('..activate', '..checkenv'):
        if shell in ["cmd.exe", "powershell.exe"]:
            sys.exit("""Usage: activate ENV
Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
        else:
            sys.exit("""Usage: source activate ENV
Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
    elif command == '..deactivate':
        if shell in ["cmd.exe", "powershell.exe"]:
            sys.exit("""Usage: deactivate
Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH.""")
        else:
            sys.exit("""Usage: source deactivate
Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
    else:
        # BUGFIX: report the `command` parameter, not sys.argv[1]; the two
        # are identical for the real caller, but the parameter is what this
        # function was asked about.
        sys.exit("No help available for command %s" % command)
def prefix_from_arg(arg, shelldict):
    """Resolve an environment argument (a name or a path) to a
    platform-native prefix path; raises ValueError when not found."""
    # MSYS2 converts Unix paths to Windows paths with unix seps
    # so we must check for the drive identifier too.
    if shelldict['sep'] in arg and not re.match('[a-zA-Z]:', arg):
        native_path = shelldict['path_from'](arg)
        # strip is removing " marks, not \ - look carefully
        candidate = abspath(native_path.strip("\""))
        if not isdir(candidate):
            raise ValueError('could not find environment: %s' % native_path)
        return candidate
    named = find_prefix_name(arg.replace('/', os.path.sep))
    if named is None:
        raise ValueError('could not find environment: %s' % arg)
    return named
def binpath_from_arg(arg, shelldict):
    """Return the list of executable directories for the environment
    named by *arg*, converted to shell-native paths."""
    # prefix comes back as platform-native path
    prefix = prefix_from_arg(arg, shelldict=shelldict)
    if sys.platform == 'win32':
        paths = [prefix.rstrip("\\")]
        for parts in (('Library', 'mingw-w64', 'bin'),
                      ('Library', 'usr', 'bin'),
                      ('Library', 'bin'),
                      ('Scripts',)):
            paths.append(os.path.join(prefix, *parts))
    else:
        paths = [os.path.join(prefix, 'bin')]
    # convert paths to shell-native paths
    return [shelldict['path_to'](p) for p in paths]
def pathlist_to_str(paths, escape_backslashes=True):
    """
    Format a path list, e.g., of bin paths to be added or removed,
    for user-friendly output.
    """
    joined = ' and '.join(paths)
    if on_win and escape_backslashes:
        # escape for printing to console - ends up as single \
        return re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', joined)
    return joined.replace("\\\\", "\\")
def get_path(shelldict):
    """Get path using a subprocess call.
    os.getenv path isn't good for us, since bash on windows has a wildly different
    path from Windows.
    This returns PATH in the native representation of the shell - not necessarily
    the native representation of the platform
    """
    stdout = run_in(shelldict["printpath"], shelldict)[0]
    return stdout
def main():
    """Dispatch the hidden ..activate / ..checkenv / ..setps1 subcommands
    invoked by the activate/deactivate shell scripts.  Prints the new
    PATH or prompt value for the calling shell script to apply."""
    from conda.config import root_env_name, root_dir, changeps1
    import conda.install
    if '-h' in sys.argv or '--help' in sys.argv:
        # all execution paths sys.exit at end.
        help(sys.argv[1], sys.argv[2])
    shell = sys.argv[2]
    shelldict = shells[shell]
    if sys.argv[1] == '..activate':
        path = get_path(shelldict)
        # no env argument (or the root env by name): activate root
        if len(sys.argv) == 3 or sys.argv[3].lower() == root_env_name.lower():
            binpath = binpath_from_arg(root_env_name, shelldict=shelldict)
            rootpath = None
        elif len(sys.argv) == 4:
            binpath = binpath_from_arg(sys.argv[3], shelldict=shelldict)
            rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
        else:
            sys.exit("Error: did not expect more than one argument")
        pathlist_str = pathlist_to_str(binpath)
        sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))
        # Clear the root path if it is present
        if rootpath:
            path = path.replace(shelldict['pathsep'].join(rootpath), "")
        path = path.lstrip()
        # prepend our new entries onto the existing path and make sure that the separator is native
        path = shelldict['pathsep'].join(binpath + [path, ])
        # Clean up any doubled-up path separators
        path = path.replace(shelldict['pathsep'] * 2, shelldict['pathsep'])
    # deactivation is handled completely in shell scripts - it restores backups of env variables.
    # It is done in shell scripts because they handle state much better than we can here.
    elif sys.argv[1] == '..checkenv':
        if len(sys.argv) < 4:
            sys.argv.append(root_env_name)
        if len(sys.argv) > 4:
            sys.exit("Error: did not expect more than one argument.")
        if sys.argv[3].lower() == root_env_name.lower():
            # no need to check root env and try to install a symlink there
            sys.exit(0)
        # this should throw an error and exit if the env or path can't be found.
        try:
            prefix = prefix_from_arg(sys.argv[3], shelldict=shelldict)
        except ValueError as e:
            sys.exit(getattr(e, 'message', e))
        # Make sure an env always has the conda symlink
        try:
            conda.install.symlink_conda(prefix, root_dir, shell)
        except (IOError, OSError) as e:
            if e.errno == errno.EPERM or e.errno == errno.EACCES:
                msg = ("Cannot activate environment {0}.\n"
                       "User does not have write access for conda symlinks."
                       .format(sys.argv[2]))
                sys.exit(msg)
            raise
        sys.exit(0)
    elif sys.argv[1] == '..setps1':
        # path is a bit of a misnomer here. It is the prompt setting. However, it is returned
        # below by printing. That is why it is named "path"
        # DO NOT use os.getenv for this. One Windows especially, it shows cmd.exe settings
        # for bash shells. This method uses the shell directly.
        path = os.getenv(shelldict['promptvar'], '')
        # failsafes
        if not path:
            if shelldict['exe'] == 'cmd.exe':
                path = '$P$G'
        # strip off previous prefix, if any:
        path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
        env_path = sys.argv[3]
        if changeps1 and env_path:
            path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)
    else:
        # This means there is a bug in main.py
        raise ValueError("unexpected command")
    # This print is actually what sets the PATH or PROMPT variable. The shell
    # script gets this value, and finishes the job.
    print(path)
if __name__ == '__main__':
    main()
conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, relpath, normpath)
on_win = bool(sys.platform == "win32")  # True when running on Windows
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    # no-op stand-in for conda.lock.Locked
    class Locked(object):
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    # file:// URL for a local path (drive letter becomes '|' on Windows)
    def url_path(path):
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    import ctypes
    from ctypes import wintypes
    # kernel32 bindings used to create links without pywin32
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # symlinks unavailable on this Windows version (e.g. XP)
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.
        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.
        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        if 'cmd.exe' in shell.lower():
            # bat file redirect
            with open(dst+'.bat', 'w') as f:
                f.write('@echo off\n"%s" %%*\n' % src)
        elif 'powershell' in shell.lower():
            # TODO: probably need one here for powershell at some point
            pass
        else:
            # This one is for bash/cygwin/msys
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
            # Make the new file executable
            # http://stackoverflow.com/a/30463972/1170370
            mode = os.stat(dst).st_mode
            mode |= (mode & 292) >> 2    # copy R bits to X
            os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
log.addHandler(NullHandler())
# Link-type codes accepted by _link()/link(), and their display names.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize *src* at *dst* as a hard link, soft link, or copy,
    per *linktype*.  Relative symlinks are reproduced as symlinks when
    copying on POSIX."""
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
    elif linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback that logs (instead of raising)
    the reason a path could not be removed."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        reason = "permission denied"
    elif err == errno.ENOTEMPTY:
        reason = "not empty"
    else:
        reason = "unknown reason"
    log.warn("Cannot remove, {0}: {1}".format(reason, path))
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
    elif isdir(path):
        try:
            # retry loop with escalating fallbacks and growing sleep
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # fallback 1: clear read-only bits and retry
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # fallback 2: let cmd.exe try the removal
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                        # fallback 3: move the stubborn directory to the trash
                        if trash:
                            try:
                                move_path_to_trash(path)
                                if not isdir(path):
                                    return
                            except OSError as e2:
                                raise
                                msg += "Retry with onerror failed (%s)\n" % e2
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:  # directory might not exist or not be empty
        pass
def yield_lines(path):
    """
    Yield the non-blank, non-comment ('#'-prefixed) lines of the text file
    at ``path``, stripped of surrounding whitespace.
    """
    # BUG FIX: use a context manager so the file handle is closed
    # deterministically; the original left it open until GC.
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Dummy prefix baked into packages at build time; it is swapped for the real
# installation prefix at link time (see read_has_prefix / update_prefix).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    Read a package's `has_prefix` file and return a dict mapping each
    listed filename to a (placeholder, mode) tuple.  Missing files yield
    an empty dict; malformed lines fall back to the default placeholder
    in 'text' mode.
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                placeholder, mode, fname = [field.strip('"\'') for field in
                                            shlex.split(line, posix=False)]
                mapping[fname] = (placeholder, mode)
            except ValueError:
                # old-style line: just a filename
                mapping[line] = (prefix_placeholder, 'text')
    except IOError:
        # no has_prefix file shipped with this package
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement is longer than the
    placeholder, so the file cannot be patched without growing it."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null
    characters so the total length is unchanged.  All input arguments are
    expected to be bytes objects.
    """
    def _swap_and_pad(match):
        chunk = match.group()
        occurrences = chunk.count(a)
        pad = (len(a) - len(b)) * occurrences
        if pad < 0:
            # replacement is longer than the placeholder; cannot keep length
            raise PaddingError(a, b, pad)
        return chunk.replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_swap_and_pad, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """
    If ``data`` (bytes) starts with a shebang line longer than the 127-byte
    kernel limit, rewrite it as ``#!/usr/bin/env <name>`` so the interpreter
    is resolved via PATH.  Only applies in 'text' mode; other modes return
    the data unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    m = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if m is None:
        return data
    whole_shebang, executable, options = m.groups()
    if len(whole_shebang) > 127:
        exe_name = executable.decode('utf-8').split('/')[-1]
        replacement = '#!/usr/bin/env {0}{1}'.format(exe_name,
                                                     options.decode('utf-8'))
        data = data.replace(whole_shebang, replacement.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """
    Replace the embedded ``placeholder`` prefix in ``data`` (bytes) with
    ``new_prefix``, honoring the text/binary ``mode``.  Exits the process
    on an unknown mode.
    """
    old = placeholder.encode('utf-8')
    new = new_prefix.encode('utf-8')
    if mode == 'text':
        data = data.replace(old, new)
    elif mode == 'binary':
        # Skip binary replacement in Windows. Some files do have prefix
        # information embedded, but this should not matter, as it is not
        # used for things like RPATH.
        if on_win:
            logging.debug("Skipping prefix replacement in binary on Windows")
        else:
            data = binary_replace(data, old, new)
    else:
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """
    Rewrite the embedded ``placeholder`` prefix inside the file at ``path``
    with ``new_prefix``, in text or binary ``mode``.  The file is rewritten
    in place only when its contents actually change; its permission bits
    are preserved.
    """
    if on_win:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        # over-long shebangs only matter on POSIX kernels
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return
    # save original file mode
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with open(path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """
    Split a distribution string into a (channel, name) pair, dropping any
    trailing '[features]' qualifier and '.tar.bz2' suffix.  The channel
    defaults to 'defaults' when no 'channel::' prefix is present.
    """
    name = str(dist)
    if name.endswith(']'):
        # strip a "[features]" qualifier
        name = name.split('[', 1)[0]
    if name.endswith('.tar.bz2'):
        name = name[:-8]
    if '::' in name:
        channel, name = name.split('::', 1)
    else:
        channel = 'defaults'
    return channel, name
def dist2quad(dist):
    """
    Split ``dist`` into a (name, version, build, channel) 4-tuple.
    Trailing fields missing from the dist string come back as ''.
    """
    channel, dist = dist2pair(dist)
    # pad so short dist strings still index safely below
    parts = dist.rsplit('-', 2) + ['', '']
    return (parts[0], parts[1], parts[2], channel)
def dist2name(dist):
    """Return just the package-name portion of ``dist``."""
    return dist2quad(dist)[0]
def name_dist(dist):
    """Alias of dist2name."""
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Return the on-disk filename for ``dist``: the name part (channel
    dropped) plus ``suffix``."""
    _, name = dist2pair(dist)
    return name + suffix
def dist2dirname(dist):
    """Return the extraction directory name for ``dist`` (filename, no suffix)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Reads info/index.json from ``info_dir``, merges in ``extra_info``, and
    writes the result to <prefix>/conda-meta/<dist>.json, updating the
    in-memory linked-data cache when one exists for ``prefix``.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if 'url' not in meta:
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed env names are treated as private; skip menus
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except:
        # menuinst is optional; a failed import must not break linking
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # best-effort: log and continue with the remaining menu files
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # no script shipped for this action -- treated as success
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # BUG FIX: copy the environment rather than binding os.environ directly,
    # so the per-package variables set below do not leak into (and persist
    # in) the current process's environment.
    env = os.environ.copy()
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for ``dist`` in the package cache,
    or None if no URL is known."""
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """
    Return the package icon (<source_dir>/info/icon.png) as a base64-encoded
    string, or None if the icon cannot be read.
    """
    import base64
    try:
        # BUG FIX: use a context manager so the file handle is closed
        # (the original opened the file without ever closing it).
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        return None
def read_no_link(info_dir):
    """
    Return the set of filenames listed in the package's info/no_link and
    info/no_softlink files (either file may be absent).
    """
    names = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            names |= set(yield_lines(join(info_dir, listing)))
        except IOError:
            # the listing file is optional
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell):
    """
    Create conda/activate/deactivate entry points in ``prefix`` pointing
    back at the root installation in ``root_dir``.
    """
    # do not symlink root env - this clobbers activate incorrectly.
    if normpath(prefix) == normpath(sys.prefix):
        return
    if on_win:
        where = 'Scripts'
        # on Windows a .bat redirect file is written instead of a symlink
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """
    Helper for symlink_conda: (re)create the conda/activate/deactivate
    links inside <prefix>/<where> using ``symlink_fn``.  Permission errors
    are tolerated when a link is already in place.
    """
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                os.remove(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            if (os.path.lexists(prefix_file) and
                    (e.errno == errno.EPERM or e.errno == errno.EACCES)):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Return True if a hard link can be made from the package cache
    ``pkgs_dir`` into ``prefix``; used to choose between hard-linking and
    copying at install time.  The probe file is always cleaned up.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# fkey ("<channel-prefix><dist>") -> {'files': [...], 'dirs': [...], 'urls': [...]}
package_cache_ = {}
# package path (or its file:// URL) -> channel prefix ('' for defaults)
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # bare filename: no channel information available
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # an extracted copy counts only if its essential metadata is intact
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # record the channel prefix under both the path and its file:// URL
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # best-effort append; urls.txt may be read-only
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion (add_cached_package calls back into package_cache)
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # urls.txt is scanned newest-first so later entries win
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for ``url`` in the cache, or None."""
    package_cache()  # ensure the cache tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # pass 0 only returns conflict-free dirs (prefix is None);
            # by pass 1 every remaining dir has a non-None prefix
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in
    the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it
    removes both the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both lookup keys (path and file:// URL) before deleting
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # a compressed copy remains; keep the record, clear the dirs
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            # NOTE(review): extractall is used without a member filter;
            # this trusts the archive contents -- confirm packages are
            # always from trusted channels.
            t.extractall(path=temp_path)
        # extract to a temp dir first, then atomically swap into place
        rm_rf(path)
        os.rename(temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
# prefix -> {dist key -> conda-meta record}; populated lazily by linked_data()
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """
    Load (or register) the conda-meta record for ``dist`` in ``prefix`` into
    the linked_data_ cache, normalizing its fn/url/channel fields.  Returns
    the record, or None when the metadata file is missing or invalid.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # caller supplied the record; make sure the prefix cache exists
        linked_data(prefix)
    url = rec.get('url')
    if 'fn' not in rec:
        rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if not url and 'channel' in rec:
        url = rec['url'] = rec['channel'] + rec['fn']
    if rec['fn'][:-8] != dname:
        # filename and metadata disagree; treat the file as corrupt
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel, schannel = url_channel(url)
    rec['channel'] = channel
    rec['schannel'] = schannel
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """
    Drop ``dist`` from the in-memory linked-data cache for ``prefix`` and,
    when ``delete`` is true, remove its conda-meta JSON file as well.
    """
    prefix_recs = linked_data_.get(prefix)
    if prefix_recs and dist in prefix_recs:
        del prefix_recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    # Walk upward from `path`, splitting off one component at a time, until
    # a registered prefix is found (drop the dist or the whole prefix) or
    # the path is exhausted.
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # `path` is a prefix and `dist` the package inside it
                delete_linked_data(path, dist)
                return True
            else:
                # `path` itself is a registered prefix
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # strip '.json' to get the dist name
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def _get_trash_dir(pkg_dir):
    """Return the path of the '.trash' folder inside ``pkg_dir``."""
    trash = join(pkg_dir, '.trash')
    if on_win:
        # extended-length path prefix for Windows
        trash = u'\\\\?\\' + trash
    return trash
def _safe_relpath(path, start_path):
"""
Used in the move_to_trash flow. Ensures that the result does not
start with any '..' which would allow to escape the trash folder
(and root prefix) and potentially ruin the user's system.
"""
result = normpath(relpath(path, start_path))
parts = result.rsplit(os.sep)
for idx, part in enumerate(parts):
if part != u'..':
return os.sep.join(parts[idx:])
return u''
def delete_trash(prefix=None):
    """
    Best-effort removal of the .trash directory in every package directory.
    ``prefix`` is accepted but unused by this implementation.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = _get_trash_dir(pkg_dir)
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            # in-use trash entries are expected; just log and move on
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f) if f else prefix)
def move_path_to_trash(path):
    """
    Move a path to the trash

    Tries each package directory's .trash folder in turn; returns True on
    the first successful move, False if every attempt failed.
    """
    # Try deleting the trash every time we use it.
    delete_trash()

    from conda.config import root_dir
    for pkg_dir in pkgs_dirs:
        import tempfile
        trash_dir = _get_trash_dir(pkg_dir)

        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                continue

        # unique subdirectory so repeated trashing of the same name works
        trash_dir = tempfile.mkdtemp(dir=trash_dir)
        trash_dir = join(trash_dir, _safe_relpath(os.path.dirname(path), root_dir))

        try:
            os.makedirs(trash_dir)
        except OSError as e2:
            if e2.errno != errno.EEXIST:
                continue

        try:
            shutil.move(path, trash_dir)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e))
        else:
            # keep the in-memory linked-data cache consistent
            delete_linked_data_any(path)
            return True

    log.debug("Could not move %s to trash" % path)
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                # clobber any pre-existing file at the destination
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass
            lt = linktype
            # prefix-patched, no-link, and symlinked files must be copied
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        # patch the embedded build-time prefix into each flagged file
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a post-link script may have left an alternate files listing
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        # but it can potentially happen with importing conda.config
                        log.debug("cannot import conda.config; probably not an issue")

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # collect every ancestor directory of the removed files ...
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        # ... and prune them deepest-first if they are now empty
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """
    Emit the contents of <prefix>/.messages.txt on stdout (if present) and
    remove the file afterwards.
    """
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as fh:
            text = fh.read()
        sys.stdout.write(text)
    except IOError:
        # no messages file: nothing to show
        pass
    finally:
        rm_rf(msg_file)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    protected = set(keep_dists)

    # group the distributions by package name
    by_name = defaultdict(set)
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)

    removals = set()
    for group in by_name.values():
        if len(group) == 1:
            # a unique name needs no pruning
            continue
        if group & protected:
            # keep exactly the protected members; drop the rest
            removals |= group - protected
        else:
            # nothing protected: keep the highest-sorting member
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """
    Entry point for the stand-alone link tool used by the installer: links
    every extracted (or explicitly listed) package into the target prefix.
    """
    from optparse import OptionParser

    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    if sys.platform == "win32":
        # BUG FIX: OptionParser has add_option, not argparse's add_argument;
        # the original crashed here on Windows.
        p.add_option('--shortcuts',
                     action="store_true",
                     help="Install start menu shortcuts")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # BUG FIX: restrict the search path to this prefix's pkgs dir; the
    # original stored a one-element *list* as element 0 of pkgs_dirs.
    pkgs_dirs[:] = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    linktype = (LINK_HARD
                if idists and try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    # BUG FIX: --shortcuts only exists on Windows; default to False elsewhere
    # instead of raising AttributeError.
    shortcuts = getattr(opts, 'shortcuts', False)
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        # BUG FIX: pass shortcuts by keyword; passed positionally it landed
        # in link()'s `index` parameter.
        link(prefix, dist, linktype, shortcuts=shortcuts)

    messages(prefix)

    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)


if __name__ == '__main__':
    main()
| conda/cli/activate.py
--- a/conda/cli/activate.py
+++ b/conda/cli/activate.py
@@ -163,7 +163,8 @@ def main():
conda.install.symlink_conda(prefix, root_dir, shell)
except (IOError, OSError) as e:
if e.errno == errno.EPERM or e.errno == errno.EACCES:
- msg = ("Cannot activate environment {0}, not have write access to conda symlink"
+ msg = ("Cannot activate environment {0}.\n"
+ "User does not have write access for conda symlinks."
.format(sys.argv[2]))
sys.exit(msg)
raise
conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -573,13 +573,20 @@ def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
for f in scripts:
root_file = join(root_dir, where, f)
prefix_file = join(prefix_where, f)
- # try to kill stale links if they exist
- if os.path.lexists(prefix_file):
- os.remove(prefix_file)
- # if they're in use, they won't be killed. Skip making new symlink.
- if not os.path.lexists(prefix_file):
- symlink_fn(root_file, prefix_file)
-
+ try:
+ # try to kill stale links if they exist
+ if os.path.lexists(prefix_file):
+ os.remove(prefix_file)
+ # if they're in use, they won't be killed. Skip making new symlink.
+ if not os.path.lexists(prefix_file):
+ symlink_fn(root_file, prefix_file)
+ except (IOError, OSError) as e:
+ if (os.path.lexists(prefix_file) and
+ (e.errno == errno.EPERM or e.errno == errno.EACCES)):
+ log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
+ .format(root_file, prefix_file))
+ else:
+ raise
# ========================== begin API functions =========================
|
conda list broken in 4.1?
In case it helps, I instrumented the `decode` method in my `cp1252.py` as follows:
``` python
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
try:
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
except Exception as exc:
msg = (
"input = {}\n"
"type(input) = {}\n"
"errors = {}\n"
"type(errors) = {}\n"
"decoding_table = {}\n"
"type(decoding_table) = {}\n"
)
msg = msg.format(
input, type(input),
self.errors, type(self.errors),
decoding_table, type(decoding_table),
)
raise Exception(msg) from exc
```
...with output below:
```
Traceback (most recent call last):
File "C:\Python\Scripts\conda-script.py", line 5, in <module>
sys.exit(main())
File "C:\Python\lib\site-packages\conda\cli\main.py", line 120, in main
args_func(args, p)
File "C:\Python\lib\site-packages\conda\cli\main.py", line 127, in args_func
args.func(args, p)
File "C:\Python\lib\site-packages\conda\cli\main_list.py", line 268, in execute
show_channel_urls=args.show_channel_urls)
File "C:\Python\lib\site-packages\conda\cli\main_list.py", line 178, in print_packages
installed.update(get_egg_info(prefix))
File "C:\Python\lib\site-packages\conda\egg_info.py", line 82, in get_egg_info
dist = parse_egg_info(path)
File "C:\Python\lib\site-packages\conda\egg_info.py", line 49, in parse_egg_info
for line in open(path):
File "C:\Python\lib\encodings\cp1252.py", line 39, in decode
raise Exception(msg) from exc
Exception: input = b'#!/bin/sh\r\nif [ `basename $0` = "setuptools-20.3-py3.5.egg" ]\r\nthen exec python3.5 -c "import sys, os; sys.path.insert(0, os.path.abspath(\'$0\')); from setuptools.command.easy_install import bootstrap; sys.exit(bootstrap())" "$@"\r\nelse\r\n echo $0 is not the correct name for this egg file.\r\n echo Please rename it back to setuptools-20.3-py3.5.egg and try again.\r\n exec false\r\nfi\r\nPK\x03\x04\x14\x00\x00\x00\x08\x00|]>H\\O\xbaEi\x00\x00\x00~\x00\x00\x00\x0f\x00\x00\x00easy_install.py-\xcc1\x0e\x83@\x0cD\xd1~Oa\xb9\x814\x1c \x12e\x8a\xb4\\`d\x91EYim#\xd6\x14\xb9=\x101\xd54\xef3\xf3\xb4\x1b\xc57\xd3K\xda\xefm-\xa4V\x9a]U\xec\xc3\xcc)\x95\x85\x00\x13\xcd\x00\x8d#u\x80J1\xa0{&:\xb7l\xae\xd4r\xeck\xb8\xd76\xdct\xc8g\x0e\xe5\xee\x15]}\x0b\xba\xe0\x1f]\xa7\x7f\xa4\x03PK\x03\x04\x14\x00\x00\x00\x08\x00P\x96qH\x93\x06\xd72\x03\x00\x00\x00\x01\x00\x00\x00\x1d\x00\x00\x00EGG-INFO/dependency_links.txt\xe3\x02\x00PK\x03\x04\x14\x00\x00\x00\x08\x00P\x96qH1\x8f\x97\x14\x84\x02\x00\x00\x13\x0b\x00\x00\x19\x00\x00\x00EGG-INFO/entry_points.txt\x95V\xcd\x8e\xdb \x10\xbe\xf3\x14\xfb\x02\x9bCW\xbd 
\xf5\xda\xaa\xaa\xd4\xf6\x1e\xad\x10\xb6\')\x8a\r\x14p\x12\xf7\xe9;`\xf0\x12\xcb8\xf6\xc5\x0c\x9e\xef\x9b\x1f33\xe6X+iU\x0b\xcc\xd6Fhg\xdf\tp;0!\xad\xe3m\xfb\xf2\xe5\xc5\x82\xeb\xb5S\xaa\xb5\x87Zu\x1d\x97\xcd!G\xd0\x8e\x0b\xf9\xc0y};|\xde\xca#\xc7FX\xd7;\xf1\x81\xc2\x08x+\xb8]6\x11T4<I\xe5\xb9\x0c\xce\xe7e\xe8\xa4\xa6\x93\x14)Fwk\x14T\xd3I\x8a\x94\x9b\x90>\xf05Z\x84\xd0\x87\x1d\xa9z\xd16\x0c\xee%jR\xd3I\x8a\x14=\xac1\xf4@\x93@\x1a\xb8B\xab\xf42<*i\\\xf7\x9en\xbe!\xf8\x05Q>\xa9\x02/ji\x12\xc8\xaa\x9b\xe4!\x19\x8f+[w2G\xd1\xf9\x8b\xc9N+\xaau\x13\x08\xa0\x99<\x11c#\xac\x93#\x88\xce\xf6\xc4\xc0\x19O\x1f\xcc2;ii\x12\x88Q\x8e;(\xa0\x83\x8e\x8e\x0b\xb1\xfc\n\xaa\x18W\xd2\xd2$\x10\xeb\xcb\xb0\x00\xf6*\x1a\x9e\x04\xd5\x08/\xe0\x82\x8e\x8e\x0bqP\xb2\xe75\xd4?H\xaf[\xc5\x9be\xd4\xa8\xa3\xe3\x12\x91\xacQu!\xa3\x0c@3\xf9ad\x04\x1a\xbb\xc0pS\xc6\x0f\x0e\x9ceW0\x8e}r\xea\xcd\xa3}L3\xf3!un\xad\x87Yg\x84<\xe3\xe1c\xe4\rh\x90\r\xc8z\xc0\xbd\xbcld\x01?\x83a\x06\xac\xeaM\r[I\xd2\x99\x81i%\xe4bp\xf5\x1f\xa8/,\x07\x11\xb8\xd7m\xdf\x00\xd3\xbc\xbe\xa0G\xd6p\xc7\x8b\xcc\x1c\x84Lg\xb8\xc5\x08\xff\xf6\xc2@\xd9[\x80a\x0bl\xf2\x13s\xaa\xf0\xcd\xd45\xd1C9\xa1\x08\xe8\xc0\'$y\x07\x16\xbdL\xae\xca<i5\xd9\x9f\xf7S\xb3\t@\xc6\x1a\xda\x17\xbe\xaf+\xe6Kr\xde\xe8\x19AtZ\x19\xc7\xab\x16F\xb8\xe9\xa5\xdc\x01\xb7\xbd\x98\xcf\x85\x0c\xfd\x01\t\xe8\xe7\x07\xfc\x10~o!\xb4\xc8\x93\xa3M0\x96\xca\xef$\xee`6\x16\xf9D\xdeC\xfa\'4\xb3\xfc\xb4\x94F\x1e\x189\xa6i\x7f\xb8\x19\xfc\x06\x06[\xff\xf7\x8fo\xaf\xdf\x7f~\xfd\xf5\xe4\xdf\x14\xf0L_\xe2\xcfb\xde\xf5\x07W\xfaQO\x16\x14N\x98\xd1\n7\xe7h`\x0b\xef\xc6\x8dd\x11\xceT\xe5\xef\\xz\xb3\x01\xb2\xdb\x7f>&\xb6\x04\x11\x88\x9e$`\xa9\x0bw\xfbO}\xb3\xd9\xf7c\x1f)\xcdZ\x7f1\xd9LGF 
\xb0\x10;VBF\x89\xa3\x88;\xa1\xe4\xbb\xbf\xacX\xa8\xfb\xd0R\x1b.:XX\x0eK\x91kB\xfe\x03PK\x03\x04\x14\x00\x00\x00\x08\x00P\x96qH:p\x95\x8f~\x10\x00\x00c3\x00\x00\x11\x00\x00\x00EGG-INFO/PKG-INFO\xbd[\xedr\xdbF\x96\xfd\xef*\xbfC\xaf2U\x96T$\x18\xc7\xb1\'aMR\xab\xc8v\xa2LliCe\xbd\xbb5Uf\x13h\x92\xbd\x02\xd0\x184@\x8a\xa9y\xa1y\x8e}\xb1=\xf7v\x03hH\xa0D\xed\xee\xac\xff\x04"\xd0\xb7\xef\xe7\xb9\x1f\xdd\xf9\xa0*\x99\xc8J\x8e\xffU\x95V\x9b|*^F/\x9f?\xfb(35\x15VUuQ\x19\x93\xda\xe7\xcf\xda\xf7_}\x19\xbdz\xfelVg\x99,wS\xf1NZ\x9d\xeeDb\xb6yjd2\x12\x8bZ\xa7\xf8\x8f\xcem%\xd3t$\xeabU\xcaD\x8d\x84\xcc\x13Q\xe7\xfewq\xb5\xab\xd6&\x17\x85\x8co\xe4Ja\x83\x9fL\xa6\xc6\x05\x9e\xa7b]U\x85\x9dN&\x0b]-\xea\xf8FU\x91)W\x93bW\xc8I\xc8\xd2Y\r\x12\xe5\xb4!u\xc5\xa4t\xbe\x12\xee\x85\xaev\xcd7c\x95I\x9dNE\xa2mUW:\xb5c\xabW\xff\\\xf0:\xa2\xfd\xfc\xd9/:V\xb9\xc5\xde\xbf}\xfc\xf3\xc7\xcbO\x1f\x9f?{\xabl\\\xea\xa2b\xa1\xbf{\xf8\xdf\xf3g\xc2\xff\xbbp\xe2\x11\x17$\xefo\x96\x9ef\x01\xd3\xcd\x87\x07S\xec\x9e\xa2H\xc4&\xafT^\xd9\xe9T\x9c\x9e^\xcbE\xaa\x84Y\x8as\xff\xeb\xe9\xe9\xd0\xb2\xeei~\xbe\x96\xf9J\x89\x9f\xa0\x06S\xee\xc4\x9f\x1aE;M\xac\x8d\xadT\xc2\xba\xee\xd4<Y\xbb\x8f\xa3u\x95\xa5\xdf\xcf?GC\x84\xc7\xfb\xfe\xddS\x8c$}\xf2\x1fe\x1d\xd3\xb3}\x12\x95\xee\xe9z\xadD\xa9b\x93e*OT"\xb6r\'*#\x16\xc6T\xa0-\x8b\xc0y\x05v\x94\xf9N\xd8\x1d\xe4\xcb\x84\xb6\xf4a\xe3\xb0\x81v\xd4\xef\x9fyQT\xec\xe6\x9f\xd9|e\x9d\x0b]\x89\x9a\xadXa\xc7J\x96+U5\x1e\xa7\xf2\x8d.M\x0e\x0e\xaaH\xbc\xd5\xcb\xa5*\xf1\xd8Q4\x85*!0\xd6\xba\xad\xadX\xcb\x8d\x82\x17\xfa/{\x02T*^\xe7\xfa\xaf\xb5b\xf6d\x8c7E\xaa\xed\x1a\xfb\xea@I\x0b\xc4[,J\x037\xce\x11T\x16"\xab\xd4l\x85,\x15\xfe\xc8\x94P\xb7\x12\x0b\x1d\x15\xe2ugj\x01\xd5\x97d\xda!Ev\xce\tv\xfeZ\xeb\x12K\xbd|_Eo\x84)\x05\x8c\xa6\xcaH\\\x9b&\xa8{\xc0\xd0\n\x9bw\xcb\xbe\xa6e\xed_\xaf\x01\x02V\xb1\xfe\xe6\x81}8\xba\xc4\x12_\x06,\xbc\x8cn;\x92\x7f:\x08\x08&\xa5\xdcNZ\xba\xe3b\xf7\xd5\xd7\x93\xc0\x94\xfb|\x96\x1c\x08qz#\x8a\xd2l4[\xc0\x88`\x1d\xf9\x89$\x7f\xba\x01\xd0\xdd\xdc\xf1\xad\x8ewH\
xd5\xd1$M\xd9J\xdc\xe4p.R:\xc5g\xa9R%\xad\x1a\xe4\x01\x11\xfd9\xd82@\xbef\xab\x88\x84\x8d\xb4\t%\x1a\xa2\xf4I\xe7\xf0h+\x8e\xaf\xcc\x16H\xbdV\xb0\xd2\xab\xd6x\'\xff+\xdcy\x0f2\x0b\x92\x0b\xaeQ\xa7\x95\x1d\x050^\x94j\xa3Mm\xc5\xc6\xe5\x07+\xde_\xfc:\xbb\x16\xc7V\xc1\xda\xbf5\xdf!\x06\xe6\x9fO\x06u\xe0 \xb2\xe1\xff\x1bq\xbc]\xebx\rW\x8b\xd3:!_$\x81fN\xa0\x13\x92H\xc92\xd5\xaa\xecv\x04\x02\xfa\xe5\x1d\xd5\xad\xae\xd6\xbd\xa5\x8d\xef*\xcaM\xd5\x0b+\nc\xad&\x03U\x9d_\xf3*\x93#\x924\xc5PG.\xd0*E,\xb0!\x123\n*\xa4\xb7\xf0%\x81F!\x11\xeb\x1c\xb5\xcd\xb7\xd3\xe9\x90\xe0\xf4\xef{q|\x91o\xcc\x8d\x1a\x7fR\x8b_\x15E\x7fu\x98\x13\x9cD\x1e\xf5\xc5\xdf\x84\x83o1\x88\x93\xff\x8e\xf0\xcfjPe\x0c\xe0 \x0c\xf8e\x81\xcf\x92L\xe7\x9av\xaa4\xe0\xa9(\xf5F\xa7\nI\x99\xb4M\xf0\x91\x01[\xe3\xb516\xd0H\xa05I\xe1]\x8eS\x13\xcb\xb4\xf9\x91q\xfe\xffKnd\x0e\xe2`h\xb3\x8b%K\xc0\xb8\xeb\x01\xe9U\xd4\x05\xc6\x88\xdf\xc62\xef\x10j\x0e\xf4\x9f7\x96\x0b\xc5\xacLG\xb6\x83pO\xb4q\xc6\x88\xa3\xc5c\xf0\xa8\xbf<d@w|\x05^\xd6 \xe6\x1f;o\xfd\xc7\xebP\x8c_\r\xbb\xce\x9e\x14{?\t\x10\xf67\x01\xfc\xe4\xec\x1a\xf1>\x1eQ\xb7\x1a\x84\x9b\xc5l\x0fY\x00\x9f\xe1\x92\xb0V\xa8~\xc8\xa6\x175\x17\x13K8+\xd3k\x18\xd3\x0e\x99\xa1\xdfA\xc0\xb9\xccc\xd5sS\xe1"\x15\x06\xab\x94s\x08fc\t\x89@\x17\x1e\x01\xfc\xde}\xf6+\xe0\x1b`\x08%-\n\x89\xbc#\x8aUm\xc6\x9b\xcfg,\x8d\xc5\xb7\xb6^$H\xa91\x97P\x0eJ\x1d\xb6`\xf5\x06\x01\xc3\xfb\x13\xf3=\x80\xed\xe8\xca$qP\xd2R!\xf5\xf2n\xf3\xf9\xd5\xd9\xf5O\xd8#(C\xc4FBU\x0br=x\x18\x14\x81\x95\xb9\xa9\x84LK%\x93 
s\x00\xb7-\x97->@\x12\x9d\x0c\xc6\xf1\xc8\x07\xc5\xb0D"\xacL\xe6\xf3?\x80\x93\xe9\xd9\xd5\xd5\xdb\xb3\xeb\xb3\xbf8e\xfc\xa5]8h\x8b\x81\x04\xc6\xea\xd1K\xad\x92G\xb2\xd6\xbeT\xd5P"lC\x99\xd4\xcf\x02xO\xbe\xd13\x7f\xf3\xa1l\xa2~\x9cRm\x15\xb8[\xe3\x90w\x9c\xd8\xd5\x85l\x0c\xe8\x13\x80PR\x94\xa8\x85X\x94`\x00)\n;\x19\xe8\xaf\xec\x8a\xbb\xc0\xb20\xfa\x11B\xe0\x08\x1a\x96\x15{\xf1#*B6\xbdEzDQ7\xac\x99\xa15\x1fP\xd3\x8b_t^\xdf\xf6\x82\x86\x1d^9\xf8\'\x82\x83;\xbf\xdd#\xf6\x01\x95\xb1\x87\xc30\xb6\xc3\xdc\xfc@\x907K\xf7D\xf4CpH\x82\x1c\x06\x80b|\x89\xac\xd1$\x90!\x8a\x1f\r\xe7pYux@Y0W\xca\xe7\x04\x02^\xe6\xbd\xc9\x14\xacK[\xa3\xe4\xa7(\n\xb3h\x988\x82l@\x8b}K\xe2\xb4\xf6\x7f+\x9b\xad\x13\xf3\x80\x80g)\xd2_\xce\x19?\xdd\x8d\xc2\x12\x9c\xe4\\\xa8.\x03q;\x12BC!\xab\xf5?\xc2\x0e\x0f&\xf2\xce \xf8\x90\x1b\x1d\x93&\xd0s\xd3Y\x1d\x03\xe6\xc0+^\xbeU\x0b\r\xd8~\xc3\x1et\x0e\x90\xbb\x9c\x89\xd7\xbd\x88\xb7\'Ap\xcf\x89\xdd9K\x8d\x18\xe62\xc0\xec\x89\xf8\x91K\x13\x12\xd5\x12{\xbdt\xf5T\xac\xca\n\x80\x15\x93\xf7\x927\x80\x87\\\x82\xc1\xf9i\x14G\xd6\xa6\xd1\x12\x15a\xba\x8br\xec\x13\x82\x8ar\xd8\x9c\xc9*^3!j\xc0\xfd\xd2{j\x9b\x03\xaasJ\x07\x9a$\xf0\xb0\x1c2\xd7\xa4P\x12\x04/\xa9;\x8dM\x8d4\xcf\x19G\xa7\x92|\xd2 5@Y\x1c\xb6\xf7T\xa0sv\x894\xdd\xf5U\xd0%\xfa\xf1m\xb4\x8b~\xd7\xc5S5\x81\n}\xbe\xddn\xa3n\xe22\x1f\x90\xbfc(P\x04\xc4\xd7\xe1:\x82\x14\x14\xa2\xdc\xef\xba>K[\x8b\xd2gD\x8d\x96\xac\x1aw]\xd4+\xc8\xc3L9?\t\xbb\x05\'o\xb7\x9dkV.\x88\x8ex\xfd\xed\x83m\'\xb13\xe1\x1d\'\xaf\xbf\xfd\xc2\xd5F\xd5\xf8\xf57\xdf\xbc\xfc\xf6\xe5\xeb\xef\xa9\xc7\xe9J\xce\xa2\x80\x15\xa8\xa9l\r\x01v\xb2\xd1}0\x08\x82OZ ]\x8a\x86\xde>\x1aa\xe3qn\xc6\xf1Z\xc57\xe3P\xefOl!\x1dA\x1f\x80\xbd\xd0\x1c\x83=\x15\xd7\xa5: +\xb9n\x8d|\xe1\x83\x8c\x05\x02\xee\xdf\xc41\x96\xa6\x077\x9e\xfbk\xf7&\xc4\xa1Pd-\xd0\x0c;9\xa7+_\xa3\xb0U\xe7\xfc\xba\x19.\xc1\x0f\x82\xc4S*\xb8l\x1c|\xca\x98=\x9f\x13U\xfcEx1\x9f\x8f/\xbb\x17c\x83\xba%\xac\xe8\x1f2\n\xf3v 
\xec\x99G\xd2O\x80\xd3\xc9F\xa2`Mz\x03\xb4G\xb4\xba\xaf2\xca\x0c\x82F6\x04{5\x90\xe1A\'\xa2\xc8\xd6\x08Fh\xbak\xdc\xc9\x85ct\x90&\x0b\x06\x1d\xbet\xe5\x16\x91j\x1f}K!\xd8b\x06\xa9R\xddB\xfe\xd8\x01\x83\x85\x1d\xe3\xb0{\x94\xe5\x82<\x7fY\x9aL\xccg\xbdN\xe2jwu\x11\x8e&{\xf1\xef\x02\xb0C$\x84\\\xbf\xa4\xa2\xc2\xa4\xd53\x9b\x91G\x7fuQ\x18\x1a\x81u#`\xfe:\xdc\xd9i \xea\xab\xecQ\xbb\xf7\xc0\xf1\xf6\x0fM$uS$\x1f\xe3\xe3\xb1S\xd3w\x13l4\x19\x1a\xa0\x85#\x11\xf2\xd1\xf1\x185k\x01g\xf4\x83<)\x965\x08y>E\nAh\xda\x8e\xa6)h\xd3hBL=D\x0f\xdf9<\xde\xa1\x89\xf1>\x04\xc8\xcdk\x99\xa2\x94\xa3\xaa*Q\x95\xd4\xa9w\x88&pFB\xd9B\xc5\x9a\xb3\xc1\x9cM\xa8\xe2\xbe\xebAJ\xe7\x16}Oj]c\xcf\xdc\x8df^\xf7y\xe9F_\x0f\xcd\xa2\x83u}\x82\x01\x83\x8f\xf2\xf5\xe4\xad\xbep\xe4\xc6!\xb9qK\xee\xe1\xf0mj\xe8\xa1\xc9\xffpY\xd6k\xac\x9b\x80\xb2<\xa3@U\xb6D&AxU\xce&H\xe8\xff\t\xa9_XdL$K:=i\xb2\xde\xd5\x1dlq\xa7#\n0\x92\xa8[\x98F\x08tf\xa6\xabD\x91\x1fQ\xf6\x99\x8a\xf4\x86\x14\xc9\xd4\xe9{\xbc\xe6F\xb8\xf2\xe3R\xbb\xd7\xa8\x071\xd4\xe7#4\xc5\x83a>\x98\x1e\xbar\xa8\x11\x82\xa1\xa3\xd5\x99+\x92\x12\x85\n\xd7\x14\xae;\xf6\xbd\x05\xd5\x00AY3\x0fj\x01\x9a\xfan\x10\x0f<\xbbex\xe2\xe8\xf9\xa1)\x05(\x89\x98\xf9\xe7\x91\xefO\xc6\x03\xe4\xadW`P`}\x19\xbdAS\x08\xdc]S\xffD(\xdc\xee\x01\xac\xdd\xa27\xdd\xab\xd6\xfe\xce\x07\x1e\x8eM\x00\x18\x93D-%\xa0 \x02\xd8F\xab\xdf\xbfP\xab\xd5w\x01Z\x81\xf1\xfeF\x1d\x8fn\x13\xeca7y\xcf(\xce\xbavb!\xfc\xc2\xdcN\xdc\xe7\xca\x06;\x8fAf2\xb0\xd7\x97o\xf6\x14\x0fm\x9e\x19Nj\xc3\x13\x9c\xa6\xc9\xe71G\x00\xe2\xdb.s\x01\xd0\\w\xca#\x9cH\xdd*@\xa9\xa1\xb1I\x94Y\xdd3\xb9_P\x8e\xdclf\xd7\x0e\x03\xbb9\xf7R\xc9\n\xa5\x10Y\xf6\xe8,I&\xbf\xaa\xcc\xd0H\xd1\x8d\x82\xec\x91\xf7\xf2\x8e(M\xd8\x10_\xf0\xf7\\\r\x1b\xf7\x92&\x03[m\xdd\x98\xb0\xdb*\xccI\xa5x\xdbt\xed\x8a*\xdc\x95,\x93T\xd9\xc6\xc3\xeeE\xb9\xf7@dbE\xd3,A\xf4BO?m\xcb\x9cv\x1a\xa0\xe87\x9a>pF\x0cgln\xc8\xa3\xf1\x82\xe3 \xa8\xc5\x10\xdf\xa0\xaa+:\xb0u\xe7\xb7\xa0\xd1N\x85\x82\xc2Z\xf2\x08m\xe7\xa7 
X\xb3\xb3\x11\xb5\x8f\xc1\xe7\xa0\x7f\xe2\x0e\r\x06\xb5\xe4\xabi\x8a\x19w\x92L&m\xe6*\xa4=\x81\x9a\x8e\x01\xa0T\xe3GU\xd8\xd1E\xe7\xb1&R\xcb\xbad\xde\xa8\xb3\xe7\xd1%@61\xb9j\xeb\xf8\xadDdS\t\xe4\'\x84)\xf5\x89l\xfcn\x97\x80n3/\x97\xa95\xedZ\xbf\x80,\xf6"\x1c&\xbe`c\xf4~\xa2*\xe2\x85\xef\xe5\x02\xd8cEZkb\xcd=\x0e\xbc9\xae\xf9|\xc9\xf6\x1b\xf5\x00i=\x8d\xce.O;\xba\xbdw\xf6z\xf7,\x9by\x1a\xcc\xc8\x87\xd3\xec\x9e~R\xa5\xea\x0e1\xbd{w(\xe9\xaa\x04\x82\xf4\x9a|\x86\x1fi\x7f\xe7X\xa5r\x05&5O\xc1\xf4 U\xb2\xe4\x06U.h\xbe\x17\xd8\xab\xd1\xd1\xbb\xd5\xca\x13\n\x04\x19\xac\xf5N\xc5\x9cfYa\xe5B\xa3\nD\xc2\xaa\xd6\x89\x1b@\xf3\x0cP\xd1x\xb9)\xb0\xee\xad\x0fr\xfb[\x979\x98\xc6\x8fDc\xe0\xf3\xe2f\xf5\xb9\x13\xee\x0c\xd9\xad\xddc\xe0\xeb\x0b\xdf\xc0\x8b\x19\xd7q\x1e\xaf\x02Q\xc35\xdd\xd3\xbf\xd0\xc9\x81+\xfa|C\xebuBM4%\x9d\x12^d\xd7\xa6N\x13\x8e\x0e\xf6\xa7\xce\xdd\xe6\xbd[\x15\x1dY\xbarA\xca\xa7B\x95\x0b\x8e\xf0,f[\xea\xaaB{|\x8c\xf8\xa46\x1e\x8c\x9e0R\x04\xf6ML\\\x137\\g\x05!V\xa4\xf5\x8aB}\xd4\xcdW{\x0e\x10*\x99l\x84\x0f\x0b>\x83\x85C\xd0\xc8\xd2m\xc8>\xd1\xab\x919\xd5\x97\xeeP\x9dg\xee\x9d=I\x86\xa6\x04\xab\x8b\x84b\xf0\xae@\xa6\xbc\x81\xb4\x01\x9b\xa7t\x16\xac\x92S\x9a\x97Q\x92t R\x92\xf2\xbaI\x0c\xb8\xa5\x11\xbfO/v\xd4\xe2\x87\xad\x17\x99\xae<[\xa6\xc7g8\x91a\x1bQ\x9fu\xa3\xca\x07\n\xee\xe1\x05\x87\x16\x13n\xc2\xd2\xa7x\x90\xbf\x1dVj\xc3d\x99\xac,\xdf0\xb9\xbf\xc9C\x11s\x18\xfd\xeeq\xcf\x16\x0fD\xd9a;\xf4\x08\xec\xd9\xe4I\xc8q\xd8\xb6a\xde\x18\xd8\xb5\x17\x97\xbdpl\x0b<\xfa\xb1W\xe1ih\x97~\x9c\xf4\x16O\x1enr\xee\xc1\xfay\xa9P\x98\xdf\xbf\xda3\x0c\xac\xa4\x1d\xc4\xfcJ\x93+%\n\xfb\xe5\xcd\xd5\n\xae\xdcV+\xaaP\xd8IXY\xfe\x986\xd49> \xb3\xa1\xfc\x0b\xbb\xf3\xd8\x8c\xe3Rq\xc2\\\xec\xc4\xd5Z\xa3\xce,\xc4;<\x13\x99\x1f\xccB\\\x14\x85Iue\x10\xcc\xf4\'\'m>\x9e#\xdcQn\xab\xa5.m\x15\x92\xedu\x11w\xd8p\xc0I\xe3\x86T\xfb\xf5<\r\xbb{\x15\xa8!\xd2g7\x03T\xe8\x05\xecTQ\x01\xb1\xa2;l\xebl0\xa8O\xc5\x05\xd0\xe8\x07\x1d\x13\xe8\xf4X\xce\x08E\xe9\x8e\xc4N\x1c\xb1\xf4\x14\x96 
\r\x05VG-\xd0\x80\xf9p\xeb\xd0\x93F\xc14\xcf\xcfM\xdc\x9c\xc0\xf7U\xf4\xfbFK1c\x99\xf1f\xa5\xfa\x95#Z\xcbz\xd1(\x89\xf2\x07*E\xae\xf3\x082e\xfe\xc2\xb6i\x86zuR\xd1\'\xb5\x18\xcf.~\xf4y\xfa\xd3\xec\xc7\x8b\x90\x9a$e\xfa\xa3\xd2D\x15\xa9\xd9q\xb7\xe5\x8c\x95\xdbB\x97^\xd3\xb1A\x04\x15\x15\x17\xe9\xf8\x02\rma4\xb6\xe1\xea\\Q\xaa\xef\x91\x85\xa1h\xe8\x8e\x96a\xa574\xaa\x95\xe9\x8d\xa5\xde\xfajw\xee\x0f\xa0Tj\xd5\x96\x92\x82;\xf4!\'t\xbf\'5\x0f[{%?[0C!O\xd6cIhO\xe7\x10\x1d\x04\r[\xf3g\x9d\x89\xf7h\xd6hxA\x9d\x03\x97\xab\x90Jg\x0e \xd4\x92\xec\xc7\xf5)\xdd\xaf\x14\x12i2s\xe3nd\x19\xee\x07\xe8\xe8\xd7\xd4\xbd\x00\x904\xc3qo\xef\x9eb\x0f8*\xf2\xad#\xd6\x04_x\x1c\x1a\x92m\xdb\xa9mIC\xee2\xbc\x00\xb0G\xbe&\xf8~\x8e8\xfe\xe8\x96\x00c{\xc6A/\xf9\x96&\xb1i\x83\x1a\xed\x8eSq\x1cR\xe6\x84Cy\x8e\x01\x9f\x92V!\x14\x10\x02P\x10\xd7\x8a\x0b\xd0\x84\xed\xfb\x97\x04\x1c|\xf4\xca\xc3\xf6\xc2E\xe8`Mq\xb1G\x8e\x19\xc0\x89\'\xedp\xc0B\x96U\xdb\x89\xb5!\xe8\xe7\xa9\xa1(h\xf0\xe1A\xcb\x9aoQ,v\x0e\x15\n\xd5\x0b~\x17M\xe2\xacc\xc5\x8a\xf74\xec\x91\xfe\xbc\xc74\x17\xd5\x84l\x98$\x8f,\xa5m\x93\xef\xbd\xfbh@\xe25\xb4\x88\xd6V\\]|\x08\xe5\x8cD\xff8\x89Q\xe3rv\xf6\x9e\xee\t\xd1\xdd\x16:\x89\xf7\xa3\xe0\x0f\xfa\xa6G\xf3\xe8\xdc\x80\x8b\x1f\x801G\xe2Z\xeeRS\x9e\xf4\x9dv\xadt\xd9\xb9.\xf8\xa3\x8eUr\xbe\xcba\xb0B\xaf\xec\x10\xaf\xd4uCo\x03A3\x12\x8a\x02tAh\xa3\xdc\x07\xac\xd2\xa3\x18o\x8f \xcb\xf15\xe4\xbc\xc1w\xabzg\xff\xe9dO\x9eA{q#\xfeC\xcb\xe4\xbf\xfe\xdex`Q\x02\xect\xd1\xf3A\xfa\xbd\xeb\x19\x89\xd3\x9b\x91\xe0[i!\xb7\xdcnn4@ZV-\xfe4\xe1\xefq\xcd\x0fLF\xee\x08\xa8D\xa3L\xa9>W[\x9e\xb6\xe6fs\xb7\xb4\xf5\x8d^\x92\x90\xca\x1a0OP\x1c \x8a\xf7z$\x95\r\xb4[\xa6\x08\x8ay\xe6\x1dN\r~\x96\x16\xec\xfc\x1a\x89sc\xb2\x85\xf5\x92\x87{"\xe1\xc3oa\x9b;!\x18\xb9;@^\nZ\xd8~I\xfcC^S"\x1dt\xb7(B\xa2\xf7f~\xbd\x9b\xd9\xe2\xf8jwuv\xd2\xe6\xf3\x94\xae\x0f\xb47hZE\xee-hy`1\xd8\xa8\xedmC\xc3\xd9L\xd2\xdc\x9b\x06\x8eW\x07\xac\xe8\x9e\xde\xd1\xa0\x94\xae\x08\xf2i\xa9\x8c9\xa7\xfb\x11g\x10\xf1\xdd 
4\xc6f\x0b4\x1f4\xab\xe2\x13F_|\x87\xc9(\xa6\x03\xd4\x12\xe6\xf1\x8dWX\xab\xb1\xc1\xd4m\xd1\xf6\\\xdd\x99W0`$m\xde\x95\xeb\x81~`\xe8\xf3\xae\xe6tG\xb5\xfe\xd8*\x9f\xb8[\xad\x13\x12dl\x96\xe3\xd8}\xde\xab\t\xff\xacvh\x80\x12\x14\xfc\xe7Wg\x1f\xfd\x1c\xb6=i\xe1\x80\xf5\x93"ri\xfc\'\xe3\x8b\xd2W M\xc8\x1c\\\xbb?O\xa5\xb5t\xfd\x07\xdd\xc9\xdb`\xb6:\x03\xb0\xa2u\x9bN\xc5k1\xa6\xb9[\xe2N*&3\x06\xfd\xfeBjL\x18l\xcfP\xc6pY=\x9dv\xdd\x83\xed\x7f\xec\xaf\xfe\xd3\'\x97\xb3\x0bB`\xe0,\xd6\xe2\xef\x0f\x17\xd7\xcd\xeb\xfe\x9a\xcb\xb6\x9c\x9b\xb9r\x8e\x17\xf3P\xbb\xa0\x9dI\xb8\xf0{?\'\xcch\xc5/2_\xd5\xa4\x89i\xfb\xbf.\xe0\xe9\xab\xe8\xcd\xd3\x97\xfc\xf1\xa9K^=yA\xf4?X\xf2\xf5\xd3\x97\xbc\xee/\xb96\x85\x8e\xe9\xcd\xcc,\xab-\x8d\x85B_\x98\x92\xd1\x16\xa5\xe4\xd9dG\xe8\x03|"Uv\x1f\xa5\xd6Pge\xbc\xd6\x1b\xe2\x86\xd66\xd8\xf4\xe8\xb2\x99\xbfi\x12^\x97\xa5\x02~p\xd9o\x15U\xf1\x9a\xb8\xf9oPK\x03\x04\x14\x00\x00\x00\x08\x00P\x96qH\xc2t\xb1\xd6\xe2\x03\x00\x00\xb2\x0f\x00\x00\x14\x00\x00\x00EGG-INFO/SOURCES.txt\x95WKs\xdb6\x10\xbe\xebW\xe4\xd8\x1cHO\xedN:=z\x12\xe71\x1d;\x9d\xb8=c rI\xed\x08\x04P\x00\x94\xc4\xfc\xfa.\xf8\x90 \x12\xa0\xd5\x8be\xec\xf7aw\xb1\xc0>\xf8\xf1\xeb\xe3\xcb\x97\xa7\xd7\xdc\x9d\xdc\xe6\xf9\xf1\xe5\xdb\xe7\xa7\xd7\xbfs\x94\x9b\x1fO\x8f\x9f\x9e\x9fz\xf1V)g\x9d\xe1:\xd7\xdd\xa6P\xb2r`\x9d\xff\x1f\xb8\xed\x18J\xeb\xb8\x10\xfd\xfa\'\xb3\xe0\xda\x9e\'x+\x8b\x1d\x98\xbc\xd84\xf6Pd\xdb\x16E\x99]\xa4M\xb9\xd1\xfc\x00\r\xc8^\x95\xeez\xa5(qc@\x90b\xf0\xd2A[Q\xd5\x9b\xb3\xdeR\x15\xf6\xee\x99\xef\xa1B\x01\xc3\xca\xbbt\x86J8\x80P\x1aLV\xb7XB\x7f\x80\x10\xe8\r\x9e\x85W\'8K+e\x1a\xee\xecE\xb0C\xeb\x94\xe9.\x02\x94%\x9c.K\xbd\xaf\x99\x01\xabZS@\xb0\x8d\x0e\xb5S\xf2\xe1"\x18\x8f\x16P\x8c\xe2eC\x91=\x0b\xfa\x83:\xa5D@b\x0e\x1a-8Eh0l\xe9`[n\xf2\x9dk\xc4\xc4\xd8Q(\xef$w\xad\x81\xbb~\x91\xfb\xb0\xc4P:\xac\xc3b\\\xe5\x85\xb5\xcc\xad\xd0tW\xfb\x90YO\xdc\\\x9d\xf3\x8eQ\xe4\xd01\xd6\xdf\xdf\x15\xc252\x7f\x9f\xc3\x19f\xbb\x0e 
KeVvO\x0c\xddin,\xca:M\xb1xJ\x83\xc3\x0f\x94+Nh^\xecyM&\xc8\x1d\xbeU\xed\xba?\x01\xf9M\xdf/\xdcB5\x9a\xbb\x9b\xa8\x94em\xe1\xa3oo\xa17\xdc\xec\xc1\xdcD5\xf0o\x8b\x06\x86\x9b\xbc\x81o5\x14X\xe1\x8d\xda[\x87\xe2&\xe2\x81\x14\xa2\x92K*\x9c\x1c\x18\xb9\x12\xd7\xfe5\xbd\x89\xfb\xbf,\x19\x96\x80s\x9d\xb0k\xcc+\xd6%9\xaf\\\t\xc4\xdc\x14;<\x00\xf3!\x99A\x85\xc0\xec\xe1>\x87\x13\xcc\xa5\x1f~\x8bI\xb9i\xe2\xfc\xb9\xa8\x04M\x81\x9e;X\xe2P\xa4\x03\x91\x8f\xb2\x9c. \x90S\xa5\x8cX\xf2\xd2\xa5g^\x1a\xf7\x8c\x90\xb9h(\xf73k\x02\xb7\xf7N=08\xcd\x00\xdf\'\xfe`\xb6\xd5Z\x99\xb9\xeb\xc3\x1b\x026\x14\xde\x19\xd6\xdd\x7f\xb8$\xd95\xf0{\x02x\xf85\nX.\xcb\xad\x9a\x1b\xb0\x85A\xed\xde\xfdB-\xe4}\xee\xa8\x14/\xc1\xa5\x18\x1ddd`q|kE\xe2\x8c\xad\xc4B\x95\xc3\xdb\x99\xdffL\x16dS =R\x88\xd4\xd1\xc6m\xe4P\xd7\x19\xcaJ\xdd\xfd\xf5\xe7\x97\xec\xdb\xcb\xe7\xefQ\xf0\xf5\xfb??>\x8eCA\x0c\x1f^\x1c\xc8\xa2c\x02\xe5\xde&\x89ToL\xc7\xb4B\xe9\xd2$\xa74\x13\xbe;\'\x19?Qg\x96W\xd7\x89\xa0\x9a\x86n+\x95\x88\x13\xcc\x05\xf2y\xe0&l\xebs\x84\x91\x95U\xdc\xe8f\x15\xa7\x80\xfb\t"\xc5\xf1s\x0f\xbd\xf5u\\w\tx\x9cZ\x12\xe8|\x00\x8bQ\xea\x9a\xf9\x18&\xe0\xf5\xcd#\xcanSBoa\xfb\x06cH\x96\xd4}L\xd3\xe1;Za\xe5\xe7\xc1S#bD\x035E\x9e\xc6\xc8\xb8\x1e\xa3hv\x81\x04hi\xeaTi\x1fl\xa4n\x9e1\xa0\xa7\x9a\x02\xa7\xa18\x02\xb5Z\xd0\x98\xb7\n2?\x81E\xca\xf5\xac)\x06\xe8\xb2#.@\x9a\x00\x1d)\x99\xeb\x1d@\x90\x074JN\x03\xf8\x02\xf7\xf3u|g\x85\xa7\xf3\x8c\xb2\x00\x93\xc5x\x80\x87\x07\x90\x1d\xd1\xed\xb2\xad\x9a\'\xd6\xc8\x01sX\xdcm\xd0\x92Si\x1bR\x12Y\x17P\xe2\x99\x15\x12\xbc\x95\xc8\xab\x0f(+\xf9\x17\xb2\xe2\xd9\x130*\xaa\xd9llq\xf1\xb0\xf64\xaa\xa2P\x1b\x1a\xcb\x17E?\x1c\x7f|\x17\xf57@\xf7\xb7\x12\xc4\xd1Z\xac\x9f\x06\xacx?\x0c\t\x91t\t\xe1K1Or"\x89\x13\xa0kM1\xa4%\xb3( M\xad\xf1H\xdf\xb2z\x1c\x11#\xcc\x93;\x8e\x1f\xbb\x0b\xb0\x0f\xd78 
\x0e\xad\x8fi\x83\xca\xa0\xeb\xc6t\xe5b\xf8,\xfb_[-\xd2\xf4\x00\xf4\xdd\xa9\xe8\xb3n\xfc\xb8\xec\xb5\x0c[\xa9N\xb4\\L\x81\xfa\x0fPK\x03\x04\x14\x00\x00\x00\x08\x00P\x96qH0\\\x01\x91(\x00\x00\x00&\x00\x00\x00\x16\x00\x00\x00EGG-INFO/top_level.txtKM,\xae\x8c\xcf\xcc+.I\xcc\xc9\xe1*\xc8N\x8f/J-\xce/-JN-\xe6*N-)-(\xc9\xcf\xcf)\xe6\x02\x00PK\x03\x04\x14\x00\x00\x00\x08\x00\xa1B[H\x93\x06\xd72\x03\x00\x00\x00\x01\x00\x00\x00\x11\x00\x00\x00EGG-INFO/zip-safe\xe3\x02\x00PK\x03\x04\x14\x00\x00\x00\x08\x00\xf3\x80oH\x15\xd1Q\xc0\x8cj\x00\x00W\x88\x01\x00\x19\x00\x00\x00pkg_resources/__init__.py\xcd\xbdmw\x1b\xc7\x91(\xfc\x9d\xbfbB\xad/\x00\t\x1cIv\xb2\xc9*K\'\x8a\xa4$\xba\xb1%^I\xb6\x93\xa5y\x81!0$\'\x040\xf0\x0c@\n\xce\xe6\xf9\xedO\xbdvW\xf7\xf4\x80\x94\x93\xdc\xb38>\x16\x81\xe9\xa9\xae\xee\xae\xae\xae\xf7><<<8)f\xd7\xc5e\x995e[o\x9bY\x99=?y}p\x94\xf8\x1c\x1c<\xf7\x8d\xaa6+\xb2E}Y\xcd\x8aEvQ-\xcalV\xaf6E\xb5*\xe7\xd9m\xb5\xb9\xaaV\xf0|\xcd\xa0\xc7Y\xdd\xf8\xd6\x07\xed\xf6|^5\xe5lS7\xbblsU6e}\x91g\xd9\x87\xabR_\x08p\xc9\xca\x8fkh\xdc\xfa\x1fW\xc5\xb2l\x0f6uvU\xdc\x94\x08\xa1j\xe0\xcd\xcd\x15\xfc\xaf\x81vm\t\xff\x16\x1bA$\x9bN\x1fO\xa7\xe3\xec\xe1\xaa\xde<\xccn\xaf\xe0\xc1M\xd9\xe0[\x80\x10\xa2Co\xca;\x80g\xd5\x02./\xeb\x0c\x9ag\xdb\xb6\xcc\xea6\xa7\x16\xf5\xba\x84\x06U\xbdj3\xe8yY\xac\xaa\xf5v\x01\xc0\x1cZ\x07\x84Vv^V\xabK\xc0\xa4m\x01\x81j\x05m\xb1+\x18G~p\xd0;D\x98\xcdy\xd9V\x978{\xf0\xc6m\xdd\\3\xf2\xab\xbaY\xca\x04\xb7\xbbvS.\xf5\xfdv|\x90\x97\x97\x97\xfcd\x9c\x15\xaby\xb6]\xe13\x80\xe0\x1f\xc0P^o\xb2Y\x01\x8b\xb1h\x05.\xad\xcc\xa2ZV4C\xc5\x8e::\xc8\x7f\xac\xd6\xfc\x0e\xc1\xa2\xceg\xdbvS/\xb3\x93W\'\xd9\x17O>\x87\xe9*\xe6e\x03\xc3\x879\xcc\xda\xedz]7\x1b\x1a\xdctzYn&\xf3bS\x0cG\xd3\xe9\xc1\xb2\xdc\\\xd5\xf3\xfc\xe0\x10\x88\xeb\xe0\xa2\x01\x08\x93\xc9\xc5v\xb3m\xca\xc9$\xab\x96\xf4Zq\xde\xd6\x8b\xed\xa6\x9c\xf0\xf7\x83\x03\xf9\x1d\x06\xa9\x7f\xd6\xee\xaf\xaa\xd6\xbf6\xd5\xb2\xd4\xbf\x1b\xf7\xd7f\xb7.]c\x18\x07\x0e\xc3|\x95.\xe4\x87\xdb\xa2Y\xc1\n\xb9\xf6\xed\xa6p\xcf.\xb6+
\xa0\xcaz\xe1\x1e\xae\xaf/\xb7\x9bj\xe1:\xaa\xaf\xcb\x95Guy^\xbbGL\x1eu\xe3\xde\x04\xda\xb8\x80\xc5\xd3\xef\xb3z\xb1\x00*F\xfa\xf1M\xaav\xb3\xa8\xce\xf5{\xb9,\xaa\x05\x10[\xd3\x96\x0e\x0c\xacx0\x9cM\xf9qs\xdb\x14k\x9eWAO\'\x15W\x81\xff\x04\x00\x07\x9bf\xf7\xec \x83\x8f<\xc5G\x07\xe5\xc7Y\xb9\xded\xaf\xe9\xa7WMS7\xdc\xe6Av\xb2\x83U[e_\xe4\x9f\x03\xaeK \xf9\xea\xbcZT\x9b\x9d\x05\x01\xffdE\xcb\x90\x1c\x06\x13\xa5\xe46\x07\xe4\xcaf\xa5\xad\xdb\xeac\x7f\xa3\x1c\x9e\xe6\xcb\xfa\x06\xe8M\x9ao\x9b\x05L\xc6\x18\xb6\xd6z\x8c\x94H\x83x\x00\xc4\xbbF\xd2AB\x83\xdd\x08\x9b\xe3|\x87\x9b+k\x81L\xcf\xeb\x8f\xb0\x94\xdcI\xed\x01\x11\x95\xb8\xe1GO\x97\xd7\xc0|\xc6@=\xb8]\xc7\xb0i\x16\xd5\xea\x9a\x1a~\xf7\xee\xf5\x87W\x93\xf7\xdf\x9c\x9c\xbc}\xf7!;\xce>4\xdbr\xcf\x84\xad`?5\xb0\x89t+\x8c\xb3uS\x9f\x17\xe7\x8b\x1d\x00\x85\x8d\x92\xfd\xe1\xf9\xab$\xdc\xdf\xc3^,\x0fb\xac\x81\x80V8\xb9u;\xc1?\xf513\x1f\x9d\xff\x96po\x81r6\xdd\x05\xe6\x7f`\x0e\xf3e1\x03\x06\\\x02{-Z\xff\xf3\xc4\xfd,#(f\xb0\x1e\xb0\xdf7\x9b\xa6:\x87\xcd\x88\xb3\x0b4\x8b\xdc]f\x92\xc61/\x17\xc5\x0e\x99\x99L`9\xbb\x02\xee\xd7.\xdb\xdc\xf4\x1e\xc0\xcf\'\x13\x9c\xdd\xc9\xa4w\xfa\x12/\xc1\xcc\xbc\xa9Wew\\\xb2#\xfa@!5\xdc\x83\x1a\x99o"\xb9Ld\x93L&\xc3A\x924]\xd3\x1cN\x8a\x16\xb6\xec`\xf4)/\xb5p\\U\x17\x15\xbc\xfai\xef5\xe5\x0f[8\x17\x97\xe5j\xf3\x89o.\x8b\xe6\x9a\xbb\x03Fz\x91\r\xbf\x18gOF\xd9\x7f"7\xd5!L\xaa\xd5E\r?\xe1\xb3/F<s\xcb\xf6\x12&}H\x7f\xe3\xe7\xf0\xbd0u 
\x02\xcf\x0f\x9e\x1c!O\xb8*\xf0h\x03\x12\x9d7\xf5z]\xce\xf3\xec\xf7\xc4\xd23\x81\xdff\x87\x1e\xcem\xb5\x803\x0b\xb8Y\x86\xa7{\xceOF\xf4\x7fe\xbf9\xfe1\x04\x0cF\xb8\xc9\xe7\xe5lQ\x00\xb0\xb6^\x96\xd9\xe5\x02\xb6\xd1B\xce\x19\x02u^B\x8b\x0b\x92-\xf0\xc8\x85\xd3\xbb\x86\x97Z\xe0Q\xed\xc5\x8e\x8fr8i\x01\x91\xfc@&Q\x89\tO<\xe8n\xd2\x96\x1bG_\x07\xd0\x17\x90=\x1ck?\xff\xf9\x93\xef\x18\x9f\xe1\xbb\xed\n\xd9\x86|\x95\t\xc2\x13\x0c\xff\xfd\x06\xcf\xf2\xdb+\x18=\x89+$\xfc\x00a\xb5\xed\xb6\xe4\x93\xb2\xd0i@I\xc7\xad?\xc9\x10\xc8L\x17;\x94\t\xe8\x9cEpx\xa0B\xd7\xb9\xebCQ\x9a\xbc/7\xdb5\x9d@\xdf2\xbc\xaf+\xe0p\xc3\xfa\xfc\xafpv\x00R\xf4\x06L\x05\x9c\xa8\xb0 W@\x1am\xb9\xb8\x10l\xf1\xd3\x00\x00\xa0y`Ie3\xec\x01\x07\x0c\x04_\xca\x1d\x8c\x91\x85\xbb\xd8\x08T\x90\xdap\xb4\x068\x90V\xd5V+81W\xb3rHO\xc7\x19\xf4\xb0(M#\x83\x05=b\x0c\x81\xf2\xa8\xbdkV\x02\x0fL\xbetO\xd4\tMF0\xc0\xbe\xfc\xd7`\x7f\xfc\xcfF\xbfL\xa2_\xfe\xf0/A\xff\xf8\x9f\x8d>\xe1\xd9E\xff\xf2_3\xfb_\xfe\xb3\xd1\xbfL\xcf\xfe\xe5\xbf\x86\xf4\xbf\xfcgc\x9f&\xfd\xd5\xbff\xf2\x7f\xf6\xcf\x9e\xfcU\xcf\xe4\x97\x1b\x10\xa6\x96n\x0c\xd7\xe5\xae\xcb\xd7\x0cb\xa7\xd0\xe0\xcc\x02\x80\xb7\x9b.CD\xf6\x0b\\\x7f\xb5\x99\xd0\xa1\x00\xa7'
type(input) = <class 'bytes'>
errors = strict
type(errors) = <class 'str'>
decoding_table = ☺☻♥♦
♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼ !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~⌂\u20ac\ufffe\u201aƒ\u201e\u2026\u2020\u2021\u02c6\u2030\u0160\u2039\u0152\ufffe\u017d\ufffe\ufffe\u2018\u2019\u201c\u201d\u2022\u2013\u2014\u02dc\u2122\u0161\u203a\u0153\ufffe\u017e\u0178 ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ
type(decoding_table) = <class 'str'>
```
| conda/cli/main_list.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from argparse import RawDescriptionHelpFormatter
from os.path import isdir, isfile, join
from .common import (add_parser_help, add_parser_prefix, add_parser_json,
add_parser_show_channel_urls, disp_features, error_and_exit, stdout_json,
get_prefix)
from ..config import show_channel_urls, subdir, use_pip
from ..egg_info import get_egg_info
from ..install import dist2quad
from ..install import name_dist, is_linked, linked
descr = "List linked packages in a conda environment."
# Note, the formatting of this is designed to work well with help2man
examples = """
Examples:
List all packages in the current environment:
conda list
List all packages installed into the environment 'myenv':
conda list -n myenv
Save packages for future use:
conda list --export > package-list.txt
Reinstall packages from an export file:
conda create -n myenv --file package-list.txt
"""
log = logging.getLogger(__name__)
def configure_parser(sub_parsers):
    """Register the ``list`` sub-command on `sub_parsers`.

    Argument registration order is preserved deliberately: argparse lists
    options in ``--help`` output in the order they are added.
    """
    p = sub_parsers.add_parser(
        'list',
        description=descr,
        help=descr,
        formatter_class=RawDescriptionHelpFormatter,
        epilog=examples,
        add_help=False,
    )
    # Shared option groups used across conda sub-commands (--help, -n/-p,
    # --json, --show-channel-urls).
    add_parser_help(p)
    add_parser_prefix(p)
    add_parser_json(p)
    add_parser_show_channel_urls(p)
    p.add_argument(
        '-c', "--canonical",
        action="store_true",
        help="Output canonical names of packages only. Implies --no-pip. ",
    )
    p.add_argument(
        '-f', "--full-name",
        action="store_true",
        help="Only search for full names, i.e., ^<regex>$.",
    )
    p.add_argument(
        "--explicit",
        action="store_true",
        help="List explicitly all installed conda packaged with URL "
             "(output may be used by conda create --file).",
    )
    p.add_argument(
        "--md5",
        action="store_true",
        help="Add MD5 hashsum when using --explicit",
    )
    p.add_argument(
        '-e', "--export",
        action="store_true",
        help="Output requirement string only (output may be used by "
             " conda create --file).",
    )
    p.add_argument(
        '-r', "--revisions",
        action="store_true",
        help="List the revision history and exit.",
    )
    p.add_argument(
        "--no-pip",
        action="store_false",
        default=True,
        dest="pip",
        help="Do not include pip-only installed packages.")
    p.add_argument(
        'regex',
        action="store",
        nargs="?",
        help="List only packages matching this regular expression.",
    )
    p.set_defaults(func=execute)
def print_export_header():
    """Print the standard header used by ``conda list --export`` output."""
    header = (
        '# This file may be used to create an environment using:',
        '# $ conda create --name <env> --file <this file>',
        '# platform: %s' % subdir,
    )
    for line in header:
        print(line)
def get_packages(installed, regex):
    """Yield dists from `installed` whose package name matches `regex`.

    Dists are yielded in case-insensitive alphabetical order; when `regex`
    is falsy, every dist is yielded.
    """
    pat = re.compile(regex, re.I) if regex else None
    # NOTE: the previous key=str.lower raises TypeError when a dist is a
    # string type other than plain str (e.g. unicode on Python 2, since
    # str.lower is an unbound method requiring a str instance).  Calling
    # .lower() on the item itself works for any string type.
    for dist in sorted(installed, key=lambda d: d.lower()):
        name = name_dist(dist)
        if pat and pat.search(name) is None:
            continue
        yield dist
def list_packages(prefix, installed, regex=None, format='human',
                  show_channel_urls=show_channel_urls):
    """Build the listing for the dists in `installed` under `prefix`.

    Returns a ``(exitcode, lines)`` tuple: exitcode is 0 when at least one
    package matched `regex`, 1 otherwise.  `format` is one of 'human',
    'canonical' or 'export'.
    """
    res = 1
    result = []
    for dist in get_packages(installed, regex):
        res = 0
        if format == 'canonical':
            result.append(dist)
            continue
        if format == 'export':
            result.append('='.join(dist2quad(dist)[:3]))
            continue
        try:
            # Returns None if no meta-file found (e.g. pip install); the
            # attribute access below then raises and we fall back.
            info = is_linked(prefix, dist)
            features = set(info.get('features', '').split())
            disp = '%(name)-25s %(version)-15s %(build)15s' % info
            disp += ' %s' % disp_features(features)
            schannel = info.get('schannel')
            if show_channel_urls or show_channel_urls is None and schannel != 'defaults':
                disp += ' %s' % schannel
            result.append(disp)
        except (AttributeError, IOError, KeyError, ValueError) as e:
            log.debug(str(e))
            # Use the same column width for build ('%15s') as the success
            # path above, so the fallback rows stay aligned (was '%s').
            result.append('%-25s %-15s %15s' % dist2quad(dist)[:3])
    return res, result
def print_packages(prefix, regex=None, format='human', piplist=False,
                   json=False, show_channel_urls=show_channel_urls):
    """Print the package listing for the environment at `prefix`.

    Returns the exit code from the listing (0 when at least one package
    matched `regex`).
    """
    if not isdir(prefix):
        error_and_exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" %
                       prefix,
                       json=json,
                       error_type="NoEnvironmentFound")

    if not json:
        if format == 'human':
            print('# packages in environment at %s:' % prefix)
            print('#')
        if format == 'export':
            print_export_header()

    installed = linked(prefix)
    if piplist and use_pip and format == 'human':
        # Fold in manually/pip-installed packages discovered via egg-info.
        installed.update(get_egg_info(prefix))

    exitcode, output = list_packages(prefix, installed, regex, format=format,
                                     show_channel_urls=show_channel_urls)
    if json:
        stdout_json(output)
    else:
        print('\n'.join(output))
    return exitcode
def _format_explicit_url(meta):
    """Reconstruct a package URL from its metadata fields.

    Raises KeyError when any required field is missing.
    """
    return '%s%s-%s-%s.tar.bz2' % (meta['channel'], meta['name'],
                                   meta['version'], meta['build'])


def print_explicit(prefix, add_md5=False):
    """Print an ``@EXPLICIT`` package-URL list for the environment at `prefix`.

    When `add_md5` is true, each URL is suffixed with ``#<md5>`` if the
    hash is recorded in the metadata.  The previous code redefined the
    url-formatting closure on every loop iteration; it is now the module
    helper ``_format_explicit_url``.
    """
    import json
    if not isdir(prefix):
        error_and_exit("Error: environment does not exist: %s" % prefix)
    print_export_header()
    print("@EXPLICIT")
    meta_dir = join(prefix, 'conda-meta')
    for fn in sorted(os.listdir(meta_dir)):
        if not fn.endswith('.json'):
            continue
        with open(join(meta_dir, fn)) as fi:
            meta = json.load(fi)
        url = meta.get('url')
        # Two cases in which we try to (re)build the url:
        #   1. there is no url key in the metadata
        #   2. the url key references a file on the local machine
        if not url:
            try:
                url = _format_explicit_url(meta)
            except KeyError:
                # Declare failure :-(
                print('# no URL for: %s' % fn[:-5])
                continue
        if url.startswith('file'):
            try:
                url = _format_explicit_url(meta)
            except KeyError:
                # NOTE(review): this `continue` skips printing the file://
                # url entirely, although the original comment suggested the
                # local url should be kept; behavior preserved as-is.
                continue
        md5 = meta.get('md5')
        print(url + ('#%s' % md5 if add_md5 and md5 else ''))
def execute(args, parser):
    """Entry point for ``conda list``: dispatch on CLI flags and exit with
    the listing's exit code."""
    prefix = get_prefix(args)

    regex = args.regex
    if args.full_name:
        regex = r'^%s$' % regex

    if args.revisions:
        from conda.history import History
        h = History(prefix)
        if not isfile(h.path):
            error_and_exit("No revision log found: %s\n" % h.path,
                           json=args.json,
                           error_type="NoRevisionLog")
        elif args.json:
            stdout_json(h.object_log())
        else:
            h.print_log()
        return

    if args.explicit:
        print_explicit(prefix, args.md5)
        return

    # --json forces canonical output regardless of the other format flags.
    if args.json or args.canonical:
        format = 'canonical'
    elif args.export:
        format = 'export'
    else:
        format = 'human'

    exitcode = print_packages(prefix, regex, format, piplist=args.pip,
                              json=args.json,
                              show_channel_urls=args.show_channel_urls)
    sys.exit(exitcode)
<|code_end|>
conda/egg_info.py
<|code_start|>
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function
import os
import re
import sys
from os.path import isdir, isfile, join
from .compat import itervalues
from .install import linked_data
from .misc import rel_path
def get_site_packages_dir(installed_pkgs):
    """Return the prefix-relative site-packages path for the installed
    Python, or None when no 'python' package is present."""
    for info in itervalues(installed_pkgs):
        if info['name'] != 'python':
            continue
        if sys.platform == 'win32':
            stdlib_dir = 'Lib'
        else:
            # NOTE(review): version[:3] assumes a single-digit minor
            # ('2.7', '3.5'); would mis-handle '3.10' — confirm acceptable
            # for the Python versions this code targets.
            stdlib_dir = 'lib/python%s' % info['version'][:3]
        return join(stdlib_dir, 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """Yield paths of egg metadata files found directly under `sp_dir`.

    A plain ``*.egg`` / ``*.egg-info`` file is yielded as-is; for a
    directory of either name, each contained PKG-INFO file (top-level or
    under EGG-INFO/) is yielded when present.
    """
    for entry in os.listdir(sp_dir):
        if not entry.endswith(('.egg', '.egg-info')):
            continue
        full = join(sp_dir, entry)
        if isfile(full):
            yield full
        elif isdir(full):
            candidates = (join(full, 'PKG-INFO'),
                          join(full, 'EGG-INFO', 'PKG-INFO'))
            for candidate in candidates:
                if isfile(candidate):
                    yield candidate
# Matches "Key: value" metadata lines (key is word chars only, so dashed
# keys like "Metadata-Version" are deliberately skipped).
pat = re.compile(r'(\w+):\s*(\S+)', re.I)


def parse_egg_info(path):
    """
    Parse an .egg-info file and return its canonical distribution name,
    e.g. ``'<name>-<version>-<egg_info>'``, or None when the file lacks a
    Name or Version field.
    """
    info = {}
    # Use a context manager so the file handle is closed deterministically
    # (the previous code left closing to the garbage collector).
    with open(path) as fi:
        for line in fi:
            m = pat.match(line.strip())
            if m:
                key = m.group(1).lower()
                info[key] = m.group(2)
    try:
        return '%(name)s-%(version)s-<egg_info>' % info
    except KeyError:
        pass
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default, only untracked (not conda installed) .egg-info files are
    considered.  Setting `all_pkgs` to True changes this.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        # No Python in this environment, so no site-packages to inspect.
        return set()

    # All files conda itself placed in the prefix, used to filter out
    # conda-tracked egg-info entries.
    tracked_files = set()
    for info in itervalues(installed_pkgs):
        tracked_files.update(info.get('files', []))

    dists = set()
    for path in get_egg_info_files(join(prefix, sp_dir)):
        relative = rel_path(prefix, path)
        if not all_pkgs and relative in tracked_files:
            continue
        dist = parse_egg_info(path)
        if dist:
            dists.add(dist)
    return dists
if __name__ == '__main__':
    # Ad-hoc debugging entry point: dump the egg-info dists found in the
    # current interpreter's prefix.
    from pprint import pprint
    pprint(get_egg_info(sys.prefix))
<|code_end|>
| conda/cli/main_list.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from argparse import RawDescriptionHelpFormatter
from os.path import isdir, isfile, join
from .common import (add_parser_help, add_parser_prefix, add_parser_json,
add_parser_show_channel_urls, disp_features, error_and_exit, stdout_json,
get_prefix)
from ..config import show_channel_urls, subdir, use_pip
from ..egg_info import get_egg_info
from ..install import dist2quad
from ..install import name_dist, is_linked, linked
descr = "List linked packages in a conda environment."
# Note, the formatting of this is designed to work well with help2man
examples = """
Examples:
List all packages in the current environment:
conda list
List all packages installed into the environment 'myenv':
conda list -n myenv
Save packages for future use:
conda list --export > package-list.txt
Reinstall packages from an export file:
conda create -n myenv --file package-list.txt
"""
log = logging.getLogger(__name__)
def configure_parser(sub_parsers):
    """Register the ``list`` sub-command on `sub_parsers`.

    Argument registration order is preserved deliberately: argparse lists
    options in ``--help`` output in the order they are added.
    """
    p = sub_parsers.add_parser(
        'list',
        description=descr,
        help=descr,
        formatter_class=RawDescriptionHelpFormatter,
        epilog=examples,
        add_help=False,
    )
    # Shared option groups used across conda sub-commands (--help, -n/-p,
    # --json, --show-channel-urls).
    add_parser_help(p)
    add_parser_prefix(p)
    add_parser_json(p)
    add_parser_show_channel_urls(p)
    p.add_argument(
        '-c', "--canonical",
        action="store_true",
        help="Output canonical names of packages only. Implies --no-pip. ",
    )
    p.add_argument(
        '-f', "--full-name",
        action="store_true",
        help="Only search for full names, i.e., ^<regex>$.",
    )
    p.add_argument(
        "--explicit",
        action="store_true",
        help="List explicitly all installed conda packaged with URL "
             "(output may be used by conda create --file).",
    )
    p.add_argument(
        "--md5",
        action="store_true",
        help="Add MD5 hashsum when using --explicit",
    )
    p.add_argument(
        '-e', "--export",
        action="store_true",
        help="Output requirement string only (output may be used by "
             " conda create --file).",
    )
    p.add_argument(
        '-r', "--revisions",
        action="store_true",
        help="List the revision history and exit.",
    )
    p.add_argument(
        "--no-pip",
        action="store_false",
        default=True,
        dest="pip",
        help="Do not include pip-only installed packages.")
    p.add_argument(
        'regex',
        action="store",
        nargs="?",
        help="List only packages matching this regular expression.",
    )
    p.set_defaults(func=execute)
def print_export_header():
    """Print the standard header used by ``conda list --export`` output."""
    header = (
        '# This file may be used to create an environment using:',
        '# $ conda create --name <env> --file <this file>',
        '# platform: %s' % subdir,
    )
    for line in header:
        print(line)
def get_packages(installed, regex):
    """Yield dists from `installed` whose package name matches `regex`,
    in case-insensitive alphabetical order; all dists when `regex` is
    falsy."""
    pattern = re.compile(regex, re.I) if regex else None
    for dist in sorted(installed, key=lambda item: item.lower()):
        if pattern is None or pattern.search(name_dist(dist)) is not None:
            yield dist
def list_packages(prefix, installed, regex=None, format='human',
                  show_channel_urls=show_channel_urls):
    """Build the listing for the dists in `installed` under `prefix`.

    Returns a ``(exitcode, lines)`` tuple: exitcode is 0 when at least one
    package matched `regex`, 1 otherwise.  `format` is one of 'human',
    'canonical' or 'export'.
    """
    exitcode = 1
    lines = []
    for dist in get_packages(installed, regex):
        exitcode = 0
        if format == 'canonical':
            lines.append(dist)
        elif format == 'export':
            lines.append('='.join(dist2quad(dist)[:3]))
        else:
            try:
                # is_linked returns None when no meta-file exists (e.g. a
                # pip-installed package); the .get below then raises and we
                # fall back to the raw dist fields.
                info = is_linked(prefix, dist)
                features = set(info.get('features', '').split())
                entry = '%(name)-25s %(version)-15s %(build)15s' % info
                entry += ' %s' % disp_features(features)
                schannel = info.get('schannel')
                if show_channel_urls or show_channel_urls is None and schannel != 'defaults':
                    entry += ' %s' % schannel
                lines.append(entry)
            except (AttributeError, IOError, KeyError, ValueError) as e:
                log.debug(str(e))
                lines.append('%-25s %-15s %15s' % dist2quad(dist)[:3])
    return exitcode, lines
def print_packages(prefix, regex=None, format='human', piplist=False,
                   json=False, show_channel_urls=show_channel_urls):
    """Print the package listing for the environment at `prefix`.

    Returns the exit code from the listing (0 when at least one package
    matched `regex`).
    """
    if not isdir(prefix):
        error_and_exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create an environment before listing its packages.""" %
                       prefix,
                       json=json,
                       error_type="NoEnvironmentFound")

    if not json:
        if format == 'human':
            print('# packages in environment at %s:' % prefix)
            print('#')
        if format == 'export':
            print_export_header()

    installed = linked(prefix)
    if piplist and use_pip and format == 'human':
        # Fold in manually/pip-installed packages discovered via egg-info.
        installed.update(get_egg_info(prefix))

    exitcode, output = list_packages(prefix, installed, regex, format=format,
                                     show_channel_urls=show_channel_urls)
    if json:
        stdout_json(output)
    else:
        print('\n'.join(output))
    return exitcode
def _format_explicit_url(meta):
    """Reconstruct a package URL from its metadata fields.

    Raises KeyError when any required field is missing.
    """
    return '%s%s-%s-%s.tar.bz2' % (meta['channel'], meta['name'],
                                   meta['version'], meta['build'])


def print_explicit(prefix, add_md5=False):
    """Print an ``@EXPLICIT`` package-URL list for the environment at `prefix`.

    When `add_md5` is true, each URL is suffixed with ``#<md5>`` if the
    hash is recorded in the metadata.  The previous code redefined the
    url-formatting closure on every loop iteration; it is now the module
    helper ``_format_explicit_url``.
    """
    import json
    if not isdir(prefix):
        error_and_exit("Error: environment does not exist: %s" % prefix)
    print_export_header()
    print("@EXPLICIT")
    meta_dir = join(prefix, 'conda-meta')
    for fn in sorted(os.listdir(meta_dir)):
        if not fn.endswith('.json'):
            continue
        with open(join(meta_dir, fn)) as fi:
            meta = json.load(fi)
        url = meta.get('url')
        # Two cases in which we try to (re)build the url:
        #   1. there is no url key in the metadata
        #   2. the url key references a file on the local machine
        if not url:
            try:
                url = _format_explicit_url(meta)
            except KeyError:
                # Declare failure :-(
                print('# no URL for: %s' % fn[:-5])
                continue
        if url.startswith('file'):
            try:
                url = _format_explicit_url(meta)
            except KeyError:
                # NOTE(review): this `continue` skips printing the file://
                # url entirely, although the original comment suggested the
                # local url should be kept; behavior preserved as-is.
                continue
        md5 = meta.get('md5')
        print(url + ('#%s' % md5 if add_md5 and md5 else ''))
def execute(args, parser):
    """
    Entry point for 'conda list': dispatch on the parsed CLI flags
    (--revisions / --explicit / --canonical / --export / --json) and exit
    with the listing's exit code.
    """
    prefix = get_prefix(args)
    regex = args.regex
    if args.full_name:
        # --full-name: match the whole package name, not a substring.
        regex = r'^%s$' % regex
    if args.revisions:
        from conda.history import History
        h = History(prefix)
        if isfile(h.path):
            if not args.json:
                h.print_log()
            else:
                stdout_json(h.object_log())
        else:
            error_and_exit("No revision log found: %s\n" % h.path,
                           json=args.json,
                           error_type="NoRevisionLog")
        return
    if args.explicit:
        print_explicit(prefix, args.md5)
        return
    if args.canonical:
        format = 'canonical'
    elif args.export:
        format = 'export'
    else:
        format = 'human'
    # JSON output always uses canonical dist strings.
    if args.json:
        format = 'canonical'
    exitcode = print_packages(prefix, regex, format, piplist=args.pip,
                              json=args.json,
                              show_channel_urls=args.show_channel_urls)
    sys.exit(exitcode)
conda/egg_info.py
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function
from io import open
import os
import re
import sys
from os.path import isdir, isfile, join
from .compat import itervalues
from .install import linked_data
from .misc import rel_path
def get_site_packages_dir(installed_pkgs):
    """
    Return the site-packages directory (relative to the environment
    prefix) for the 'python' package found in `installed_pkgs`, or None
    when no python is installed.

    `installed_pkgs` maps dist strings to info dicts with at least
    'name' and 'version' keys.
    """
    for info in installed_pkgs.values():
        if info['name'] == 'python':
            if sys.platform == 'win32':
                stdlib_dir = 'Lib'
            else:
                # Use the 'major.minor' part of the version string.  The
                # previous code sliced the first three characters
                # (version[:3]), which breaks for two-digit minor versions
                # such as '3.10' (it yields '3.1'); splitting on dots is
                # correct for any version.
                py_ver = '.'.join(info['version'].split('.')[:2])
                stdlib_dir = 'lib/python%s' % py_ver
            return join(stdlib_dir, 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """
    Yield the path of the metadata file for every .egg / .egg-info entry
    found directly inside the site-packages directory `sp_dir`.

    A plain file entry is yielded as-is; for a directory entry, its
    'PKG-INFO' and 'EGG-INFO/PKG-INFO' children are yielded when present.
    """
    for entry in os.listdir(sp_dir):
        if not entry.endswith(('.egg', '.egg-info')):
            continue
        full = join(sp_dir, entry)
        if isfile(full):
            yield full
            continue
        if not isdir(full):
            continue
        candidates = (join(full, 'PKG-INFO'),
                      join(full, 'EGG-INFO', 'PKG-INFO'))
        for candidate in candidates:
            if isfile(candidate):
                yield candidate
pat = re.compile(r'(\w+):\s*(\S+)', re.I)
def parse_egg_info(path):
    """
    Parse an .egg-info (PKG-INFO style) file at `path` and return a
    canonical dist string of the form '<name>-<version>-<egg_info>', or
    None when the file lacks a Name or Version field.
    """
    info = {}
    for line in open(path, encoding='utf-8'):
        line = line.strip()
        # `pat` matches 'Key: value' headers (case-insensitive key).
        m = pat.match(line)
        if m:
            key = m.group(1).lower()
            info[key] = m.group(2)
    try:
        return '%(name)s-%(version)s-<egg_info>' % info
    except KeyError:
        # Missing 'name' or 'version' header.
        pass
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default, only untracked (not conda installed) .egg-info files are
    considered. Setting `all_pkgs` to True changes this.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        # No python installed in this prefix, hence no site-packages.
        return set()
    conda_files = set()
    for info in itervalues(installed_pkgs):
        conda_files.update(info.get('files', []))
    res = set()
    for path in get_egg_info_files(join(prefix, sp_dir)):
        f = rel_path(prefix, path)
        if all_pkgs or f not in conda_files:
            try:
                dist = parse_egg_info(path)
            except UnicodeDecodeError:
                # Unreadable metadata file; skip it rather than fail.
                dist = None
            if dist:
                res.add(dist)
    return res
if __name__ == '__main__':
from pprint import pprint
pprint(get_egg_info(sys.prefix))
| conda/cli/main_list.py
--- a/conda/cli/main_list.py
+++ b/conda/cli/main_list.py
@@ -115,8 +115,7 @@ def print_export_header():
def get_packages(installed, regex):
pat = re.compile(regex, re.I) if regex else None
-
- for dist in sorted(installed, key=str.lower):
+ for dist in sorted(installed, key=lambda x: x.lower()):
name = name_dist(dist)
if pat and pat.search(name) is None:
continue
@@ -150,7 +149,7 @@ def list_packages(prefix, installed, regex=None, format='human',
result.append(disp)
except (AttributeError, IOError, KeyError, ValueError) as e:
log.debug(str(e))
- result.append('%-25s %-15s %s' % dist2quad(dist)[:3])
+ result.append('%-25s %-15s %15s' % dist2quad(dist)[:3])
return res, result
conda/egg_info.py
--- a/conda/egg_info.py
+++ b/conda/egg_info.py
@@ -4,6 +4,7 @@
"""
from __future__ import absolute_import, division, print_function
+from io import open
import os
import re
import sys
@@ -46,7 +47,7 @@ def parse_egg_info(path):
Parse an .egg-info file and return its canonical distribution name
"""
info = {}
- for line in open(path):
+ for line in open(path, encoding='utf-8'):
line = line.strip()
m = pat.match(line)
if m:
@@ -79,7 +80,10 @@ def get_egg_info(prefix, all_pkgs=False):
for path in get_egg_info_files(join(prefix, sp_dir)):
f = rel_path(prefix, path)
if all_pkgs or f not in conda_files:
- dist = parse_egg_info(path)
+ try:
+ dist = parse_egg_info(path)
+ except UnicodeDecodeError:
+ dist = None
if dist:
res.add(dist)
return res |
Conda install from file fails in version 4.1
I've recently installed linux-32 conda on CentOS 6.2. I confirmed this on linux-64 version as well.
If I run: `conda install ./FILE_1.tar.bz2`, I always get the error:
`Fetching package metadata ...Error: Could not find URL: file:///root/dir1/
`
If I downgrade to conda 4.0.8, it works fine.
Here's the command with the added `--debug` option:
```
$ conda --debug install ./FILE_1.tar.bz2
DEBUG:conda.fetch:channel_urls={'file:///root/dir1/': ('file:///root', 0)}
Fetching package metadata ...INFO:stdoutlog:Fetching package metadata ...
DEBUG:requests.packages.urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None)
DEBUG:conda.fetch:Could not find URL: file:///root/dir1/
Error: Could not find URL: file:///root/dir1/
```
| conda/config.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from .compat import urlparse, string_types
from .utils import try_write, yaml_load
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))
# ----- operating system and architecture -----
_sys_map = {
'linux2': 'linux',
'linux': 'linux',
'darwin': 'osx',
'win32': 'win',
'openbsd5': 'openbsd',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if force_32bit:
bits = 32
if platform == 'linux' and machine() in non_x86_linux_machines:
arch_name = machine()
subdir = 'linux-%s' % arch_name
else:
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'auto_update_conda',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
'root_dir',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
rc = root_dir = root_writable = BINSTAR_TOKEN_PAT = channel_alias = None
def get_rc_path():
    """
    Return the path of the condarc file to use, or None.

    The CONDARC environment variable wins; the literal value ' ' (a single
    space) explicitly disables any condarc.  Otherwise the first existing
    file among the user and system condarc paths is used.
    """
    env_path = os.getenv('CONDARC')
    if env_path == ' ':
        return None
    if env_path:
        return env_path
    for candidate in (user_rc_path, sys_rc_path):
        if isfile(candidate):
            return candidate
    return None
rc_path = get_rc_path()
def load_condarc_(path):
    """
    Parse the YAML condarc file at `path`; return {} when `path` is falsy,
    missing, or the file is empty.
    """
    if not path or not isfile(path):
        return {}
    with open(path) as f:
        return yaml_load(f) or {}
sys_rc = load_condarc_(sys_rc_path) if isfile(sys_rc_path) else {}
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_env_name = 'root'
def _default_envs_dirs():
    """
    Return the default list of environment directories: <root>/envs, with
    per-user fallbacks prepended when the root is not writable.
    """
    lst = [join(root_dir, 'envs')]
    if not root_writable:
        # ~/envs for backwards compatibility
        lst = ['~/.conda/envs', '~/envs'] + lst
    return lst
def _pathsep_env(name):
    """
    Split the environment variable `name` on os.pathsep into a list of
    paths.  The special entry 'DEFAULTS' expands to the configured (or
    default) envs_dirs.  Returns [] when the variable is unset.
    """
    raw = os.getenv(name)
    if raw is None:
        return []
    paths = []
    for entry in raw.split(os.pathsep):
        if entry == 'DEFAULTS':
            paths.extend(rc.get('envs_dirs') or _default_envs_dirs())
        else:
            paths.append(entry)
    return paths
def pkgs_dir_from_envs_dir(envs_dir):
    """
    Return the package cache directory for a given envs directory: the
    shared root cache for <root>/envs, otherwise a private '.pkgs' inside
    `envs_dir`.
    """
    if abspath(envs_dir) == abspath(join(root_dir, 'envs')):
        # 'pkgs32' keeps forced-32-bit caches separate from native ones.
        return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
    else:
        return join(envs_dir, '.pkgs')
# ----- channels -----
# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
    """
    Return the list of local (conda-build croot) channel URLs, memoized in
    the module-level `local_channel` list.  Returns [] when conda-build is
    not installed or its croot does not exist.
    """
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    if clear_cache:
        from .fetch import fetch_index
        fetch_index.cache = {}
    if local_channel:
        return local_channel
    from os.path import exists
    from .utils import url_path
    try:
        from conda_build.config import croot
        if exists(croot):
            local_channel.append(url_path(croot))
    except ImportError:
        # conda-build not installed: no local channel.
        pass
    return local_channel
def get_default_urls():
    """
    Return the 'defaults' channel URLs, honoring a 'default_channels'
    override in the system condarc.
    """
    if 'default_channels' in sys_rc:
        return sys_rc['default_channels']
    return ['https://repo.continuum.io/pkgs/free',
            'https://repo.continuum.io/pkgs/pro']
def get_rc_urls():
    """
    Return the channel list from the loaded condarc, or [] when none is
    configured.  The reserved name 'system' is rejected.
    """
    if rc.get('channels') is None:
        return []
    if 'system' in rc['channels']:
        raise RuntimeError("system cannot be used in .condarc")
    return rc['channels']
def is_url(url):
    """
    Truthy when `url` carries an explicit scheme (http:, file:, ...).
    Note: returns `url` itself (not False) when `url` is falsy.
    """
    return url and urlparse.urlparse(url).scheme != ""
def binstar_channel_alias(channel_alias):
    """
    Append the user's anaconda.org token segment ('t/<token>/') to
    `channel_alias` when token injection is enabled and a binstar client
    with a token is available.  Best-effort: any failure leaves the alias
    unchanged.
    """
    if channel_alias.startswith('file:/'):
        # Local file channels never need a token.
        return channel_alias
    if rc.get('add_anaconda_token',
              rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
        try:
            from binstar_client.utils import get_binstar
            bs = get_binstar()
            # The API domain maps to the conda channel domain.
            bs_domain = bs.domain.replace("api", "conda").rstrip('/') + '/'
            if channel_alias.startswith(bs_domain) and bs.token:
                channel_alias += 't/%s/' % bs.token
        except ImportError:
            log.debug("Could not import binstar")
            pass
        except Exception as e:
            stderrlog.info("Warning: could not import binstar_client (%s)" % e)
    return channel_alias
def hide_binstar_tokens(url):
    """Replace any 't/<token>/' segment in `url` with 't/<TOKEN>/' for display."""
    return BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)
def remove_binstar_tokens(url):
    """Strip any 't/<token>/' segment from `url`."""
    return BINSTAR_TOKEN_PAT.sub(r'\1', url)
def prioritize_channels(channels):
    """
    Map each normalized channel URL to a (canonical_name, priority) pair,
    preserving order.  Consecutive URLs belonging to the same canonical
    channel share one priority number; the number increases whenever the
    canonical name changes.
    """
    newchans = OrderedDict()
    lastchan = None
    priority = 0
    for channel in channels:
        channel = channel.rstrip('/') + '/'
        if channel not in newchans:
            # Drop the trailing platform component before canonicalizing.
            channel_s = canonical_channel_name(channel.rsplit('/', 2)[0])
            priority += channel_s != lastchan
            newchans[channel] = (channel_s, priority)
            lastchan = channel_s
    return newchans
def normalize_urls(urls, platform=None, offline_only=False):
    """
    Expand a list of channel specs into fully-qualified repodata URLs.

    The special entries 'defaults'/'system' expand to the default channel
    URLs ('system' additionally splices in the condarc channels), 'local'
    to the conda-build croot channels.  Bare names are prefixed with the
    channel alias.  Each resulting base URL is emitted for the target
    platform subdir and for 'noarch'.  With `offline_only`, non-file://
    URLs are dropped.
    """
    defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls())
    alias = None
    newurls = []
    # Process as a queue so 'system' can splice the rc channels in front.
    while urls:
        url = urls[0]
        urls = urls[1:]
        if url == "system" and rc_path:
            urls = get_rc_urls() + urls
            continue
        elif url in ("defaults", "system"):
            t_urls = defaults
        elif url == "local":
            t_urls = get_local_urls()
        else:
            t_urls = [url]
        for url0 in t_urls:
            url0 = url0.rstrip('/')
            if not is_url(url0):
                # Bare channel name: resolve lazily (and only once) against
                # the possibly token-augmented channel alias.
                if alias is None:
                    alias = binstar_channel_alias(channel_alias)
                url0 = alias + url0
            if offline_only and not url0.startswith('file:'):
                continue
            for plat in (platform or subdir, 'noarch'):
                newurls.append('%s/%s/' % (url0, plat))
    return newurls
def get_channel_urls(platform=None, offline=False):
    """
    Return the normalized list of channel URLs to use, based on the CIO_TEST
    override, the condarc channels, or the defaults.
    """
    if os.getenv('CIO_TEST'):
        # Internal testing hook.
        import cio_test
        base_urls = cio_test.base_urls
    elif 'channels' in rc:
        base_urls = ['system']
    else:
        base_urls = ['defaults']
    res = normalize_urls(base_urls, platform, offline)
    return res
def canonical_channel_name(channel):
    """
    Map a channel URL to its short canonical name: 'defaults', 'local',
    'filer', the alias-relative name, or the URL itself when none applies.
    Tokens are stripped before matching.
    """
    if channel is None:
        return '<unknown>'
    channel = remove_binstar_tokens(channel).rstrip('/')
    if any(channel.startswith(i) for i in get_default_urls()):
        return 'defaults'
    elif any(channel.startswith(i) for i in get_local_urls(clear_cache=False)):
        return 'local'
    elif channel.startswith('http://filer/'):
        return 'filer'
    elif channel.startswith(channel_alias):
        return channel.split(channel_alias, 1)[1]
    elif channel.startswith('http:/'):
        # Retry with https; fall back to the original URL when that does
        # not canonicalize either.
        channel2 = 'https' + channel[4:]
        channel3 = canonical_channel_name(channel2)
        return channel3 if channel3 != channel2 else channel
    else:
        return channel
def url_channel(url):
    """
    Split a package URL into (channel_url, canonical_channel_name).
    The channel is the URL with its platform and filename parts removed.
    """
    if url is None:
        return None, '<unknown>'
    channel = url.rsplit('/', 2)[0]
    schannel = canonical_channel_name(channel)
    return channel, schannel
# ----- allowed channels -----
def get_allowed_channels():
    """
    Return the whitelist of channel URLs enforced by the system condarc,
    or None when any channel is allowed (no system condarc, or
    allow_other_channels is true).
    """
    if not isfile(sys_rc_path):
        return None
    if sys_rc.get('allow_other_channels', True):
        return None
    if 'channels' in sys_rc:
        base_urls = ['system']
    else:
        base_urls = ['default']
    return normalize_urls(base_urls)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """
    Return the proxy_servers mapping from the condarc; exits with an error
    when the setting is present but not a mapping.
    """
    res = rc.get('proxy_servers') or {}
    if isinstance(res, dict):
        return res
    sys.exit("Error: proxy_servers setting not a mapping")
def load_condarc(path):
    """
    Load the condarc file at `path` and (re)compute every derived config
    value, publishing all of them as module-level globals via
    globals().update(locals()).  Returns the parsed rc dict.
    """
    rc = load_condarc_(path)
    root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
                                            rc.get('root_dir', sys.prefix))))
    root_writable = try_write(root_dir)
    # Publish root_dir/root_writable early: the helpers below read them as
    # module globals.
    globals().update(locals())
    envs_dirs = [abspath(expanduser(p)) for p in (
        _pathsep_env('CONDA_ENVS_PATH') or
        rc.get('envs_dirs') or
        _default_envs_dirs()
    )]
    pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]
    _default_env = os.getenv('CONDA_DEFAULT_ENV')
    if _default_env in (None, root_env_name):
        default_prefix = root_dir
    elif os.sep in _default_env:
        # A path, not an env name.
        default_prefix = abspath(_default_env)
    else:
        for envs_dir in envs_dirs:
            default_prefix = join(envs_dir, _default_env)
            if isdir(default_prefix):
                break
        else:
            default_prefix = join(envs_dirs[0], _default_env)
    # ----- foreign -----
    try:
        with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
            foreign = fi.read().split()
    except IOError:
        foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']
    channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
    if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
        # Locked-down system config overrides the user's alias.
        channel_alias = sys_rc['channel_alias']
    channel_alias = channel_alias.rstrip('/')
    _binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
    BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))
    channel_alias = BINSTAR_TOKEN_PAT.sub(r'\1', channel_alias + '/')
    offline = bool(rc.get('offline', False))
    add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
    always_yes = bool(rc.get('always_yes', False))
    always_copy = bool(rc.get('always_copy', False))
    changeps1 = bool(rc.get('changeps1', True))
    use_pip = bool(rc.get('use_pip', True))
    binstar_upload = rc.get('anaconda_upload',
                            rc.get('binstar_upload', None))  # None means ask
    allow_softlinks = bool(rc.get('allow_softlinks', True))
    auto_update_conda = bool(rc.get('auto_update_conda', rc.get('self_update', True)))
    # show channel URLs when displaying what is going to be downloaded
    show_channel_urls = rc.get('show_channel_urls', None)  # None means letting conda decide
    # set packages disallowed to be installed
    disallow = set(rc.get('disallow', []))
    # packages which are added to a newly created environment by default
    create_default_packages = list(rc.get('create_default_packages', []))
    update_dependencies = bool(rc.get('update_dependencies', True))
    channel_priority = bool(rc.get('channel_priority', True))
    # ssl_verify can be a boolean value or a filename string
    ssl_verify = rc.get('ssl_verify', True)
    try:
        track_features = rc.get('track_features', [])
        if isinstance(track_features, string_types):
            track_features = track_features.split()
        track_features = set(track_features)
    except KeyError:
        track_features = None
    globals().update(locals())
    return rc
load_condarc(rc_path)
<|code_end|>
conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url, dist2filename)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs, subdir
from .fetch import fetch_index
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
from .api import get_index
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.  With `exclude_self_build`, packages carrying a
    'file_hash' (built locally) are skipped.
    """
    res = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            continue
        res.update(set(meta['files']))
    return res
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
r'(?P<fn>[^/\\#]+\.tar\.bz2)'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None, index=None):
    """
    Install the explicitly listed package URLs/paths (`specs`) into the
    environment at `prefix`.

    Each spec has the form '(url|path)(#md5)?'; the literal '@EXPLICIT'
    marker is skipped.  Builds and executes an action plan
    (fetch/extract/unlink/link) and returns the actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {name_dist(dist): dist for dist in install_linked(prefix)}
    fetch_args = fetch_args or {}
    index = index or {}
    verifies = []
    channels = {}
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                sys.exit('Error: file not found: %s' % join(url_p, fn))
            url_p = utils_url_path(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)
        # Determine the channel name from the URL up front: it is needed
        # both for the dist prefix below and for the repodata fetch later.
        # (Previously `schannel` was only bound inside the cache-miss
        # branch, leaving it unbound -- NameError -- for cached file://
        # URLs.)
        _, schannel = url_channel(url)
        # See if the URL refers to a package in our cache.  Use a local
        # name for the dist prefix instead of clobbering the `prefix`
        # parameter (the environment path) as the old code did.
        dist_prefix = pkg_path = dir_path = None
        if url.startswith('file://'):
            dist_prefix = cached_url(url)
        # If not cached, derive the dist prefix from the channel name.
        if dist_prefix is None:
            dist_prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = dist_prefix + fn
        dist = fn[:-8]
        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        if pkg_path and (md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                _, conflict = find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                if fn not in index or index[fn].get('not_fetched'):
                    channels[url_p + '/'] = (schannel, 0)
                actions[FETCH].append(dist)
                verifies.append((dist + '.tar.bz2', md5))
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)
    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))
    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n   spec: %s\n   repo: %s'
                % (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """
    Return `path` expressed relative to `prefix` by stripping the prefix
    and its separator.  On Windows, backslashes are normalized to forward
    slashes unless `windows_forward_slashes` is False.
    """
    relative = path[len(prefix) + 1:]
    if windows_forward_slashes and sys.platform == 'win32':
        relative = relative.replace('\\', '/')
    return relative
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are relative to `prefix`.  With `ignore_predefined_files`,
    conda's own bookkeeping files/directories (and the conda entry-point
    scripts in bin/) are excluded.  On Windows, backslashes are normalized
    to forward slashes unless disabled.
    """
    res = set()
    prefix = abspath(prefix)
    # Top-level entries that belong to conda itself, not to packages.
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # Only bin/ directly under the prefix gets the binignore filter.
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                # Record symlinked directories themselves (walk won't
                # descend into them as files).
                path = join(root, dn)
                if islink(path):
                    res.add(relpath(path, prefix))
    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.

    Editor backups, macOS .DS_Store files, and .pyc files whose .py source
    is conda-tracked are not reported.
    """
    conda_files = conda_installed_files(prefix, exclude_self_build)
    return {path for path in walk_prefix(prefix) - conda_files
            if not (path.endswith('~') or
                    (sys.platform == 'darwin' and path.endswith('.DS_Store')) or
                    (path.endswith('.pyc') and path[:-1] in conda_files))}
def which_prefix(path):
    """
    Given the path to a (presumably) conda-installed file, walk upward and
    return the enclosing environment prefix -- the first ancestor directory
    containing 'conda-meta' -- or None when no such ancestor exists.
    """
    current = abspath(path)
    while not isdir(join(current, 'conda-meta')):
        parent = dirname(current)
        if parent == current:
            # Reached the filesystem root without finding a prefix.
            return None
        current = parent
    return current
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from.  Usually the iteration yields
    only one package.

    Raises RuntimeError when `path` is not inside any conda prefix.
    """
    path = abspath(path)
    prefix = which_prefix(path)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % path)
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if any(abspath(join(prefix, f)) == path for f in meta['files']):
            yield dist
def discard_conda(dists):
    """Return `dists` with any 'conda' package itself filtered out."""
    return [dist for dist in dists if not name_dist(dist) == 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if sys.platform == 'win32' and exists(join(root_dir, '.nonadmin')):
        if not isdir(prefix):
            os.makedirs(prefix)
        with open(join(prefix, '.nonadmin'), 'w') as fo:
            fo.write('')
def append_env(prefix):
    """
    Best-effort: record `prefix` in ~/.conda/environments.txt so other
    tools can discover it.  IOErrors are silently ignored.
    """
    dir_path = abspath(expanduser('~/.conda'))
    try:
        if not isdir(dir_path):
            os.mkdir(dir_path)
        with open(join(dir_path, 'environments.txt'), 'a') as f:
            f.write('%s\n' % prefix)
    except IOError:
        # Non-fatal: environment tracking is informational only.
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Conda itself (and anything depending on it) is excluded.  Packages are
    reinstalled from their original URLs via explicit(); untracked files
    are copied over, rewriting embedded occurrences of prefix1 to prefix2
    in text files.  Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)
    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    # Fixed-point iteration: keep adding packages whose dependencies are
    # already filtered until nothing new is found.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if 'url' not in info]
    notfound = []
    if unknowns:
        fetch_args = fetch_args or {}
        index = get_index(**fetch_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = name_dist(dist)
            fn = dist2filename(dist)
            # Find index entries with the same filename, possibly under a
            # different channel, and prefer the newest version.
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                del drecs[dist]
                dist = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[dist] = r.index[dist]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, subdir, notfound)
        raise RuntimeError(msg)
    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist + '.tar.bz2'
        if fkey not in index:
            # Mark entries we synthesized so explicit() knows to fetch
            # their repodata.
            info['not_fetched'] = True
            index[fkey] = info
            r = None
        urls[dist] = info['url']
    if r is None:
        r = Resolve(index)
    # Install in dependency order.
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]
    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue
        try:
            # Rewrite hard-coded prefix paths in text files.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)
    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the local .tar.bz2 package files in `paths` into `prefix`."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """
    Return (binpath, env) for running programs inside the environment at
    `prefix`: the environment's bin/Scripts directory, and a copy of the
    current process environment with that directory prepended to PATH.
    """
    # prepend the bin directory to the path
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    # PATH may legitimately be unset; os.getenv('PATH') would then be None
    # and pathsep.join() would raise TypeError, so fall back to binpath
    # alone in that case.
    existing = os.getenv('PATH')
    if existing is None:
        path = binpath
    else:
        path = os.path.pathsep.join([binpath, existing])
    env = {'PATH': path}
    # copy existing environment variables, but not anything with PATH in it
    for k, v in iteritems(os.environ):
        if k != 'PATH':
            env[k] = v
    return binpath, env
def make_icon_url(info):
    """
    Return the URL of a package's icon built from its channel and 'icon'
    metadata, or '' when either key is missing.
    """
    if 'channel' in info and 'icon' in info:
        base_url = dirname(info['channel'])
        icon_fn = info['icon']
        # icon_cache_path = join(pkgs_dir, 'cache', icon_fn)
        # if isfile(icon_cache_path):
        #    return url_path(icon_cache_path)
        return '%s/icons/%s' % (base_url, icon_fn)
    return ''
def list_prefixes():
    """
    Yield every environment prefix conda knows about: each directory in
    the configured envs_dirs (hidden names skipped), then the root prefix.
    """
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for entry in sorted(os.listdir(envs_dir)):
            if entry.startswith('.'):
                continue
            candidate = join(envs_dir, entry)
            if isdir(candidate):
                yield candidate
    yield root_dir
<|code_end|>
| conda/config.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from .compat import urlparse, string_types
from .utils import try_write, yaml_load
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))
# ----- operating system and architecture -----
_sys_map = {
'linux2': 'linux',
'linux': 'linux',
'darwin': 'osx',
'win32': 'win',
'openbsd5': 'openbsd',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if force_32bit:
bits = 32
if platform == 'linux' and machine() in non_x86_linux_machines:
arch_name = machine()
subdir = 'linux-%s' % arch_name
else:
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'auto_update_conda',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
'root_dir',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
rc = root_dir = root_writable = BINSTAR_TOKEN_PAT = channel_alias = None
def get_rc_path():
path = os.getenv('CONDARC')
if path == ' ':
return None
if path:
return path
for path in user_rc_path, sys_rc_path:
if isfile(path):
return path
return None
rc_path = get_rc_path()
def load_condarc_(path):
if not path or not isfile(path):
return {}
with open(path) as f:
return yaml_load(f) or {}
sys_rc = load_condarc_(sys_rc_path) if isfile(sys_rc_path) else {}
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_env_name = 'root'
def _default_envs_dirs():
lst = [join(root_dir, 'envs')]
if not root_writable:
# ~/envs for backwards compatibility
lst = ['~/.conda/envs', '~/envs'] + lst
return lst
def _pathsep_env(name):
x = os.getenv(name)
if x is None:
return []
res = []
for path in x.split(os.pathsep):
if path == 'DEFAULTS':
for p in rc.get('envs_dirs') or _default_envs_dirs():
res.append(p)
else:
res.append(path)
return res
def pkgs_dir_from_envs_dir(envs_dir):
if abspath(envs_dir) == abspath(join(root_dir, 'envs')):
return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
else:
return join(envs_dir, '.pkgs')
# ----- channels -----
# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
if clear_cache:
from .fetch import fetch_index
fetch_index.cache = {}
if local_channel:
return local_channel
from os.path import exists
from .utils import url_path
try:
from conda_build.config import croot
if exists(croot):
local_channel.append(url_path(croot))
except ImportError:
pass
return local_channel
def get_default_urls():
if 'default_channels' in sys_rc:
return sys_rc['default_channels']
return ['https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro']
def get_rc_urls():
    """Channels listed in the loaded .condarc.

    Raises RuntimeError if the reserved 'system' alias appears there.
    """
    channels = rc.get('channels')
    if channels is None:
        return []
    if 'system' in channels:
        raise RuntimeError("system cannot be used in .condarc")
    return channels
def is_url(url):
    """Truthy when *url* carries an explicit scheme (http:, file:, ...).

    Falsy inputs are returned unchanged, mirroring ``url and ...``.
    """
    if not url:
        return url
    return urlparse.urlparse(url).scheme != ""
def binstar_channel_alias(channel_alias):
    """Return *channel_alias*, appending the user's anaconda.org API token
    path ('t/<token>/') when token injection is enabled in the rc.

    file:/ aliases are returned untouched.  Failure to import or query
    binstar_client is non-fatal: it is logged and the bare alias returned.
    """
    if channel_alias.startswith('file:/'):
        return channel_alias
    if rc.get('add_anaconda_token',
              rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
        try:
            from binstar_client.utils import get_binstar
            bs = get_binstar()
            # binstar's API domain maps onto the conda channel domain.
            bs_domain = bs.domain.replace("api", "conda").rstrip('/') + '/'
            if channel_alias.startswith(bs_domain) and bs.token:
                channel_alias += 't/%s/' % bs.token
        except ImportError:
            log.debug("Could not import binstar")
            pass
        except Exception as e:
            stderrlog.info("Warning: could not import binstar_client (%s)" % e)
    return channel_alias
def hide_binstar_tokens(url):
    """Mask any embedded binstar token in *url* as 't/<TOKEN>/' (for display)."""
    return BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)
def remove_binstar_tokens(url):
    """Strip any embedded binstar token segment ('t/<token>/') from *url*."""
    return BINSTAR_TOKEN_PAT.sub(r'\1', url)
def prioritize_channels(channels):
    """Map each normalized channel URL to (canonical_name, priority).

    Priority is bumped each time the canonical channel name changes, so
    consecutive URLs belonging to the same channel share one priority.
    Input order is preserved; duplicate URLs keep their first position.
    """
    newchans = OrderedDict()
    lastchan = None
    priority = 0
    for channel in channels:
        channel = channel.rstrip('/') + '/'
        if channel not in newchans:
            # Drop the trailing '<platform>/' component to name the channel.
            channel_s = canonical_channel_name(channel.rsplit('/', 2)[0])
            priority += channel_s != lastchan
            newchans[channel] = (channel_s, priority)
            lastchan = channel_s
    return newchans
def normalize_urls(urls, platform=None, offline_only=False):
    """Expand a list of channel specs into concrete, normalized URLs.

    Handles the special entries 'defaults', 'system' and 'local',
    prefixes bare channel names with the (possibly tokenized) channel
    alias, and appends '<platform>/' and 'noarch/' subdirectories to
    every channel.  *offline_only* drops any non-file: channel.
    """
    defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls())
    alias = None
    newurls = []
    while urls:
        url = urls[0]
        urls = urls[1:]
        if url == "system" and rc_path:
            # 'system' with a user rc: splice the rc channels in and rescan.
            urls = get_rc_urls() + urls
            continue
        elif url in ("defaults", "system"):
            t_urls = defaults
        elif url == "local":
            t_urls = get_local_urls()
        else:
            t_urls = [url]
        for url0 in t_urls:
            url0 = url0.rstrip('/')
            if not is_url(url0):
                # Bare channel name: resolve the alias lazily (it may
                # consult binstar for a token), then prepend it.
                if alias is None:
                    alias = binstar_channel_alias(channel_alias)
                url0 = alias + url0
            if offline_only and not url0.startswith('file:'):
                continue
            for plat in (platform or subdir, 'noarch'):
                newurls.append('%s/%s/' % (url0, plat))
    return newurls
def get_channel_urls(platform=None, offline=False):
    """Return the normalized channel URLs to use for index fetching."""
    if os.getenv('CIO_TEST'):
        # Internal Continuum test harness overrides the channel list.
        import cio_test
        base_urls = cio_test.base_urls
    else:
        base_urls = ['system'] if 'channels' in rc else ['defaults']
    return normalize_urls(base_urls, platform, offline)
def canonical_channel_name(channel):
    """Map a channel URL to its short display name.

    Returns 'defaults', 'local', 'filer', the alias-relative name, or
    the URL itself when no shorter form applies.  http:// URLs are
    retried as https:// before giving up.
    """
    if channel is None:
        return '<unknown>'
    channel = remove_binstar_tokens(channel).rstrip('/')
    if any(channel.startswith(i) for i in get_default_urls()):
        return 'defaults'
    elif any(channel.startswith(i) for i in get_local_urls(clear_cache=False)):
        return 'local'
    elif channel.startswith('http://filer/'):
        return 'filer'
    elif channel.startswith(channel_alias):
        return channel.split(channel_alias, 1)[1]
    elif channel.startswith('http:/'):
        # Retry under https: many channels are registered https-only.
        channel2 = 'https' + channel[4:]
        channel3 = canonical_channel_name(channel2)
        return channel3 if channel3 != channel2 else channel
    else:
        return channel
def url_channel(url):
    """Split a package URL into (channel_url, canonical_channel_name).

    The channel is the URL minus the trailing '<platform>/<filename>'.
    file:// URLs outside the local build channel keep the containing
    directory as both the channel and its name.
    """
    if url is None:
        return None, '<unknown>'
    channel = url.rsplit('/', 2)[0]
    schannel = canonical_channel_name(channel)
    if url.startswith('file://') and schannel != 'local':
        channel = schannel = url.rsplit('/', 1)[0]
    return channel, schannel
# ----- allowed channels -----
def get_allowed_channels():
    """Channels permitted by the system .condarc, or None when unrestricted."""
    if not isfile(sys_rc_path):
        return None
    if sys_rc.get('allow_other_channels', True):
        return None
    base_urls = ['system'] if 'channels' in sys_rc else ['default']
    return normalize_urls(base_urls)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """Return the proxy_servers mapping from .condarc (default: {}).

    Exits with an error message when the setting is not a mapping.
    """
    servers = rc.get('proxy_servers') or {}
    if not isinstance(servers, dict):
        sys.exit("Error: proxy_servers setting not a mapping")
    return servers
def load_condarc(path):
    """Load the condarc at *path* and (re)initialize module-level config.

    NOTE(review): this function publishes nearly every local variable as
    a module-level global via ``globals().update(locals())`` — twice,
    because later computations depend on globals set by the first update
    (e.g. ``_default_envs_dirs`` reads ``root_dir``/``root_writable``).
    Keep the statement order intact.
    """
    rc = load_condarc_(path)

    # CONDA_ROOT env var > rc 'root_dir' > sys.prefix.
    root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
                                            rc.get('root_dir', sys.prefix))))
    root_writable = try_write(root_dir)

    # Publish root_dir/root_writable/rc before helpers that read them run.
    globals().update(locals())

    envs_dirs = [abspath(expanduser(p)) for p in (
        _pathsep_env('CONDA_ENVS_PATH') or
        rc.get('envs_dirs') or
        _default_envs_dirs()
    )]

    pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]

    # Resolve the default (active) prefix from CONDA_DEFAULT_ENV: unset or
    # 'root' -> root prefix; a path-like value -> that path; otherwise the
    # first envs dir containing a directory of that name (or envs_dirs[0]).
    _default_env = os.getenv('CONDA_DEFAULT_ENV')
    if _default_env in (None, root_env_name):
        default_prefix = root_dir
    elif os.sep in _default_env:
        default_prefix = abspath(_default_env)
    else:
        for envs_dir in envs_dirs:
            default_prefix = join(envs_dir, _default_env)
            if isdir(default_prefix):
                break
        else:
            default_prefix = join(envs_dirs[0], _default_env)

    # ----- foreign -----
    try:
        with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
            foreign = fi.read().split()
    except IOError:
        # No 'foreign' file: assume python is foreign unless this looks
        # like a conda-managed root (conda-meta exists).
        foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']

    # Channel alias: system rc overrides when other channels are disallowed.
    channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
    if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
        channel_alias = sys_rc['channel_alias']

    channel_alias = channel_alias.rstrip('/')
    _binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
    BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))
    # Strip any token already embedded in the alias itself.
    channel_alias = BINSTAR_TOKEN_PAT.sub(r'\1', channel_alias + '/')

    offline = bool(rc.get('offline', False))
    add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
    always_yes = bool(rc.get('always_yes', False))
    always_copy = bool(rc.get('always_copy', False))
    changeps1 = bool(rc.get('changeps1', True))
    use_pip = bool(rc.get('use_pip', True))
    binstar_upload = rc.get('anaconda_upload',
                            rc.get('binstar_upload', None))  # None means ask
    allow_softlinks = bool(rc.get('allow_softlinks', True))
    auto_update_conda = bool(rc.get('auto_update_conda', rc.get('self_update', True)))
    # show channel URLs when displaying what is going to be downloaded
    show_channel_urls = rc.get('show_channel_urls', None)  # None means letting conda decide
    # set packages disallowed to be installed
    disallow = set(rc.get('disallow', []))
    # packages which are added to a newly created environment by default
    create_default_packages = list(rc.get('create_default_packages', []))
    update_dependencies = bool(rc.get('update_dependencies', True))
    channel_priority = bool(rc.get('channel_priority', True))

    # ssl_verify can be a boolean value or a filename string
    ssl_verify = rc.get('ssl_verify', True)

    try:
        track_features = rc.get('track_features', [])
        if isinstance(track_features, string_types):
            track_features = track_features.split()
        track_features = set(track_features)
    except KeyError:
        track_features = None

    # Publish everything computed above as module globals.
    globals().update(locals())
    return rc
load_condarc(rc_path)
conda/misc.py
# this module contains miscellaneous stuff which eventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url, dist2filename)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs, subdir
from .fetch import fetch_index
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
from .api import get_index
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            # Locally-built packages record a file_hash; skip on request.
            continue
        files.update(meta['files'])
    return files
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
r'(?P<fn>[^/\\#]+\.tar\.bz2)'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None, index=None):
    """Install an explicit list of package URLs/paths into *prefix*.

    Each spec is '(url|path)(#md5)?' or the sentinel '@EXPLICIT'.  Builds
    an actions plan (fetch/extract/unlink/link), verifies MD5s against the
    fetched index, and executes the plan.  Returns the actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK
    linked = {name_dist(dist): dist for dist in install_linked(prefix)}
    fetch_args = fetch_args or {}
    index = index or {}
    verifies = []   # (filename, md5) pairs checked after the index fetch
    channels = {}   # channel url -> (schannel, priority) for fetch_index
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            # Bare path (or bare filename): convert to a file:// URL.
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                sys.exit('Error: file not found: %s' % join(url_p, fn))
            url_p = utils_url_path(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)

        # See if the URL refers to a package in our cache
        # NOTE(review): 'prefix' is rebound here, shadowing the function's
        # env-prefix argument for the rest of the loop; actions['PREFIX']
        # and 'linked' were captured before this, so behavior is preserved.
        prefix = pkg_path = dir_path = None
        if url.startswith('file://'):
            prefix = cached_url(url)

        # If not, determine the channel name from the URL
        # NOTE(review): when cached_url() returns a prefix, 'schannel' is
        # left unbound; the later channels[...] assignment would then raise
        # NameError if reached — confirm cached_url's contract before use.
        if prefix is None:
            _, schannel = url_channel(url)
            prefix = '' if schannel == 'defaults' else schannel + '::'
        fn = prefix + fn
        dist = fn[:-8]

        is_file = fn.startswith('file://')
        # Add file to index so we'll see it later
        if is_file:
            index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': None}

        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache
        if pkg_path and (is_file or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                # If there is a cache-location conflict, clean it up first.
                _, conflict = find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                if not is_file:
                    if fn not in index or index[fn].get('not_fetched'):
                        channels[url_p + '/'] = (schannel, 0)
                    verifies.append((dist + '.tar.bz2', md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n   spec: %s\n   repo: %s'
                % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix*, using forward slashes on Windows."""
    rel = path[len(prefix) + 1:]
    if windows_forward_slashes and sys.platform == 'win32':
        rel = rel.replace('\\', '/')
    return rel
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are relative to *prefix*.  Well-known conda bookkeeping
    directories/files (and a few bin/ entry points) are skipped unless
    *ignore_predefined_files* is False.  Symlinked directories are
    recorded as entries but not descended into by os.walk.
    """
    res = set()
    prefix = abspath(prefix)
    # Top-level names conda manages itself; never report these.
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # Entry points in bin/ that belong to conda rather than packages.
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # Only filter binignore in the top-level bin directory itself.
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # Record symlinked dirs themselves (os.walk won't follow).
                    res.add(relpath(path, prefix))

    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.
    """
    conda_files = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - conda_files:
        if path.endswith('~'):
            continue  # editor backup files
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue  # Finder metadata
        if path.endswith('.pyc') and path[:-1] in conda_files:
            continue  # bytecode for a tracked .py file
        result.add(path)
    return result
def which_prefix(path):
    """Return the conda environment prefix containing *path*, or None.

    Walks up the directory tree until a 'conda-meta' directory is found
    or the filesystem root is reached.
    """
    prefix = abspath(path)
    while not isdir(join(prefix, 'conda-meta')):
        parent = dirname(prefix)
        if parent == prefix:
            # Reached the filesystem root without finding conda-meta.
            return None
        prefix = parent
    return prefix
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from. Usually the iteration yields
    only one package.
    """
    path = abspath(path)
    prefix = which_prefix(path)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % path)
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        for f in meta['files']:
            if abspath(join(prefix, f)) == path:
                yield dist
                break
def discard_conda(dists):
    """Filter the 'conda' package itself out of *dists*."""
    return [dist for dist in dists if name_dist(dist) != 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if sys.platform != 'win32':
        return
    if not exists(join(root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    with open(join(prefix, '.nonadmin'), 'w') as marker:
        marker.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        with open(join(conda_dir, 'environments.txt'), 'a') as fh:
            fh.write('%s\n' % prefix)
    except IOError:
        # Recording the environment is advisory; ignore write failures.
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Copies untracked files (rewriting embedded prefix paths in text
    files) and re-installs every cloneable package via explicit().
    Packages named 'conda' — and anything transitively depending on
    conda — are excluded.  Returns (actions, untracked_files).
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    # Fixed-point loop: keep adding packages whose dependencies are
    # already filtered until nothing new is excluded.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}

    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if 'url' not in info]
    notfound = []
    if unknowns:
        fetch_args = fetch_args or {}
        index = get_index(**fetch_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = name_dist(dist)
            fn = dist2filename(dist)
            # Find any index entry with the same tarball filename and
            # substitute the newest one for the URL-less local record.
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                del drecs[dist]
                dist = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[dist] = r.index[dist]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, subdir, notfound)
        raise RuntimeError(msg)

    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist + '.tar.bz2'
        if fkey not in index:
            # Locally-known record absent from the fetched index; mark it
            # and force a fresh Resolve over the augmented index below.
            info['not_fetched'] = True
            index[fkey] = info
            r = None
        urls[dist] = info['url']

    if r is None:
        r = Resolve(index)
    # Install in dependency order so linking scripts can run.
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            # Rewrite any embedded references to the source prefix.
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install local package tarballs (*paths*) into *prefix* via explicit()."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Return (binpath, env) for running programs in the environment at
    *prefix*: the environment's scripts/bin directory, plus a copy of
    os.environ whose PATH has that directory prepended.

    Fix: tolerate an unset PATH — os.getenv('PATH') can return None,
    which previously made pathsep.join raise TypeError.
    """
    # prepend the bin directory to the path
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    path = os.path.pathsep.join([binpath, os.getenv('PATH') or ''])
    env = {'PATH': path}
    # copy existing environment variables, but not anything with PATH in it
    for k, v in iteritems(os.environ):
        if k != 'PATH':
            env[k] = v
    return binpath, env
def make_icon_url(info):
    """Build the icon URL for a package record, or '' when unavailable.

    Requires both 'channel' and 'icon' keys; the icon lives under the
    channel's parent directory in an 'icons/' folder.
    """
    if 'channel' not in info or 'icon' not in info:
        return ''
    base_url = dirname(info['channel'])
    return '%s/icons/%s' % (base_url, info['icon'])
def list_prefixes():
    """Yield the prefixes of all known conda environments, root last.

    Hidden (dot-prefixed) entries and non-directories are skipped.
    """
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for name in sorted(os.listdir(envs_dir)):
            if name.startswith('.'):
                continue
            candidate = join(envs_dir, name)
            if isdir(candidate):
                yield candidate
    yield root_dir
| conda/config.py
--- a/conda/config.py
+++ b/conda/config.py
@@ -297,6 +297,8 @@ def url_channel(url):
return None, '<unknown>'
channel = url.rsplit('/', 2)[0]
schannel = canonical_channel_name(channel)
+ if url.startswith('file://') and schannel != 'local':
+ channel = schannel = url.rsplit('/', 1)[0]
return channel, schannel
# ----- allowed channels -----
conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -76,12 +76,17 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None,
prefix = '' if schannel == 'defaults' else schannel + '::'
fn = prefix + fn
dist = fn[:-8]
+ is_file = fn.startswith('file://')
+ # Add file to index so we'll see it later
+ if is_file:
+ index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': None}
pkg_path = is_fetched(dist)
dir_path = is_extracted(dist)
# Don't re-fetch unless there is an MD5 mismatch
- if pkg_path and (md5 and md5_file(pkg_path) != md5):
+ # Also remove explicit tarballs from cache
+ if pkg_path and (is_file or md5 and md5_file(pkg_path) != md5):
# This removes any extracted copies as well
actions[RM_FETCHED].append(dist)
pkg_path = dir_path = None
@@ -96,10 +101,11 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None,
_, conflict = find_new_location(dist)
if conflict:
actions[RM_FETCHED].append(conflict)
- if fn not in index or index[fn].get('not_fetched'):
- channels[url_p + '/'] = (schannel, 0)
+ if not is_file:
+ if fn not in index or index[fn].get('not_fetched'):
+ channels[url_p + '/'] = (schannel, 0)
+ verifies.append((dist + '.tar.bz2', md5))
actions[FETCH].append(dist)
- verifies.append((dist + '.tar.bz2', md5))
actions[EXTRACT].append(dist)
# unlink any installed package with that name |
TypeError resulting from conda create
I've just installed anaconda2 on a CentOS 6.8 instance. Now I'm trying to create a new conda environment but am receiving this error:
```
[ebrown@AWS-SYD-AL-T2MIC-SWM-P-ANACONDA01 ~]$ conda create --name testenv python
Fetching package metadata .......
Solving package specifications .............
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Include the output of the command 'conda info' in your report.
Traceback (most recent call last):
File "/anaconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 120, in main
args_func(args, p)
File "/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 127, in args_func
args.func(args, p)
File "/anaconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 57, in execute
install(args, parser, 'create')
File "/anaconda/lib/python2.7/site-packages/conda/cli/install.py", line 315, in install
shortcuts=shortcuts)
File "/anaconda/lib/python2.7/site-packages/conda/plan.py", line 461, in install_actions
shortcuts=shortcuts)
File "/anaconda/lib/python2.7/site-packages/conda/plan.py", line 333, in ensure_linked_actions
actions[inst.LINK].append(dist, LINK_COPY, shortcuts)
TypeError: append() takes exactly one argument (3 given)
```
Here is my conda info:
```
[ebrown@AWS-SYD-AL-T2MIC-SWM-P-ANACONDA01 ~]$ conda info
Current conda install:
platform : linux-64
conda version : 4.1.0
conda-build version : 1.20.0
python version : 2.7.11.final.0
requests version : 2.9.1
root environment : /anaconda (writable)
default environment : /anaconda
envs directories : /anaconda/envs
package cache : /anaconda/pkgs
channel URLs : https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : None
is foreign system : False
```
I'm at a loss as to why this is happening, can anyone help?
Thanks
| conda/plan.py
<|code_start|>
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from logging import getLogger
from os.path import abspath, basename, dirname, join, exists
from . import instructions as inst
from .config import (always_copy as config_always_copy,
show_channel_urls as config_show_channel_urls,
root_dir, allow_softlinks, default_python, auto_update_conda,
track_features, foreign, url_channel, canonical_channel_name)
from .exceptions import CondaException
from .history import History
from .install import (dist2quad, LINK_HARD, link_name_map, name_dist, is_fetched,
is_extracted, is_linked, find_new_location, dist2filename, LINK_COPY,
LINK_SOFT, try_hard_link, rm_rf)
from .resolve import MatchSpec, Resolve, Package
from .utils import md5_file, human_bytes
# For backwards compatibility
log = getLogger(__name__)
def print_dists(dists_extras):
    """Print a two-column table of (dist, extra) rows: 'name-version | build'."""
    fmt = "    %-27s|%17s"
    print(fmt % ('package', 'build'))
    print(fmt % ('-' * 27, '-' * 17))
    for dist, extra in dists_extras:
        name, version, build, _ = dist2quad(dist)
        line = fmt % (name + '-' + version, build)
        if extra:
            line += extra
        print(line)
def display_actions(actions, index, show_channel_urls=None):
    """Pretty-print a plan's downloads, installs, removals, updates and
    downgrades for user confirmation.

    *show_channel_urls*: True always shows channels, False never, None
    (default: the rc setting) hides only the 'defaults' channel.
    """
    if show_channel_urls is None:
        show_channel_urls = config_show_channel_urls

    def channel_str(rec):
        # Best available channel name for a record, in preference order.
        if 'schannel' in rec:
            return rec['schannel']
        if 'url' in rec:
            return url_channel(rec['url'])[1]
        if 'channel' in rec:
            return canonical_channel_name(rec['channel'])
        return '<unknown>'

    def channel_filt(s):
        # Apply the show_channel_urls policy to a channel name.
        if show_channel_urls is False:
            return ''
        if show_channel_urls is None and s == 'defaults':
            return ''
        return s

    if actions.get(inst.FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[inst.FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            schannel = channel_filt(channel_str(info))
            if schannel:
                extra += '  ' + schannel
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        if index and len(actions[inst.FETCH]) > 1:
            num_bytes = sum(index[dist + '.tar.bz2']['size']
                            for dist in actions[inst.FETCH])
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))

    # package -> [oldver-oldbuild, newver-newbuild]
    # Index 0 is the unlinked (old) side, index 1 the linked (new) side.
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))
    channels = defaultdict(lambda: list(('', '')))
    records = defaultdict(lambda: list((None, None)))
    linktypes = {}

    for arg in actions.get(inst.LINK, []):
        dist, lt, shortcuts = inst.split_linkarg(arg)
        fkey = dist + '.tar.bz2'
        rec = index[fkey]
        pkg = rec['name']
        channels[pkg][1] = channel_str(rec)
        packages[pkg][1] = rec['version'] + '-' + rec['build']
        records[pkg][1] = Package(fkey, rec)
        linktypes[pkg] = lt
        features[pkg][1] = rec.get('features', '')
    for arg in actions.get(inst.UNLINK, []):
        dist, lt, shortcuts = inst.split_linkarg(arg)
        fkey = dist + '.tar.bz2'
        rec = index.get(fkey)
        if rec is None:
            # Unlinked package missing from the index: synthesize a record
            # from the dist name so it can still be displayed.
            pkg, ver, build, schannel = dist2quad(dist)
            rec = dict(name=pkg, version=ver, build=build, channel=None,
                       schannel='<unknown>',
                       build_number=int(build) if build.isdigit() else 0)
        pkg = rec['name']
        channels[pkg][0] = channel_str(rec)
        packages[pkg][0] = rec['version'] + '-' + rec['build']
        records[pkg][0] = Package(fkey, rec)
        features[pkg][0] = rec.get('features', '')

    # Classify: no old side -> new install; no new side -> removal.
    new = {p for p in packages if not packages[p][0]}
    removed = {p for p in packages if not packages[p][1]}
    # New packages are actually listed in the left-hand column,
    # so let's move them over there
    for pkg in new:
        for var in (packages, features, channels, records):
            var[pkg] = var[pkg][::-1]

    # Column widths for aligned output (only meaningful when non-empty).
    if packages:
        maxpkg = max(len(p) for p in packages) + 1
        maxoldver = max(len(p[0]) for p in packages.values())
        maxnewver = max(len(p[1]) for p in packages.values())
        maxoldfeatures = max(len(p[0]) for p in features.values())
        maxnewfeatures = max(len(p[1]) for p in features.values())
        maxoldchannels = max(len(channel_filt(p[0])) for p in channels.values())
        maxnewchannels = max(len(channel_filt(p[1])) for p in channels.values())
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if maxoldchannels:
            oldfmt[pkg] += ' {channels[0]:<%s}' % maxoldchannels
        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures

        lt = linktypes.get(pkg, LINK_HARD)
        # Only non-default link types are annotated, e.g. ' (softlink)'.
        lt = '' if lt == LINK_HARD else (' (%s)' % link_name_map[lt])
        if pkg in removed or pkg in new:
            oldfmt[pkg] += lt
            continue

        newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        if maxnewchannels:
            newfmt[pkg] += ' {channels[1]:<%s}' % maxnewchannels
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
        newfmt[pkg] += lt

        P0 = records[pkg][0]
        P1 = records[pkg][1]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = ((P0.name, P0.norm_version, P0.build_number) <=
                     (P1.name, P1.norm_version, P1.build_number))
        except TypeError:
            # norm_version comparison failed (mixed types); fall back to
            # comparing the raw version strings.
            newer = ((P0.name, P0.version, P0.build_number) <=
                     (P1.name, P1.version, P1.build_number))
        if newer or str(P1.version) == 'custom':
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' ' * 4

    def format(s, pkg):
        chans = [channel_filt(c) for c in channels[pkg]]
        return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
                               channels=chans, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            # New packages have been moved to the "old" column for display
            print(format(oldfmt[pkg], pkg))
    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(format(oldfmt[pkg], pkg))
    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
def nothing_to_do(actions):
    """True when the plan contains no work for any known operation."""
    return not any(actions.get(op) for op in inst.action_codes)
def add_unlink(actions, dist):
    """Append *dist* to the plan's UNLINK list, creating the list if needed."""
    actions.setdefault(inst.UNLINK, []).append(dist)
def plan_from_actions(actions):
    """Flatten an actions dict into an ordered list of (op, arg) steps,
    inserting PRINT/PROGRESS pseudo-instructions for user feedback.

    Honors actions['op_order'] when present, else the default order.
    """
    if 'op_order' in actions and actions['op_order']:
        op_order = actions['op_order']
    else:
        op_order = inst.action_codes

    assert inst.PREFIX in actions and actions[inst.PREFIX]
    res = [('PREFIX', '%s' % actions[inst.PREFIX])]

    if sys.platform == 'win32':
        # Always link/unlink menuinst first on windows in case a subsequent
        # package tries to import it to create/remove a shortcut
        for op in (inst.UNLINK, inst.FETCH, inst.EXTRACT, inst.LINK):
            if op in actions:
                pkgs = []
                for pkg in actions[op]:
                    if 'menuinst' in pkg:
                        res.append((op, pkg))
                    else:
                        pkgs.append(pkg)
                actions[op] = pkgs

    for op in op_order:
        if op not in actions:
            continue
        if not actions[op]:
            continue
        if '_' not in op:
            # Plain ops (FETCH, LINK, ...) get a progress banner.
            res.append((inst.PRINT, '%sing packages ...' % op.capitalize()))
        elif op.startswith('RM_'):
            res.append((inst.PRINT, 'Pruning %s packages from the cache ...' % op[3:].lower()))
        if op in inst.progress_cmds:
            res.append((inst.PROGRESS, '%d' % len(actions[op])))
        for arg in actions[op]:
            res.append((op, arg))

    return res
# force_linked_actions has now been folded into this function, and is enabled by
# supplying an index and setting force=True
def ensure_linked_actions(dists, prefix, index=None, force=False,
                          always_copy=False, shortcuts=False):
    """Build the actions plan that brings every dist in *dists* to the
    linked state in *prefix*, fetching/extracting as needed.

    When *index* is supplied, cached tarballs are MD5-verified and
    re-fetched on mismatch; *force* re-extracts and re-links even
    already-linked packages.  Link entries are encoded as the string
    'dist linktype shortcuts' (parsed later by inst.split_linkarg).
    """
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)
    for dist in dists:
        fetched_in = is_fetched(dist)
        extracted_in = is_extracted(dist)

        if fetched_in and index is not None:
            # Test the MD5, and possibly re-fetch
            fn = dist + '.tar.bz2'
            try:
                if md5_file(fetched_in) != index[fn]['md5']:
                    # RM_FETCHED now removes the extracted data too
                    actions[inst.RM_FETCHED].append(dist)
                    # Re-fetch, re-extract, re-link
                    fetched_in = extracted_in = None
                    force = True
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)

        if not force and is_linked(prefix, dist):
            continue

        if extracted_in and force:
            # Always re-extract in the force case
            actions[inst.RM_EXTRACTED].append(dist)
            extracted_in = None

        # Otherwise we need to extract, and possibly fetch
        if not extracted_in and not fetched_in:
            # If there is a cache conflict, clean it up
            fetched_in, conflict = find_new_location(dist)
            fetched_in = join(fetched_in, dist2filename(dist))
            if conflict is not None:
                actions[inst.RM_FETCHED].append(conflict)
            actions[inst.FETCH].append(dist)

        if not extracted_in:
            actions[inst.EXTRACT].append(dist)

        fetched_dist = extracted_in or fetched_in[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not extracted_in:
                # If not already extracted, create some dummy
                # data to test with
                rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if config_always_copy or always_copy:
                lt = LINK_COPY
            elif try_hard_link(fetched_dir, prefix, dist):
                lt = LINK_HARD
            elif allow_softlinks and sys.platform != 'win32':
                lt = LINK_SOFT
            else:
                lt = LINK_COPY
            actions[inst.LINK].append('%s %d %s' % (dist, lt, shortcuts))
        except (OSError, IOError):
            # BUG FIX: list.append takes a single argument.  The previous
            # code did `append(dist, LINK_COPY, shortcuts)`, raising
            # "TypeError: append() takes exactly one argument (3 given)"
            # whenever the link-type probe hit an OS error.  Fall back to
            # a copy-link entry encoded the same way as the success path.
            actions[inst.LINK].append('%s %d %s' % (dist, LINK_COPY, shortcuts))
        finally:
            if not extracted_in:
                # Remove the dummy data
                try:
                    rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass

    return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
    """True when *prefix* names the root conda installation."""
    return abspath(root_dir) == abspath(prefix)
def add_defaults_to_specs(r, linked, specs, update=False):
    """Append default python/lua specs to *specs* (mutated in place) when
    something being installed depends on them and no explicit version was
    requested.

    r      -- a Resolve instance over the current index
    linked -- dists currently linked into the environment
    update -- accepted for API compatibility; not used in this body
    """
    # TODO: This should use the pinning mechanism. But don't change the API:
    # cas uses it.
    if r.explicit(specs):
        return
    log.debug('H0 specs=%r' % specs)
    # Normalize linked dists to index keys (filenames ending in .tar.bz2).
    linked = [d if d.endswith('.tar.bz2') else d + '.tar.bz2' for d in linked]
    names_linked = {r.index[fn]['name']: fn for fn in linked if fn in r.index}
    mspecs = list(map(MatchSpec, specs))

    for name, def_ver in [('python', default_python),
                          # Default version required, but only used for Python
                          ('lua', None)]:
        if any(s.name == name and not s.is_simple() for s in mspecs):
            # if any of the specifications mention the Python/Numpy version,
            # we don't need to add the default spec
            log.debug('H1 %s' % name)
            continue

        depends_on = {s for s in mspecs if r.depends_on(s, name)}
        any_depends_on = bool(depends_on)
        log.debug('H2 %s %s' % (name, any_depends_on))

        if not any_depends_on:
            # if nothing depends on Python/Numpy AND the Python/Numpy is not
            # specified, we don't need to add the default spec
            log.debug('H2A %s' % name)
            continue

        if any(s.is_exact() for s in depends_on):
            # If something depends on Python/Numpy, but the spec is very
            # explicit, we also don't need to add the default spec
            log.debug('H2B %s' % name)
            continue

        if name in names_linked:
            # if Python/Numpy is already linked, we add that instead of the
            # default
            log.debug('H3 %s' % name)
            fkey = names_linked[name]
            info = r.index[fkey]
            ver = '.'.join(info['version'].split('.', 2)[:2])
            spec = '%s %s* (target=%s)' % (info['name'], ver, fkey)
            specs.append(spec)
            continue

        if name == 'python' and def_ver.startswith('3.'):
            # Don't include Python 3 in the specs if this is the Python 3
            # version of conda.
            continue

        if def_ver is not None:
            specs.append('%s %s*' % (name, def_ver))
    log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
    """Read the pinned-package spec strings for *prefix*.

    Returns the non-empty, non-comment lines of
    ``<prefix>/conda-meta/pinned``, or an empty list when that file
    does not exist.
    """
    pinfile = join(prefix, 'conda-meta', 'pinned')
    if not exists(pinfile):
        return []
    with open(pinfile) as fh:
        lines = fh.read().strip().splitlines()
    return [ln for ln in lines if ln and not ln.strip().startswith('#')]
def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False,
                    pinned=True, minimal_hint=False, update_deps=True, prune=False,
                    shortcuts=False):
    """Compute the action plan that installs *specs* into *prefix*.

    Solves the specs against *index*, then builds fetch/extract/link
    actions relative to the currently linked packages, plus unlinks for
    anything being replaced or pruned.
    """
    r = Resolve(index)
    linked = r.installed

    # NOTE(review): the appends below mutate the caller's *specs* list.
    if auto_update_conda and is_root_prefix(prefix):
        specs.append('conda')

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)
        specs += pinned_specs

    must_have = {}
    if track_features:
        specs.extend(x + '@' for x in track_features)

    pkgs = r.install(specs, linked, update_deps=update_deps)

    for fn in pkgs:
        dist = fn[:-8]          # strip '.tar.bz2'
        name = name_dist(dist)
        if not name or only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        for name in foreign:
            if name in must_have:
                del must_have[name]
    elif basename(prefix).startswith('_'):
        # anything (including conda) can be installed into environments
        # starting with '_', mainly to allow conda-build to build conda
        pass
    else:
        # disallow conda from being installed into all other environments
        if 'conda' in must_have or 'conda-env' in must_have:
            sys.exit("Error: 'conda' can only be installed into the "
                     "root environment")

    smh = r.dependency_sort(must_have)

    actions = ensure_linked_actions(
        smh, prefix,
        index=index if force else None,
        force=force, always_copy=always_copy,
        shortcuts=shortcuts)

    if actions[inst.LINK]:
        actions[inst.SYMLINK_CONDA] = [root_dir]

    # Unlink anything being replaced by a different build, or pruned away.
    for fkey in sorted(linked):
        dist = fkey[:-8]
        name = name_dist(dist)
        replace_existing = name in must_have and dist != must_have[name]
        prune_it = prune and dist not in smh
        if replace_existing or prune_it:
            add_unlink(actions, dist)

    return actions
def remove_actions(prefix, specs, index, force=False, pinned=True):
    """Compute the action plan that removes *specs* from *prefix*.

    With force=True only the packages directly matching *specs* are
    unlinked; otherwise the solver decides which dependents must go too.
    Raises RuntimeError when a pinned package would be removed, and exits
    when the removal would take 'conda' out of the root environment.
    """
    r = Resolve(index)
    linked = r.installed

    if force:
        mss = list(map(MatchSpec, specs))
        nlinked = {r.package_name(fn): fn[:-8]
                   for fn in linked
                   if not any(r.match(ms, fn) for ms in mss)}
    else:
        add_defaults_to_specs(r, linked, specs, update=True)
        nlinked = {r.package_name(fn): fn[:-8] for fn in r.remove(specs, linked)}

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)

    linked = {r.package_name(fn): fn[:-8] for fn in linked}

    actions = ensure_linked_actions(r.dependency_sort(nlinked), prefix)
    for old_fn in reversed(r.dependency_sort(linked)):
        dist = old_fn + '.tar.bz2'
        name = r.package_name(dist)
        if old_fn == nlinked.get(name, ''):
            continue
        if pinned and any(r.match(ms, dist) for ms in pinned_specs):
            # BUG FIX: corrected typo in the user-facing message ("becaue").
            msg = "Cannot remove %s because it is pinned. Use --no-pin to override."
            raise RuntimeError(msg % dist)
        if name == 'conda' and name not in nlinked:
            if any(s.split(' ', 1)[0] == 'conda' for s in specs):
                sys.exit("Error: 'conda' cannot be removed from the root environment")
            else:
                msg = ("Error: this 'remove' command cannot be executed because it\n"
                       "would require removing 'conda' dependencies")
                sys.exit(msg)
        add_unlink(actions, old_fn)

    return actions
def remove_features_actions(prefix, index, features):
    """Plan the removal of every linked package that tracks or uses any of
    *features*, linking a feature-free substitute when one exists."""
    r = Resolve(index)
    linked = r.installed

    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            continue
        if r.track_features(fn).intersection(features):
            add_unlink(actions, dist)
        if r.features(fn).intersection(features):
            add_unlink(actions, dist)
            subst = r.find_substitute(_linked, features, fn)
            if subst:
                to_link.append(subst[:-8])  # strip '.tar.bz2'

    if to_link:
        actions.update(ensure_linked_actions(to_link, prefix))
    return actions
def revert_actions(prefix, revision=-1):
    """Plan the link/unlink actions that restore *prefix* to *revision*
    of its recorded history (default: the most recent state)."""
    h = History(prefix)
    h.update()
    try:
        state = h.get_state(revision)
    except IndexError:
        sys.exit("Error: no such revision: %d" % revision)

    curr = h.get_state()
    if state == curr:
        # Already at the requested state; empty mapping means "no actions".
        return {}

    actions = ensure_linked_actions(state, prefix)
    for dist in curr - state:
        add_unlink(actions, dist)
    return actions
# ---------------------------- EXECUTION --------------------------
def execute_actions(actions, index=None, verbose=False):
    """Run an action plan, recording the change in the prefix's history."""
    plan = plan_from_actions(actions)
    with History(actions[inst.PREFIX]):
        inst.execute_instructions(plan, index, verbose)
def update_old_plan(old_plan):
    """Translate a legacy textual plan into (instruction, argument)
    tuples consumable by `conda.instructions.execute_instructions`."""
    new_plan = []
    for entry in old_plan:
        if entry.startswith('#'):
            continue  # comment line
        if ' ' not in entry:
            raise CondaException(
                "The instruction '%s' takes at least one argument" % entry
            )
        op, argument = entry.split(' ', 1)
        new_plan.append((op, argument))
    return new_plan
def execute_plan(old_plan, index=None, verbose=False):
    """
    Deprecated: This should `conda.instructions.execute_instructions` instead
    """
    # Convert the legacy textual plan, then delegate to the instruction runner.
    plan = update_old_plan(old_plan)
    inst.execute_instructions(plan, index, verbose)
if __name__ == '__main__':
    # for testing new revert_actions() only
    # Usage: python plan.py <revision>; prints the computed action plan.
    from pprint import pprint
    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
<|code_end|>
| conda/plan.py
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from logging import getLogger
from os.path import abspath, basename, dirname, join, exists
from . import instructions as inst
from .config import (always_copy as config_always_copy,
show_channel_urls as config_show_channel_urls,
root_dir, allow_softlinks, default_python, auto_update_conda,
track_features, foreign, url_channel, canonical_channel_name)
from .exceptions import CondaException
from .history import History
from .install import (dist2quad, LINK_HARD, link_name_map, name_dist, is_fetched,
is_extracted, is_linked, find_new_location, dist2filename, LINK_COPY,
LINK_SOFT, try_hard_link, rm_rf)
from .resolve import MatchSpec, Resolve, Package
from .utils import md5_file, human_bytes
# For backwards compatibility
log = getLogger(__name__)
def print_dists(dists_extras):
    """Print (dist, extra) pairs as a two-column package/build table."""
    fmt = " %-27s|%17s"
    print(fmt % ('package', 'build'))
    print(fmt % ('-' * 27, '-' * 17))
    for dist, extra in dists_extras:
        name, version, build = dist2quad(dist)[:3]
        row = fmt % (name + '-' + version, build)
        if extra:
            row += extra
        print(row)
def display_actions(actions, index, show_channel_urls=None):
    """Print a human-readable summary of an action plan: the download
    list plus the NEW / REMOVED / UPDATED / DOWNGRADED package tables."""
    if show_channel_urls is None:
        show_channel_urls = config_show_channel_urls

    def channel_str(rec):
        # Best available channel label for an index record.
        if 'schannel' in rec:
            return rec['schannel']
        if 'url' in rec:
            return url_channel(rec['url'])[1]
        if 'channel' in rec:
            return canonical_channel_name(rec['channel'])
        return '<unknown>'

    def channel_filt(s):
        # Hide all channels, or just 'defaults', depending on the setting.
        if show_channel_urls is False:
            return ''
        if show_channel_urls is None and s == 'defaults':
            return ''
        return s

    if actions.get(inst.FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[inst.FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            schannel = channel_filt(channel_str(info))
            if schannel:
                extra += ' ' + schannel
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        if index and len(actions[inst.FETCH]) > 1:
            num_bytes = sum(index[dist + '.tar.bz2']['size']
                            for dist in actions[inst.FETCH])
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))

    # package -> [oldver-oldbuild, newver-newbuild]; index 0 is the
    # unlinked (old) side, index 1 the linked (new) side.
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))
    channels = defaultdict(lambda: list(('', '')))
    records = defaultdict(lambda: list((None, None)))
    linktypes = {}

    for arg in actions.get(inst.LINK, []):
        dist, lt, shortcuts = inst.split_linkarg(arg)
        fkey = dist + '.tar.bz2'
        rec = index[fkey]
        pkg = rec['name']
        channels[pkg][1] = channel_str(rec)
        packages[pkg][1] = rec['version'] + '-' + rec['build']
        records[pkg][1] = Package(fkey, rec)
        linktypes[pkg] = lt
        features[pkg][1] = rec.get('features', '')
    for arg in actions.get(inst.UNLINK, []):
        dist, lt, shortcuts = inst.split_linkarg(arg)
        fkey = dist + '.tar.bz2'
        rec = index.get(fkey)
        if rec is None:
            # Dist being unlinked is no longer in the index; synthesize a
            # minimal record from its filename.
            pkg, ver, build, schannel = dist2quad(dist)
            rec = dict(name=pkg, version=ver, build=build, channel=None,
                       schannel='<unknown>',
                       build_number=int(build) if build.isdigit() else 0)
        pkg = rec['name']
        channels[pkg][0] = channel_str(rec)
        packages[pkg][0] = rec['version'] + '-' + rec['build']
        records[pkg][0] = Package(fkey, rec)
        features[pkg][0] = rec.get('features', '')

    # Packages with no "old" side are new installs; no "new" side, removals.
    new = {p for p in packages if not packages[p][0]}
    removed = {p for p in packages if not packages[p][1]}
    # New packages are actually listed in the left-hand column,
    # so let's move them over there
    for pkg in new:
        for var in (packages, features, channels, records):
            var[pkg] = var[pkg][::-1]

    if packages:
        maxpkg = max(len(p) for p in packages) + 1
        maxoldver = max(len(p[0]) for p in packages.values())
        maxnewver = max(len(p[1]) for p in packages.values())
        maxoldfeatures = max(len(p[0]) for p in features.values())
        maxnewfeatures = max(len(p[1]) for p in features.values())
        maxoldchannels = max(len(channel_filt(p[0])) for p in channels.values())
        maxnewchannels = max(len(channel_filt(p[1])) for p in channels.values())
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if maxoldchannels:
            oldfmt[pkg] += ' {channels[0]:<%s}' % maxoldchannels
        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures

        lt = linktypes.get(pkg, LINK_HARD)
        lt = '' if lt == LINK_HARD else (' (%s)' % link_name_map[lt])
        if pkg in removed or pkg in new:
            oldfmt[pkg] += lt
            continue

        newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        if maxnewchannels:
            newfmt[pkg] += ' {channels[1]:<%s}' % maxnewchannels
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
        newfmt[pkg] += lt

        P0 = records[pkg][0]
        P1 = records[pkg][1]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = ((P0.name, P0.norm_version, P0.build_number) <=
                     (P1.name, P1.norm_version, P1.build_number))
        except TypeError:
            newer = ((P0.name, P0.version, P0.build_number) <=
                     (P1.name, P1.version, P1.build_number))
        if newer or str(P1.version) == 'custom':
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' ' * 4

    def format(s, pkg):
        # NOTE(review): shadows the builtin format() within this function.
        chans = [channel_filt(c) for c in channels[pkg]]
        return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
                               channels=chans, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            # New packages have been moved to the "old" column for display
            print(format(oldfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
def nothing_to_do(actions):
    """Return True when *actions* holds no work under any action code."""
    return not any(actions.get(op) for op in inst.action_codes)
def add_unlink(actions, dist):
    """Append *dist* to the plan's UNLINK list, creating it if absent."""
    actions.setdefault(inst.UNLINK, []).append(dist)
def plan_from_actions(actions):
    """Flatten an actions mapping into an ordered list of (op, arg)
    instruction tuples, interleaving PRINT/PROGRESS markers.

    Mutates *actions* on Windows to pull menuinst operations to the front.
    """
    if 'op_order' in actions and actions['op_order']:
        op_order = actions['op_order']
    else:
        op_order = inst.action_codes

    assert inst.PREFIX in actions and actions[inst.PREFIX]
    res = [('PREFIX', '%s' % actions[inst.PREFIX])]

    if sys.platform == 'win32':
        # Always link/unlink menuinst first on windows in case a subsequent
        # package tries to import it to create/remove a shortcut
        for op in (inst.UNLINK, inst.FETCH, inst.EXTRACT, inst.LINK):
            if op in actions:
                pkgs = []
                for pkg in actions[op]:
                    if 'menuinst' in pkg:
                        res.append((op, pkg))
                    else:
                        pkgs.append(pkg)
                actions[op] = pkgs

    for op in op_order:
        if op not in actions:
            continue
        if not actions[op]:
            continue
        if '_' not in op:
            # e.g. "FETCH" -> "Fetching packages ..."
            res.append((inst.PRINT, '%sing packages ...' % op.capitalize()))
        elif op.startswith('RM_'):
            res.append((inst.PRINT, 'Pruning %s packages from the cache ...' % op[3:].lower()))
        if op in inst.progress_cmds:
            res.append((inst.PROGRESS, '%d' % len(actions[op])))
        for arg in actions[op]:
            res.append((op, arg))

    return res
# force_linked_actions has now been folded into this function, and is enabled by
# supplying an index and setting force=True
def ensure_linked_actions(dists, prefix, index=None, force=False,
                          always_copy=False, shortcuts=False):
    """Build the fetch/extract/link actions needed so that every dist in
    *dists* ends up linked into *prefix*.

    index/force -- when an index is supplied with force=True, cached
                   tarballs are MD5-verified and re-fetched on mismatch.
    """
    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
                           inst.EXTRACT, inst.UNLINK, inst.LINK)

    for dist in dists:
        fetched_in = is_fetched(dist)
        extracted_in = is_extracted(dist)

        if fetched_in and index is not None:
            # Test the MD5, and possibly re-fetch
            fn = dist + '.tar.bz2'
            try:
                if md5_file(fetched_in) != index[fn]['md5']:
                    # RM_FETCHED now removes the extracted data too
                    actions[inst.RM_FETCHED].append(dist)
                    # Re-fetch, re-extract, re-link
                    fetched_in = extracted_in = None
                    force = True
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)

        if not force and is_linked(prefix, dist):
            continue

        if extracted_in and force:
            # Always re-extract in the force case
            actions[inst.RM_EXTRACTED].append(dist)
            extracted_in = None

        # Otherwise we need to extract, and possibly fetch
        if not extracted_in and not fetched_in:
            # If there is a cache conflict, clean it up
            fetched_in, conflict = find_new_location(dist)
            fetched_in = join(fetched_in, dist2filename(dist))
            if conflict is not None:
                actions[inst.RM_FETCHED].append(conflict)
            actions[inst.FETCH].append(dist)

        if not extracted_in:
            actions[inst.EXTRACT].append(dist)

        fetched_dist = extracted_in or fetched_in[:-8]
        fetched_dir = dirname(fetched_dist)

        try:
            # Determine what kind of linking is necessary
            if not extracted_in:
                # If not already extracted, create some dummy
                # data to test with
                rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if config_always_copy or always_copy:
                lt = LINK_COPY
            elif try_hard_link(fetched_dir, prefix, dist):
                lt = LINK_HARD
            elif allow_softlinks and sys.platform != 'win32':
                lt = LINK_SOFT
            else:
                lt = LINK_COPY
            actions[inst.LINK].append('%s %d %s' % (dist, lt, shortcuts))
        except (OSError, IOError):
            # Hard-link probe failed; fall back to copying.
            actions[inst.LINK].append('%s %d %s' % (dist, LINK_COPY, shortcuts))
        finally:
            if not extracted_in:
                # Remove the dummy data
                try:
                    rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass

    return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
    """Report whether *prefix* refers to the root environment."""
    candidate = abspath(prefix)
    return candidate == abspath(root_dir)
def add_defaults_to_specs(r, linked, specs, update=False):
    """Append default python/lua specs to *specs* (mutated in place) when
    something being installed depends on them and no explicit version was
    requested.

    r      -- a Resolve instance over the current index
    linked -- dists currently linked into the environment
    update -- accepted for API compatibility; not used in this body
    """
    # TODO: This should use the pinning mechanism. But don't change the API:
    # cas uses it.
    if r.explicit(specs):
        return
    log.debug('H0 specs=%r' % specs)
    # Normalize linked dists to index keys (filenames ending in .tar.bz2).
    linked = [d if d.endswith('.tar.bz2') else d + '.tar.bz2' for d in linked]
    names_linked = {r.index[fn]['name']: fn for fn in linked if fn in r.index}
    mspecs = list(map(MatchSpec, specs))

    for name, def_ver in [('python', default_python),
                          # Default version required, but only used for Python
                          ('lua', None)]:
        if any(s.name == name and not s.is_simple() for s in mspecs):
            # if any of the specifications mention the Python/Numpy version,
            # we don't need to add the default spec
            log.debug('H1 %s' % name)
            continue

        depends_on = {s for s in mspecs if r.depends_on(s, name)}
        any_depends_on = bool(depends_on)
        log.debug('H2 %s %s' % (name, any_depends_on))

        if not any_depends_on:
            # if nothing depends on Python/Numpy AND the Python/Numpy is not
            # specified, we don't need to add the default spec
            log.debug('H2A %s' % name)
            continue

        if any(s.is_exact() for s in depends_on):
            # If something depends on Python/Numpy, but the spec is very
            # explicit, we also don't need to add the default spec
            log.debug('H2B %s' % name)
            continue

        if name in names_linked:
            # if Python/Numpy is already linked, we add that instead of the
            # default
            log.debug('H3 %s' % name)
            fkey = names_linked[name]
            info = r.index[fkey]
            ver = '.'.join(info['version'].split('.', 2)[:2])
            spec = '%s %s* (target=%s)' % (info['name'], ver, fkey)
            specs.append(spec)
            continue

        if name == 'python' and def_ver.startswith('3.'):
            # Don't include Python 3 in the specs if this is the Python 3
            # version of conda.
            continue

        if def_ver is not None:
            specs.append('%s %s*' % (name, def_ver))
    log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
    """Return the pinned spec strings recorded for *prefix*.

    Reads ``<prefix>/conda-meta/pinned`` and keeps only the lines that
    are non-empty and not comments; missing file yields [].
    """
    pin_path = join(prefix, 'conda-meta', 'pinned')
    if not exists(pin_path):
        return []
    with open(pin_path) as fh:
        content = fh.read().strip()
    return [line for line in content.splitlines()
            if line and not line.strip().startswith('#')]
def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False,
                    pinned=True, minimal_hint=False, update_deps=True, prune=False,
                    shortcuts=False):
    """Compute the action plan that installs *specs* into *prefix*.

    Solves the specs against *index*, then builds fetch/extract/link
    actions relative to the currently linked packages, plus unlinks for
    anything being replaced or pruned.
    """
    r = Resolve(index)
    linked = r.installed

    # NOTE(review): the appends below mutate the caller's *specs* list.
    if auto_update_conda and is_root_prefix(prefix):
        specs.append('conda')

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)
        specs += pinned_specs

    must_have = {}
    if track_features:
        specs.extend(x + '@' for x in track_features)

    pkgs = r.install(specs, linked, update_deps=update_deps)

    for fn in pkgs:
        dist = fn[:-8]          # strip '.tar.bz2'
        name = name_dist(dist)
        if not name or only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        for name in foreign:
            if name in must_have:
                del must_have[name]
    elif basename(prefix).startswith('_'):
        # anything (including conda) can be installed into environments
        # starting with '_', mainly to allow conda-build to build conda
        pass
    else:
        # disallow conda from being installed into all other environments
        if 'conda' in must_have or 'conda-env' in must_have:
            sys.exit("Error: 'conda' can only be installed into the "
                     "root environment")

    smh = r.dependency_sort(must_have)

    actions = ensure_linked_actions(
        smh, prefix,
        index=index if force else None,
        force=force, always_copy=always_copy,
        shortcuts=shortcuts)

    if actions[inst.LINK]:
        actions[inst.SYMLINK_CONDA] = [root_dir]

    # Unlink anything being replaced by a different build, or pruned away.
    for fkey in sorted(linked):
        dist = fkey[:-8]
        name = name_dist(dist)
        replace_existing = name in must_have and dist != must_have[name]
        prune_it = prune and dist not in smh
        if replace_existing or prune_it:
            add_unlink(actions, dist)

    return actions
def remove_actions(prefix, specs, index, force=False, pinned=True):
    """Compute the action plan that removes *specs* from *prefix*.

    With force=True only the packages directly matching *specs* are
    unlinked; otherwise the solver decides which dependents must go too.
    Raises RuntimeError when a pinned package would be removed, and exits
    when the removal would take 'conda' out of the root environment.
    """
    r = Resolve(index)
    linked = r.installed

    if force:
        mss = list(map(MatchSpec, specs))
        nlinked = {r.package_name(fn): fn[:-8]
                   for fn in linked
                   if not any(r.match(ms, fn) for ms in mss)}
    else:
        add_defaults_to_specs(r, linked, specs, update=True)
        nlinked = {r.package_name(fn): fn[:-8] for fn in r.remove(specs, linked)}

    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        log.debug("Pinned specs=%s" % pinned_specs)

    linked = {r.package_name(fn): fn[:-8] for fn in linked}

    actions = ensure_linked_actions(r.dependency_sort(nlinked), prefix)
    for old_fn in reversed(r.dependency_sort(linked)):
        dist = old_fn + '.tar.bz2'
        name = r.package_name(dist)
        if old_fn == nlinked.get(name, ''):
            continue
        if pinned and any(r.match(ms, dist) for ms in pinned_specs):
            # BUG FIX: corrected typo in the user-facing message ("becaue").
            msg = "Cannot remove %s because it is pinned. Use --no-pin to override."
            raise RuntimeError(msg % dist)
        if name == 'conda' and name not in nlinked:
            if any(s.split(' ', 1)[0] == 'conda' for s in specs):
                sys.exit("Error: 'conda' cannot be removed from the root environment")
            else:
                msg = ("Error: this 'remove' command cannot be executed because it\n"
                       "would require removing 'conda' dependencies")
                sys.exit(msg)
        add_unlink(actions, old_fn)

    return actions
def remove_features_actions(prefix, index, features):
    """Plan the removal of every linked package that tracks or uses any of
    *features*, linking a feature-free substitute when one exists."""
    r = Resolve(index)
    linked = r.installed

    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            continue
        if r.track_features(fn).intersection(features):
            add_unlink(actions, dist)
        if r.features(fn).intersection(features):
            add_unlink(actions, dist)
            subst = r.find_substitute(_linked, features, fn)
            if subst:
                to_link.append(subst[:-8])  # strip '.tar.bz2'

    if to_link:
        actions.update(ensure_linked_actions(to_link, prefix))
    return actions
def revert_actions(prefix, revision=-1):
    """Plan the link/unlink actions that restore *prefix* to *revision*
    of its recorded history (default: the most recent state)."""
    h = History(prefix)
    h.update()
    try:
        state = h.get_state(revision)
    except IndexError:
        sys.exit("Error: no such revision: %d" % revision)

    curr = h.get_state()
    if state == curr:
        # Already at the requested state; empty mapping means "no actions".
        return {}

    actions = ensure_linked_actions(state, prefix)
    for dist in curr - state:
        add_unlink(actions, dist)
    return actions
# ---------------------------- EXECUTION --------------------------
def execute_actions(actions, index=None, verbose=False):
    """Run an action plan, recording the change in the prefix's history."""
    plan = plan_from_actions(actions)
    with History(actions[inst.PREFIX]):
        inst.execute_instructions(plan, index, verbose)
def update_old_plan(old_plan):
    """Convert a legacy textual plan into the (instruction, argument)
    pairs that `conda.instructions.execute_instructions` expects."""
    converted = []
    for raw in old_plan:
        if raw.startswith('#'):
            continue  # comment line
        if ' ' not in raw:
            raise CondaException(
                "The instruction '%s' takes at least one argument" % raw
            )
        opcode, operand = raw.split(' ', 1)
        converted.append((opcode, operand))
    return converted
def execute_plan(old_plan, index=None, verbose=False):
    """
    Deprecated: This should `conda.instructions.execute_instructions` instead
    """
    # Convert the legacy textual plan, then delegate to the instruction runner.
    plan = update_old_plan(old_plan)
    inst.execute_instructions(plan, index, verbose)
if __name__ == '__main__':
    # for testing new revert_actions() only
    # Usage: python plan.py <revision>; prints the computed action plan.
    from pprint import pprint
    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
| conda/plan.py
--- a/conda/plan.py
+++ b/conda/plan.py
@@ -330,7 +330,7 @@ def ensure_linked_actions(dists, prefix, index=None, force=False,
actions[inst.LINK].append('%s %d %s' % (dist, lt, shortcuts))
except (OSError, IOError):
- actions[inst.LINK].append(dist, LINK_COPY, shortcuts)
+ actions[inst.LINK].append('%s %d %s' % (dist, LINK_COPY, shortcuts))
finally:
if not extracted_in:
# Remove the dummy data |
tarball install windows
@msarahan @mingwandroid What _should_ the `file://` url format be on Windows?
```
________________________ IntegrationTests.test_python3 ________________________
Traceback (most recent call last):
File "C:\projects\conda\tests\test_create.py", line 146, in test_python3
run_command(Commands.INSTALL, prefix, flask_tar_file)
File "C:\projects\conda\tests\test_create.py", line 104, in run_command
args.func(args, p)
File "C:\projects\conda\conda\cli\main_install.py", line 62, in execute
install(args, parser, 'install')
File "C:\projects\conda\conda\cli\install.py", line 195, in install
explicit(args.packages, prefix, verbose=not args.quiet)
File "C:\projects\conda\conda\misc.py", line 111, in explicit
index.update(fetch_index(channels, **fetch_args))
File "C:\projects\conda\conda\fetch.py", line 266, in fetch_index
for url in iterkeys(channel_urls)]
File "C:\projects\conda\conda\fetch.py", line 67, in func
res = f(*args, **kwargs)
File "C:\projects\conda\conda\fetch.py", line 149, in fetch_repodata
raise RuntimeError(msg)
RuntimeError: Could not find URL: file:///C|/projects/conda/
---------------------------- Captured stdout call -----------------------------
```
The relevant lines to look at here are line 64 in `conda/misc.py`
```
url_p = utils_url_path(url_p).rstrip('/')
```
and line 147 in `conda/utils.py`
```
def url_path(path):
path = abspath(path)
if sys.platform == 'win32':
path = '/' + path.replace(':', '|').replace('\\', '/')
return 'file://%s' % path
```
Help here is definitely appreciated.
| conda/config.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from .compat import urlparse, string_types
from .utils import try_write, yaml_load
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
# Python X.Y of the running interpreter; used as the default python spec.
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))

# ----- operating system and architecture -----

# sys.platform value -> conda platform name
_sys_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
    'openbsd5': 'openbsd',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__   # pointer size of this interpreter: 32 or 64
if force_32bit:
    bits = 32

# subdir is the platform directory name used in channel URLs, e.g. linux-64.
if platform == 'linux' and machine() in non_x86_linux_machines:
    arch_name = machine()
    subdir = 'linux-%s' % arch_name
else:
    arch_name = {64: 'x86_64', 32: 'x86'}[bits]
    subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'auto_update_conda',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
'root_dir',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
rc = root_dir = root_writable = BINSTAR_TOKEN_PAT = channel_alias = None
def get_rc_path():
    """Locate the active .condarc file.

    $CONDARC wins (a value of a single space disables rc loading);
    otherwise the first existing of the user rc and the system rc.
    Returns None when no rc applies.
    """
    env_path = os.getenv('CONDARC')
    if env_path == ' ':
        return None
    if env_path:
        return env_path
    for candidate in (user_rc_path, sys_rc_path):
        if isfile(candidate):
            return candidate
    return None
rc_path = get_rc_path()
def load_condarc_(path):
    """Parse the YAML condarc at *path*; {} for a missing or empty file."""
    if not (path and isfile(path)):
        return {}
    with open(path) as fh:
        return yaml_load(fh) or {}
sys_rc = load_condarc_(sys_rc_path) if isfile(sys_rc_path) else {}
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_env_name = 'root'
def _default_envs_dirs():
    """Default environment directories; user-local dirs are tried first
    when the root install is not writable."""
    dirs = [join(root_dir, 'envs')]
    if not root_writable:
        # ~/envs for backwards compatibility
        dirs = ['~/.conda/envs', '~/envs'] + dirs
    return dirs
def _pathsep_env(name):
x = os.getenv(name)
if x is None:
return []
res = []
for path in x.split(os.pathsep):
if path == 'DEFAULTS':
for p in rc.get('envs_dirs') or _default_envs_dirs():
res.append(p)
else:
res.append(path)
return res
def pkgs_dir_from_envs_dir(envs_dir):
    """Map an envs directory to its package cache directory."""
    root_envs = abspath(join(root_dir, 'envs'))
    if abspath(envs_dir) == root_envs:
        return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
    return join(envs_dir, '.pkgs')
# ----- channels -----
# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
    """Return the conda-build local channel URL list (cached in the
    module-level *local_channel*).

    clear_cache -- also reset fetch_index's memo so the local build repo
                   is refetched on the next index read.
    """
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    if clear_cache:
        from .fetch import fetch_index
        fetch_index.cache = {}
    if local_channel:
        return local_channel
    from os.path import exists
    from .utils import url_path
    try:
        from conda_build.config import croot
        if exists(croot):
            local_channel.append(url_path(croot))
    except ImportError:
        # conda-build is not installed -> there is no local channel
        pass
    return local_channel
def get_default_urls():
    """System-rc 'default_channels', else Continuum's free/pro channels."""
    try:
        return sys_rc['default_channels']
    except KeyError:
        return ['https://repo.continuum.io/pkgs/free',
                'https://repo.continuum.io/pkgs/pro']
def get_rc_urls():
    """Channel list from the active rc; 'system' is not allowed there."""
    channels = rc.get('channels')
    if channels is None:
        return []
    if 'system' in channels:
        raise RuntimeError("system cannot be used in .condarc")
    return channels
def is_url(url):
    """True when *url* is non-empty and parses with a scheme; falsy
    inputs are returned unchanged (matching `x and y` semantics)."""
    if not url:
        return url
    return urlparse.urlparse(url).scheme != ""
def binstar_channel_alias(channel_alias):
    """Return *channel_alias*, with the user's binstar token appended as
    't/<token>/' when token-adding is enabled and a token is available."""
    if channel_alias.startswith('file:/'):
        # Local file channels never carry tokens.
        return channel_alias
    if rc.get('add_anaconda_token',
              rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
        try:
            from binstar_client.utils import get_binstar
            bs = get_binstar()
            # binstar reports its api domain; the conda channel lives on
            # the matching "conda" host.
            bs_domain = bs.domain.replace("api", "conda").rstrip('/') + '/'
            if channel_alias.startswith(bs_domain) and bs.token:
                channel_alias += 't/%s/' % bs.token
        except ImportError:
            log.debug("Could not import binstar")
            pass
        except Exception as e:
            stderrlog.info("Warning: could not import binstar_client (%s)" % e)
    return channel_alias
def hide_binstar_tokens(url):
    """Mask an embedded binstar token in *url* with the literal t/<TOKEN>/."""
    masked = BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)
    return masked
def remove_binstar_tokens(url):
    """Strip any embedded binstar token segment from *url*."""
    stripped = BINSTAR_TOKEN_PAT.sub(r'\1', url)
    return stripped
def prioritize_channels(channels):
    """Map normalized channel URLs to (canonical_name, priority) pairs.

    Priority only increases when the canonical channel name changes, so
    consecutive URLs of the same channel (different platform subdirs)
    share one priority.
    """
    newchans = OrderedDict()
    lastchan = None
    priority = 0
    for channel in channels:
        channel = channel.rstrip('/') + '/'
        if channel not in newchans:
            # Drop the trailing <platform>/ component to name the channel.
            channel_s = canonical_channel_name(channel.rsplit('/', 2)[0])
            priority += channel_s != lastchan   # bool coerces to 0/1
            newchans[channel] = (channel_s, priority)
            lastchan = channel_s
    return newchans
def normalize_urls(urls, platform=None, offline_only=False):
    """Expand channel names and aliases into fully-qualified platform URLs.

    Handles the special names 'defaults', 'system' and 'local', prefixes
    bare channel names with the (possibly tokenized) channel alias, and
    appends <platform>/ and noarch/ subdirectories to every base URL.
    """
    defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls())
    alias = None
    newurls = []
    while urls:
        url = urls[0]
        urls = urls[1:]
        if url == "system" and rc_path:
            # 'system' with an rc file present: splice the rc channels in.
            urls = get_rc_urls() + urls
            continue
        elif url in ("defaults", "system"):
            t_urls = defaults
        elif url == "local":
            t_urls = get_local_urls()
        else:
            t_urls = [url]
        for url0 in t_urls:
            url0 = url0.rstrip('/')
            if not is_url(url0):
                # Bare channel name: prefix with the channel alias
                # (computed lazily, once).
                if alias is None:
                    alias = binstar_channel_alias(channel_alias)
                url0 = alias + url0
            if offline_only and not url0.startswith('file:'):
                continue
            for plat in (platform or subdir, 'noarch'):
                newurls.append('%s/%s/' % (url0, plat))
    return newurls
def get_channel_urls(platform=None, offline=False):
    """Return the fully normalized channel URLs for this install.

    CIO_TEST overrides everything; otherwise the .condarc channels are
    used when present, falling back to the default channels.
    """
    if os.getenv('CIO_TEST'):
        import cio_test
        base_urls = cio_test.base_urls
    elif 'channels' in rc:
        base_urls = ['system']
    else:
        base_urls = ['defaults']
    return normalize_urls(base_urls, platform, offline)
def canonical_channel_name(channel):
    """Map a channel URL to its short display name ('defaults', 'local',
    'filer', an alias-relative name, or the URL itself as a fallback)."""
    if channel is None:
        return '<unknown>'
    # Compare without any embedded access token or trailing slash.
    channel = remove_binstar_tokens(channel).rstrip('/')
    if any(channel.startswith(i) for i in get_default_urls()):
        return 'defaults'
    elif any(channel.startswith(i) for i in get_local_urls(clear_cache=False)):
        return 'local'
    elif channel.startswith('http://filer/'):
        return 'filer'
    elif channel.startswith(channel_alias):
        return channel.split(channel_alias, 1)[1]
    elif channel.startswith('http:/'):
        # Retry the lookup as https; keep the original if nothing matched.
        channel2 = 'https' + channel[4:]
        channel3 = canonical_channel_name(channel2)
        return channel3 if channel3 != channel2 else channel
    else:
        return channel
def url_channel(url):
    """Derive (channel URL, canonical channel name) from a package URL."""
    if url is None:
        return None, '<unknown>'
    base = url.rsplit('/', 2)[0]
    name = canonical_channel_name(base)
    # file:// URLs outside the local build channel keep their directory.
    if url.startswith('file://') and name != 'local':
        base = name = url.rsplit('/', 1)[0]
    return base, name
# ----- allowed channels -----
def get_allowed_channels():
    """Return the whitelist of channel URLs enforced by the system .condarc,
    or None when any channel is allowed."""
    if not isfile(sys_rc_path):
        return None
    if sys_rc.get('allow_other_channels', True):
        return None
    if 'channels' in sys_rc:
        base_urls = ['system']
    else:
        # NOTE(review): 'default' is not the 'defaults' keyword that
        # normalize_urls() special-cases -- confirm this spelling is intended.
        base_urls = ['default']
    return normalize_urls(base_urls)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """Return the proxy_servers mapping from .condarc (may be empty)."""
    servers = rc.get('proxy_servers') or {}
    if not isinstance(servers, dict):
        sys.exit("Error: proxy_servers setting not a mapping")
    return servers
def load_condarc(path):
    """Parse the condarc at *path* and (re)compute all derived module-level
    configuration: root/env/package directories, channel alias, token
    pattern, and assorted boolean/string settings.

    Every local variable is published as a module global via
    globals().update(locals()), so this function effectively (re)initializes
    the whole module.
    """
    rc = load_condarc_(path)
    root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
                                            rc.get('root_dir', sys.prefix))))
    root_writable = try_write(root_dir)
    # Publish early: helpers called below (e.g. _default_envs_dirs) read
    # root_dir/root_writable from module globals.
    globals().update(locals())
    envs_dirs = [abspath(expanduser(p)) for p in (
        _pathsep_env('CONDA_ENVS_PATH') or
        rc.get('envs_dirs') or
        _default_envs_dirs()
    )]
    pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]
    _default_env = os.getenv('CONDA_DEFAULT_ENV')
    if _default_env in (None, root_env_name):
        default_prefix = root_dir
    elif os.sep in _default_env:
        default_prefix = abspath(_default_env)
    else:
        # Prefer an existing env of that name; otherwise place it in the
        # first envs directory.
        for envs_dir in envs_dirs:
            default_prefix = join(envs_dir, _default_env)
            if isdir(default_prefix):
                break
        else:
            default_prefix = join(envs_dirs[0], _default_env)
    # ----- foreign -----
    try:
        with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
            foreign = fi.read().split()
    except IOError:
        foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']
    channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
    if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
        channel_alias = sys_rc['channel_alias']
    channel_alias = channel_alias.rstrip('/')
    _binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
    BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))
    # Normalize the alias itself so it never carries a token.
    channel_alias = BINSTAR_TOKEN_PAT.sub(r'\1', channel_alias + '/')
    offline = bool(rc.get('offline', False))
    add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
    always_yes = bool(rc.get('always_yes', False))
    always_copy = bool(rc.get('always_copy', False))
    changeps1 = bool(rc.get('changeps1', True))
    use_pip = bool(rc.get('use_pip', True))
    binstar_upload = rc.get('anaconda_upload',
                            rc.get('binstar_upload', None)) # None means ask
    allow_softlinks = bool(rc.get('allow_softlinks', True))
    auto_update_conda = bool(rc.get('auto_update_conda', rc.get('self_update', True)))
    # show channel URLs when displaying what is going to be downloaded
    show_channel_urls = rc.get('show_channel_urls', None) # None means letting conda decide
    # set packages disallowed to be installed
    disallow = set(rc.get('disallow', []))
    # packages which are added to a newly created environment by default
    create_default_packages = list(rc.get('create_default_packages', []))
    update_dependencies = bool(rc.get('update_dependencies', True))
    channel_priority = bool(rc.get('channel_priority', True))
    # ssl_verify can be a boolean value or a filename string
    ssl_verify = rc.get('ssl_verify', True)
    try:
        track_features = rc.get('track_features', [])
        if isinstance(track_features, string_types):
            track_features = track_features.split()
        track_features = set(track_features)
    except KeyError:
        track_features = None
    globals().update(locals())
    return rc
load_condarc(rc_path)
<|code_end|>
| conda/config.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from .compat import urlparse, string_types
from .utils import try_write, yaml_load
# Module-level loggers shared across this file.
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
# 'major.minor' of the running interpreter, e.g. '3.5'.
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))
# ----- operating system and architecture -----
# Map sys.platform values onto conda's platform names.
_sys_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
    'openbsd5': 'openbsd',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
# Pointer width of this interpreter, in bits.
bits = 8 * tuple.__itemsize__
if force_32bit:
    bits = 32
if platform == 'linux' and machine() in non_x86_linux_machines:
    arch_name = machine()
    subdir = 'linux-%s' % arch_name
else:
    arch_name = {64: 'x86_64', 32: 'x86'}[bits]
    # subdir is the repo subdirectory name, e.g. 'linux-64' or 'win-32'.
    subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
    'channels',
    'disallow',
    'create_default_packages',
    'track_features',
    'envs_dirs',
    'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
    'add_binstar_token',
    'add_anaconda_token',
    'add_pip_as_python_dependency',
    'always_yes',
    'always_copy',
    'allow_softlinks',
    'auto_update_conda',
    'changeps1',
    'use_pip',
    'offline',
    'binstar_upload',
    'anaconda_upload',
    'show_channel_urls',
    'allow_other_channels',
    'update_dependencies',
    'channel_priority',
]
rc_string_keys = [
    'ssl_verify',
    'channel_alias',
    'root_dir',
]
# Not supported by conda config yet
rc_other = [
    'proxy_servers',
]
# Per-user and per-install condarc locations.
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
# These are populated by load_condarc() at the bottom of the module.
rc = root_dir = root_writable = BINSTAR_TOKEN_PAT = channel_alias = None
def get_rc_path():
    """Locate the active .condarc file.

    $CONDARC overrides everything (a single space disables condarc files
    entirely); otherwise the first of the user/system files that exists.
    """
    env_path = os.getenv('CONDARC')
    if env_path == ' ':
        return None
    if env_path:
        return env_path
    for candidate in (user_rc_path, sys_rc_path):
        if isfile(candidate):
            return candidate
    return None
rc_path = get_rc_path()
def load_condarc_(path):
    """Parse a condarc YAML file; missing or empty files yield {}."""
    if path and isfile(path):
        with open(path) as f:
            return yaml_load(f) or {}
    return {}
sys_rc = load_condarc_(sys_rc_path) if isfile(sys_rc_path) else {}
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_env_name = 'root'  # reserved name of the root environment
def _default_envs_dirs():
    """Default environment directories: <root>/envs, preceded by per-user
    locations when the root prefix is not writable."""
    lst = [join(root_dir, 'envs')]
    if not root_writable:
        # ~/envs for backwards compatibility
        lst = ['~/.conda/envs', '~/envs'] + lst
    return lst
def _pathsep_env(name):
x = os.getenv(name)
if x is None:
return []
res = []
for path in x.split(os.pathsep):
if path == 'DEFAULTS':
for p in rc.get('envs_dirs') or _default_envs_dirs():
res.append(p)
else:
res.append(path)
return res
def pkgs_dir_from_envs_dir(envs_dir):
    """Return the package cache directory paired with *envs_dir*."""
    if abspath(envs_dir) != abspath(join(root_dir, 'envs')):
        # Non-root envs directories keep a private cache alongside them.
        return join(envs_dir, '.pkgs')
    return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
# ----- channels -----
# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
    """Return the local conda-build channel URL(s), discovering the
    conda-build croot on first use and caching it in the module-level
    local_channel list."""
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    if clear_cache:
        from .fetch import fetch_index
        fetch_index.cache = {}
    if local_channel:
        return local_channel
    from os.path import exists
    from .utils import url_path
    try:
        from conda_build.config import croot
        if exists(croot):
            local_channel.append(url_path(croot))
    except ImportError:
        # conda-build not installed: there is no local channel.
        pass
    return local_channel
def get_default_urls():
    """System-configured default channels, else Continuum's free/pro repos."""
    try:
        return sys_rc['default_channels']
    except KeyError:
        return ['https://repo.continuum.io/pkgs/free',
                'https://repo.continuum.io/pkgs/pro']
def get_rc_urls():
    """Return the channels listed in .condarc; the reserved spec 'system'
    may not appear there."""
    if rc.get('channels') is None:
        return []
    if 'system' in rc['channels']:
        raise RuntimeError("system cannot be used in .condarc")
    return rc['channels']
def is_url(url):
    """Return True if *url* is a real URL (has a netloc, or is a file: URL).

    Requiring a network location keeps Windows paths such as ``C:\\foo``
    (whose drive letter parses as a scheme) from being mistaken for URLs;
    ``file:`` URLs are special-cased because they have no netloc.  Always
    returns a bool (the original fell through to None for falsy input).
    """
    if not url:
        return False
    p = urlparse.urlparse(url)
    return p.netloc != "" or p.scheme == "file"
def binstar_channel_alias(channel_alias):
    """Return *channel_alias* with the user's Anaconda.org token appended
    as a 't/<token>/' segment when token injection is enabled."""
    if channel_alias.startswith('file:/'):
        return channel_alias
    if rc.get('add_anaconda_token',
              rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
        try:
            from binstar_client.utils import get_binstar
            bs = get_binstar()
            bs_domain = bs.domain.replace("api", "conda").rstrip('/') + '/'
            if channel_alias.startswith(bs_domain) and bs.token:
                channel_alias += 't/%s/' % bs.token
        except ImportError:
            # binstar_client is optional; fall back to an untokenized alias.
            log.debug("Could not import binstar")
            pass
        except Exception as e:
            stderrlog.info("Warning: could not import binstar_client (%s)" % e)
    return channel_alias
def hide_binstar_tokens(url):
    """Mask any embedded Anaconda.org token in *url* for safe display."""
    return BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)
def remove_binstar_tokens(url):
    """Strip any embedded Anaconda.org token segment from *url*."""
    return BINSTAR_TOKEN_PAT.sub(r'\1', url)
def prioritize_channels(channels):
    """Map each normalized channel URL to (canonical name, priority);
    consecutive URLs sharing a canonical name share a priority number."""
    newchans = OrderedDict()
    lastchan = None
    priority = 0
    for channel in channels:
        channel = channel.rstrip('/') + '/'
        if channel not in newchans:
            channel_s = canonical_channel_name(channel.rsplit('/', 2)[0])
            priority += channel_s != lastchan
            newchans[channel] = (channel_s, priority)
            lastchan = channel_s
    return newchans
def normalize_urls(urls, platform=None, offline_only=False):
    """Expand channel specs ('defaults', 'system', 'local', bare names,
    URLs) into concrete per-platform (plus noarch) repodata URLs."""
    defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls())
    alias = None  # resolved lazily; may contact binstar for a token
    newurls = []
    while urls:
        url = urls[0]
        urls = urls[1:]
        if url == "system" and rc_path:
            # 'system' splices the .condarc channels in at this position.
            urls = get_rc_urls() + urls
            continue
        elif url in ("defaults", "system"):
            t_urls = defaults
        elif url == "local":
            t_urls = get_local_urls()
        else:
            t_urls = [url]
        for url0 in t_urls:
            url0 = url0.rstrip('/')
            if not is_url(url0):
                # Bare channel name: resolve against the (tokenized) alias.
                if alias is None:
                    alias = binstar_channel_alias(channel_alias)
                url0 = alias + url0
            if offline_only and not url0.startswith('file:'):
                continue
            for plat in (platform or subdir, 'noarch'):
                newurls.append('%s/%s/' % (url0, plat))
    return newurls
def get_channel_urls(platform=None, offline=False):
    """Return the normalized channel URLs for this install."""
    if os.getenv('CIO_TEST'):
        import cio_test
        base_urls = cio_test.base_urls
    elif 'channels' in rc:
        base_urls = ['system']
    else:
        base_urls = ['defaults']
    res = normalize_urls(base_urls, platform, offline)
    return res
def canonical_channel_name(channel):
    """Map a channel URL to its short display name ('defaults', 'local',
    'filer', an alias-relative name, or the URL itself as a fallback)."""
    if channel is None:
        return '<unknown>'
    channel = remove_binstar_tokens(channel).rstrip('/')
    if any(channel.startswith(i) for i in get_default_urls()):
        return 'defaults'
    elif any(channel.startswith(i) for i in get_local_urls(clear_cache=False)):
        return 'local'
    elif channel.startswith('http://filer/'):
        return 'filer'
    elif channel.startswith(channel_alias):
        return channel.split(channel_alias, 1)[1]
    elif channel.startswith('http:/'):
        # Retry the lookup as https; keep the original if nothing matched.
        channel2 = 'https' + channel[4:]
        channel3 = canonical_channel_name(channel2)
        return channel3 if channel3 != channel2 else channel
    else:
        return channel
def url_channel(url):
    """Derive (channel URL, canonical channel name) from a package URL."""
    if url is None:
        return None, '<unknown>'
    channel = url.rsplit('/', 2)[0]
    schannel = canonical_channel_name(channel)
    if url.startswith('file://') and schannel != 'local':
        channel = schannel = url.rsplit('/', 1)[0]
    return channel, schannel
# ----- allowed channels -----
def get_allowed_channels():
    """Whitelist of channel URLs enforced by the system .condarc, or None
    when any channel is allowed."""
    if not isfile(sys_rc_path):
        return None
    if sys_rc.get('allow_other_channels', True):
        return None
    if 'channels' in sys_rc:
        base_urls = ['system']
    else:
        # NOTE(review): 'default' is not the 'defaults' keyword recognized
        # by normalize_urls() -- confirm this spelling is intentional.
        base_urls = ['default']
    return normalize_urls(base_urls)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """Return the proxy_servers mapping from .condarc (may be empty)."""
    res = rc.get('proxy_servers') or {}
    if isinstance(res, dict):
        return res
    sys.exit("Error: proxy_servers setting not a mapping")
def load_condarc(path):
    """Parse the condarc at *path* and (re)compute all derived module-level
    configuration; every local is published as a module global via
    globals().update(locals())."""
    rc = load_condarc_(path)
    root_dir = abspath(expanduser(os.getenv('CONDA_ROOT',
                                            rc.get('root_dir', sys.prefix))))
    root_writable = try_write(root_dir)
    # Publish early: helpers called below read these from module globals.
    globals().update(locals())
    envs_dirs = [abspath(expanduser(p)) for p in (
        _pathsep_env('CONDA_ENVS_PATH') or
        rc.get('envs_dirs') or
        _default_envs_dirs()
    )]
    pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]
    _default_env = os.getenv('CONDA_DEFAULT_ENV')
    if _default_env in (None, root_env_name):
        default_prefix = root_dir
    elif os.sep in _default_env:
        default_prefix = abspath(_default_env)
    else:
        # Prefer an existing env of that name; otherwise use the first
        # envs directory.
        for envs_dir in envs_dirs:
            default_prefix = join(envs_dir, _default_env)
            if isdir(default_prefix):
                break
        else:
            default_prefix = join(envs_dirs[0], _default_env)
    # ----- foreign -----
    try:
        with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
            foreign = fi.read().split()
    except IOError:
        foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']
    channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
    if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
        channel_alias = sys_rc['channel_alias']
    channel_alias = channel_alias.rstrip('/')
    _binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
    BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))
    # Normalize the alias itself so it never carries a token.
    channel_alias = BINSTAR_TOKEN_PAT.sub(r'\1', channel_alias + '/')
    offline = bool(rc.get('offline', False))
    add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
    always_yes = bool(rc.get('always_yes', False))
    always_copy = bool(rc.get('always_copy', False))
    changeps1 = bool(rc.get('changeps1', True))
    use_pip = bool(rc.get('use_pip', True))
    binstar_upload = rc.get('anaconda_upload',
                            rc.get('binstar_upload', None)) # None means ask
    allow_softlinks = bool(rc.get('allow_softlinks', True))
    auto_update_conda = bool(rc.get('auto_update_conda', rc.get('self_update', True)))
    # show channel URLs when displaying what is going to be downloaded
    show_channel_urls = rc.get('show_channel_urls', None) # None means letting conda decide
    # set packages disallowed to be installed
    disallow = set(rc.get('disallow', []))
    # packages which are added to a newly created environment by default
    create_default_packages = list(rc.get('create_default_packages', []))
    update_dependencies = bool(rc.get('update_dependencies', True))
    channel_priority = bool(rc.get('channel_priority', True))
    # ssl_verify can be a boolean value or a filename string
    ssl_verify = rc.get('ssl_verify', True)
    try:
        track_features = rc.get('track_features', [])
        if isinstance(track_features, string_types):
            track_features = track_features.split()
        track_features = set(track_features)
    except KeyError:
        track_features = None
    globals().update(locals())
    return rc
load_condarc(rc_path)
| conda/config.py
--- a/conda/config.py
+++ b/conda/config.py
@@ -195,7 +195,9 @@ def get_rc_urls():
return rc['channels']
def is_url(url):
- return url and urlparse.urlparse(url).scheme != ""
+ if url:
+ p = urlparse.urlparse(url)
+ return p.netloc != "" or p.scheme == "file"
def binstar_channel_alias(channel_alias):
if channel_alias.startswith('file:/'): |
conda config --set show_channel_urls yes doesn't work anymore
This is happening since the latest conda update:
``` bat
λ conda config --set show_channel_urls yes
Error: Key: show_channel_urls; yes is not a YAML boolean.
```
It happens with both conda 4.1.1 (local Windows, Python 3.5) and 4.1.0 (AppVeyor, https://ci.appveyor.com/project/mdboom/matplotlib/build/1.0.1774), but it still worked with 4.0.8 (https://ci.appveyor.com/project/mdboom/matplotlib/build/1.0.1765/job/bkldg98f8p087xmf)
| conda/cli/main_config.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, error_and_exit, exception_and_exit,
stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump
# Short description shown in `conda config --help` and the man page.
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
# Long-form epilog describing list and boolean key semantics.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class CouldntParse(NotImplementedError):
    """Raised when the condarc file cannot be edited structurally."""

    def __init__(self, reason):
        message = """Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s""" % reason
        self.args = [message]
class SingleValueKey(Completer):
    # Tab-completion candidates for --set: scalar keys plus the YAML
    # boolean spellings.
    def _get_items(self):
        return rc_bool_keys + \
               rc_string_keys + \
               ['yes', 'no', 'on', 'off', 'true', 'false']
class ListKey(Completer):
    # Tab-completion candidates for --add: the list-valued keys.
    def _get_items(self):
        return rc_list_keys
class BoolOrListKey(Completer):
    # Tab-completion candidates for --get: list and boolean keys.
    def __contains__(self, other):
        return other in self.get_items()
    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Register the `conda config` subcommand and all of its arguments
    on *sub_parsers*."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)
    # TODO: use argparse.FileType
    # Target-file selection: --system and --file are mutually exclusive.
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
                                user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )
    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar=('KEY'),
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to a list key. The default
behavior is to prepend.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for `conda config`: report expected failures as JSON
    when --json was given, otherwise let them propagate."""
    try:
        execute_config(args, parser)
    except (CouldntParse, NotImplementedError) as exc:
        if not args.json:
            raise
        exception_and_exit(exc, json=True)
def execute_config(args, parser):
    """Implement `conda config`: query (--get) and edit (--add / --set /
    --remove / --remove-key) the selected .condarc file, then rewrite it.

    Fixes `conda config --set <bool_key> yes`: the YAML loader used here
    follows YAML 1.2, where 'yes'/'no'/'on'/'off' parse as plain strings,
    so the YAML 1.1 boolean spellings promised by the help text are
    coerced to booleans explicitly before validation.
    """
    json_warnings = []
    json_get = {}
    # Pick the target file: --system beats --file beats the user rc.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path
    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh)
    else:
        rc_config = {}
    # add `defaults` channel if creating new condarc file or channel key doesn't exist currently
    if 'channels' not in rc_config:
        # now check to see if user wants to modify channels at all
        if any('channels' in item[0] for item in args.add):
            # don't need to insert defaults if it's already in args
            if ['channels', 'defaults'] not in args.add:
                args.add.insert(0, ['channels', 'defaults'])
    # Get
    if args.get is not None:
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue
            if args.json:
                json_get[key] = rc_config[key]
                continue
            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))
    # Add
    for key, item in args.add:
        if key not in rc_list_keys:
            error_and_exit("key must be one of %s, not %r" %
                           (', '.join(rc_list_keys), key), json=args.json,
                           error_type="ValueError")
        if not isinstance(rc_config.get(key, []), list):
            bad = rc_config[key].__class__.__name__
            raise CouldntParse("key %r should be a list, not %s." % (key, bad))
        if key == 'default_channels' and rc_path != sys_rc_path:
            msg = "'default_channels' is only configurable for system installs"
            raise NotImplementedError(msg)
        if item in rc_config.get(key, []):
            # Right now, all list keys should not contain duplicates
            message = "Skipping %s: %s, item already exists" % (key, item)
            if not args.json:
                print(message, file=sys.stderr)
            else:
                json_warnings.append(message)
            continue
        rc_config.setdefault(key, []).insert(0, item)
    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        yamlitem = yaml_load(item)
        if key in set_bools:
            # YAML 1.2 loads 'yes'/'no'/'on'/'off' as strings, not booleans.
            # Accept the YAML 1.1 spellings the help text documents.
            if isinstance(yamlitem, string_types):
                lowered = yamlitem.lower()
                if lowered in ('yes', 'on', 'true'):
                    yamlitem = True
                elif lowered in ('no', 'off', 'false'):
                    yamlitem = False
            if not isinstance(yamlitem, bool):
                error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
                               json=args.json, error_type="TypeError")
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            error_and_exit("Error key must be one of %s, not %s" %
                           (', '.join(set_bools | set_strings), key), json=args.json,
                           error_type="ValueError")
    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        if item not in rc_config[key]:
            error_and_exit("%r is not in the %r key of the config file" %
                           (item, key), json=args.json, error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]
    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        del rc_config[key]
    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))
    if args.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
<|code_end|>
conda/connection.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import requests
import tempfile
from io import BytesIO
from logging import getLogger
from . import __version__ as VERSION
from .compat import urlparse, StringIO
from .config import platform as config_platform, ssl_verify, get_proxy_servers
from .utils import gnu_get_libc_version
# Default number of HTTP retry attempts for CondaSession.
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
               "requests/{requests_ver} "
               "{python}/{py_ver} "
               "{system}/{kernel} {dist}/{ver}")
glibc_ver = gnu_get_libc_version()
if config_platform == 'linux':
    distinfo = platform.linux_distribution()
    dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
    dist = 'OSX'
    ver = platform.mac_ver()[0]
else:
    # Fall back to generic system name/version (e.g. Windows).
    dist = platform.system()
    ver = platform.version()
user_agent = _user_agent.format(conda_ver=VERSION,
                                requests_ver=requests.__version__,
                                python=platform.python_implementation(),
                                py_ver=platform.python_version(),
                                system=platform.system(), kernel=platform.release(),
                                dist=dist, ver=ver)
if glibc_ver:
    user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """requests.Session preconfigured for conda: proxies, HTTP retries,
    custom User-Agent, SSL verification setting, and extra transport
    adapters for file://, ftp:// and s3:// URLs."""
    # Default request timeout; None means wait indefinitely.
    timeout = None
    def __init__(self, *args, **kwargs):
        # 'retries' is conda-specific and must not reach requests.Session.
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies
        self.auth = NullAuth() # disable .netrc file. for reference, see
        # https://github.com/Anaconda-Platform/anaconda-client/pull/298
        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
        # Enable s3:// urls
        self.mount("s3://", S3Adapter())
        self.headers['User-Agent'] = user_agent
        self.verify = ssl_verify
class NullAuth(requests.auth.AuthBase):
    '''force requests to ignore the ``.netrc``
    Some sites do not support regular authentication, but we still
    want to store credentials in the ``.netrc`` file and submit them
    as form elements. Without this, requests would otherwise use the
    .netrc which leads, on some sites, to a 401 error.
    https://github.com/kennethreitz/requests/issues/2773
    Use with::
        requests.get(url, auth=NullAuth())
    '''
    def __call__(self, r):
        # Returning the request untouched disables authentication entirely.
        return r
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter serving s3:// URLs through boto: the object is
    downloaded to a temporary file that backs the response body."""
    def __init__(self):
        super(S3Adapter, self).__init__()
        # Path of the temp file holding the last downloaded object, if any.
        self._temp_file = None
    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Fetch request.url from S3; 404 on any missing bucket/key or
        when boto is unavailable."""
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            import boto
            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser
            def get(self, section, name, default=None, **kw):
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except:
                    val = default
                return val
            Config.get = get
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp
        conn = boto.connect_s3()
        bucket_name, key_string = url_to_S3_info(request.url)
        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)
        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp
        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })
            # Download to a temp file and expose it as the raw body stream.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404
        return resp
    def close(self):
        # Remove the temp file created by the last successful send().
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """
    Split an s3:// *url* into its (bucket, key) pair.
    """
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter serving file:// URLs straight from the local
    filesystem."""
    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Open the file behind request.url; 404 when stat() fails."""
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            # Synthesize the HTTP-style metadata headers from the stat info.
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close
        return resp
    def close(self):
        # Nothing to clean up; the response owns the open file handle.
        pass
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    # Strip the scheme and any leading slashes, then URL-decode.
    filename = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(filename):
        # "c|foo" / "c:foo" forms -> restore the "c:" drive prefix.
        filename = filename[0] + ':' + filename[2:]
    elif not filename.startswith(r'\\'):
        # if not a Windows UNC path
        filename = '/' + filename
    return filename
# Matches a Windows drive prefix such as "c:" or "c|" at the start of a path.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''

    def __init__(self):
        super(FTPAdapter, self).__init__()
        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)
        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)
        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)
        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)
        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()
        # Get the method and attempt to find the function to call.
        resp = self.func_table[request.method](path, request)
        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()
        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()
        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close
        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)
        # Split into the path and the filename.
        path, filename = os.path.split(path)
        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)
        # Close the connection and build the response.
        self.conn.close()
        response = build_binary_response(request, BytesIO(), code)
        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()
        # Alias the close method.
        data.release_conn = data.close
        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))
        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)
        # Close the connection.
        self.conn.close()
        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')
        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]
            # Decode the base64 encoded string. On Python 3, b64decode
            # returns bytes, and bytes.split(':') with a str separator raises
            # TypeError -- so decode to text first (a no-op fix on Python 2).
            decoded = base64.b64decode(encoded)
            if isinstance(decoded, bytes):
                decoded = decoded.decode('utf-8')
            # The string is of the form 'username:password'. Split on the
            # *first* colon only: RFC 7617 allows the password itself to
            # contain colons (previously the password was truncated there).
            username, password = decoded.split(':', 1)
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path
        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]
        host = parsed.hostname
        # ftplib treats port 0 as "use the default FTP port".
        port = parsed.port or 0
        return (host, port, path)
def data_callback_factory(variable):
    """Return an FTP-library callback that appends each received chunk to
    *variable*, which must be a file-like object (anything with ``write``).
    """
    def _write_chunk(chunk):
        variable.write(chunk)
    return _write_chunk
class AuthError(Exception):
    '''Denotes an error with authentication (e.g. an ``Authorization``
    header that is not HTTP Basic auth).'''
    pass
def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    # ASCII is a safe default for FTP directory listings.
    return build_response(request, data, code, encoding='ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    # No encoding: leave the payload as raw bytes.
    return build_response(request, data, code, encoding=None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.

    *code* is the ftplib status string (e.g. "226 Transfer complete"); its
    leading integer becomes the response's status code.
    '''
    status_code = int(code.split()[0])
    resp = requests.Response()
    resp.encoding = encoding
    # Fill in some useful fields.
    resp.raw = data
    resp.url = request.url
    resp.request = request
    resp.status_code = status_code
    # Make sure to seek the file-like raw object back to the start.
    resp.raw.seek(0)
    # Run the response hook.
    return requests.hooks.dispatch_hook('response', request.hooks, resp)
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])
    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)
    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()
    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # cgi.parse_multipart returns the chunks as bytes on Python 3, so join
    # with a bytes separator; ''.join() raised TypeError there. b''.join()
    # also works on Python 2, where the chunks are str.
    buf.write(b''.join(filedata))
    buf.seek(0)
    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url string.

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.
    This more or less round-trips with :func:`.parse_url`; the result may
    not be byte-identical to the original url but is equivalent by the RFC
    (e.g., urls with a blank port).
    """
    scheme, auth, host, port, path, query, fragment = U
    pieces = []
    # "is not None" checks on purpose: empty strings (and port 0) must
    # still be emitted.
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
<|code_end|>
conda/utils.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir, join
import os
import re
import subprocess
import tempfile
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).

    List arguments are converted to tuples so they can be cached; any other
    unhashable argument disables caching for that call.
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kw):
        # collections.Hashable moved to collections.abc in Python 3.3 and
        # the old alias was removed in 3.10; resolve it compatibly here so
        # the decorator keeps working on both old and new interpreters.
        try:
            from collections.abc import Hashable
        except ImportError:  # Python 2
            from collections import Hashable
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # A tuple copy of a list is hashable and order-preserving.
                newargs.append(tuple(arg))
            elif not isinstance(arg, Hashable):
                # uncacheable. a list, for instance.
                # better to not cache than blow up.
                return self.func(*args, **kw)
            else:
                newargs.append(arg)
        newargs = tuple(newargs)
        key = (newargs, frozenset(sorted(kw.items())))
        if key in self.cache:
            return self.cache[key]
        else:
            value = self.func(*args, **kw)
            self.cache[key] = value
            return value
# For instance methods only
class memoize(object):  # 577452
    """Memoizing decorator for instance methods.

    Results are cached on the instance itself, keyed by the method, its
    positional arguments and its keyword arguments. Accessing the method on
    the class (not an instance) yields the undecorated function.
    """
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype=None):
        return self.func if obj is None else partial(self, obj)

    def __call__(self, *args, **kw):
        instance = args[0]
        # The attribute name matches what ``self.__cache`` would mangle to
        # inside this class, so the cache lives on the instance.
        cache = getattr(instance, '_memoize__cache', None)
        if cache is None:
            cache = {}
            instance._memoize__cache = cache
        key = (self.func, args[1:], frozenset(sorted(kw.items())))
        if key not in cache:
            cache[key] = self.func(*args, **kw)
        return cache[key]
@memoized
def gnu_get_libc_version():
    """
    If on linux, get installed version of glibc, otherwise return None
    """
    if not sys.platform.startswith('linux'):
        return None
    from ctypes import CDLL, cdll, c_char_p
    # Make sure libc is loaded, then look up glibc's version entry point.
    cdll.LoadLibrary('libc.so.6')
    libc = CDLL('libc.so.6')
    version_func = libc.gnu_get_libc_version
    version_func.restype = c_char_p
    return version_func()
def can_open(file):
    """
    Return True if the given ``file`` can be opened for writing
    """
    try:
        # Append-binary mode probes writability without touching contents.
        with open(file, "ab"):
            pass
    except IOError:
        stderrlog.info("Unable to open %s\n" % file)
        return False
    return True
def can_open_all(files):
    """
    Return True if all of the provided ``files`` can be opened
    """
    # all() short-circuits on the first unopenable file, like the
    # original explicit loop did.
    return all(can_open(f) for f in files)
def can_open_all_files_in_prefix(prefix, files):
    """
    Returns True if all ``files`` at a given ``prefix`` can be opened
    """
    full_paths = (os.path.join(prefix, f) for f in files)
    return can_open_all(full_paths)
def try_write(dir_path):
    """Return True when a probe file can be created and removed inside
    *dir_path*.

    Actually writing a file is the only reliable writability check across
    platforms; see #2151.
    """
    if not isdir(dir_path):
        return False
    probe = join(dir_path, '.conda-try-write-%d' % os.getpid())
    try:
        with open(probe, mode='wb') as fo:
            fo.write(b'This is a test file.\n')
        os.unlink(probe)
    except (IOError, OSError):
        return False
    return True
def hashsum_file(path, mode='md5'):
    """Return the hex digest of the file at *path* using algorithm *mode*."""
    digest = hashlib.new(mode)
    with open(path, 'rb') as fi:
        # process chunks of 256KB; iter() stops on the empty-bytes sentinel
        for chunk in iter(lambda: fi.read(262144), b''):
            digest.update(chunk)
    return digest.hexdigest()
def md5_file(path):
    """Return the MD5 hex digest of the file at *path*."""
    return hashsum_file(path, mode='md5')
def url_path(path):
    """Convert a local filesystem path to a ``file://`` URL string."""
    absolute = abspath(path)
    if sys.platform == 'win32':
        # Windows drive letters become "|" and backslashes become "/".
        absolute = '/' + absolute.replace(':', '|').replace('\\', '/')
    return 'file://%s' % absolute
def run_in(command, shell, cwd=None, env=None):
    """Run *command* under the given *shell* and return [stdout, stderr].

    :param command: the shell snippet to execute
    :param shell: a key into the module-level ``shells`` table, or a shell
        dict itself (its "exe" entry is then used as the key)
    :param cwd: working directory for the child process
    :param env: environment mapping for the child process
    :return: list of two text strings [stdout, stderr], decoded as UTF-8
        with Windows line endings normalized and trailing newlines stripped
    """
    if hasattr(shell, "keys"):
        shell = shell["exe"]
    if shell == 'cmd.exe':
        # cmd.exe cannot reliably take a multi-line command on the command
        # line, so write it to a temporary batch script and execute that.
        cmd_script = tempfile.NamedTemporaryFile(suffix='.bat', mode='wt', delete=False)
        cmd_script.write(command)
        cmd_script.close()
        cmd_bits = [shells[shell]["exe"]] + shells[shell]["shell_args"] + [cmd_script.name]
        try:
            p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                 cwd=cwd, env=env)
            stdout, stderr = p.communicate()
        finally:
            os.unlink(cmd_script.name)
    elif shell == 'powershell':
        raise NotImplementedError
    else:
        cmd_bits = ([shells[shell]["exe"]] + shells[shell]["shell_args"] +
                    [translate_stream(command, shells[shell]["path_to"])])
        # Bug fix: cwd and env were silently dropped on this branch while the
        # cmd.exe branch honored them; pass them through here as well.
        p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             cwd=cwd, env=env)
        stdout, stderr = p.communicate()
    streams = [u"%s" % stream.decode('utf-8').replace('\r\n', '\n').rstrip("\n")
               for stream in (stdout, stderr)]
    return streams
def path_identity(path):
    """No-op path converter: return *path* unchanged.

    Used to fill a converter slot in the shell tables when no path
    translation is necessary.
    """
    return path
def win_path_to_unix(path, root_prefix=""):
    """Convert a path or ;-separated string of paths into a unix representation

    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

    def _convert_one(match):
        # "C:\a\b" -> "/C/a/b" (drop colon, flip slashes, collapse doubles)
        drive_path = match.group(1).replace("\\", "/").replace(":", "").replace("//", "/")
        return root_prefix + "/" + drive_path

    # ";" separators between converted paths become ":" separators.
    return re.sub(path_re, _convert_one, path).replace(";/", ":/")
def unix_path_to_win(path, root_prefix=""):
    """Convert a path or :-separated string of paths into a Windows representation

    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    looks_like_windows = len(path) > 1 and (
        ";" in path or (path[1] == ":" and path.count(":") == 1))
    if looks_like_windows:
        # already a windows path
        return path.replace("/", "\\")
    path_re = root_prefix + r'(/[a-zA-Z]/(?:(?![:\s]/)[^:*?"<>])*)'

    def _convert_one(match):
        # "/c/a/b" -> "c:\a\b"
        whole = match.group(0)
        drive = whole[len(root_prefix) + 1]
        rest = whole[len(root_prefix) + 2:].replace("/", "\\")
        return "{0}:{1}".format(drive, rest)

    converted = re.sub(path_re, _convert_one, path)
    # ":" separators between converted windows paths become ";" separators.
    converted = re.sub(":([a-zA-Z]):\\\\",
                       lambda match: ";" + match.group(0)[1] + ":\\",
                       converted)
    return converted
# curry cygwin functions
def win_path_to_cygwin(path):
    """Convert a Windows path string to its /cygdrive-rooted Cygwin form."""
    return win_path_to_unix(path, root_prefix="/cygdrive")
def cygwin_path_to_win(path):
    """Convert a /cygdrive-rooted Cygwin path back to Windows form."""
    return unix_path_to_win(path, root_prefix="/cygdrive")
def translate_stream(stream, translator):
    """Apply *translator* to every line of *stream* and rejoin with newlines."""
    translated_lines = [translator(line) for line in stream.split("\n")]
    return "\n".join(translated_lines)
def human_bytes(n):
    """
    Return the number of bytes n in more human readable form.

    Uses binary units (1 KB == 1024 B) and widens the precision as the
    unit grows: B and KB are integral, MB has one decimal, GB has two.
    """
    if n < 1024:
        return '%d B' % n
    kib = n / 1024
    if kib < 1024:
        return '%d KB' % round(kib)
    mib = kib / 1024
    if mib < 1024:
        return '%.1f MB' % mib
    return '%.2f GB' % (mib / 1024)
# This is necessary for Windows, for linking the environment, and for printing the correct
# activation instructions on Windows, depending on the shell type. It would be great to
# get rid of it, but I don't know how to otherwise detect which shell is used to create
# or install conda packages.
def find_parent_shell(path=False, max_stack_depth=10):
    """return process name or path of parent. Default is to return only name of process.

    Walks up the process tree (via psutil) past conda/python/py.test wrapper
    processes to find the shell that launched conda.

    :param path: if True, return the parent's executable path instead of its name
    :param max_stack_depth: how many ancestor levels to walk before giving up
    :return: parent process name (or path), or None when psutil is missing
    """
    try:
        import psutil
    except ImportError:
        # psutil is optional; without it the process tree can't be inspected.
        stderrlog.warn("No psutil available.\n"
                       "To proceed, please conda install psutil")
        return None
    process = psutil.Process()
    pname = process.parent().name().lower()
    stack_depth = 0
    # Skip over intermediate conda/python/test-runner processes, bounded by
    # max_stack_depth so a pathological tree can't loop forever.
    while (any(proc in pname for proc in ["conda", "python", "py.test"]) and
           stack_depth < max_stack_depth):
        if process:
            process = process.parent()
            # NOTE(review): this reads process.parent()'s name while `process`
            # itself only advanced one level -- confirm the extra .parent()
            # hop is intentional and can't hit a None parent at the tree root.
            pname = process.parent().name().lower()
            stack_depth += 1
        else:
            # fallback defaults to system default
            if sys.platform == 'win32':
                return 'cmd.exe'
            else:
                return 'bash'
    if path:
        return process.parent().exe()
    return process.parent().name()
@memoized
def get_yaml():
    """Return the first importable YAML module, preferring ruamel variants.

    Exits the process with an instructive message when no YAML library is
    installed at all.
    """
    yaml = None
    try:
        import ruamel_yaml as yaml
    except ImportError:
        pass
    if yaml is None:
        try:
            import ruamel.yaml as yaml
        except ImportError:
            pass
    if yaml is None:
        try:
            import yaml
        except ImportError:
            sys.exit("No yaml library available.\n"
                     "To proceed, please conda install ruamel_yaml")
    return yaml
def yaml_load(filehandle):
    """Parse YAML from *filehandle*, round-tripping when ruamel is available."""
    yaml_mod = get_yaml()
    try:
        # ruamel: preserve comments/ordering and force YAML 1.2 semantics.
        return yaml_mod.load(filehandle, Loader=yaml_mod.RoundTripLoader, version="1.2")
    except AttributeError:
        # plain pyyaml has no RoundTripLoader
        return yaml_mod.load(filehandle)
def yaml_dump(string):
    """Serialize *string* (any YAML-serializable object) to YAML text."""
    yaml_mod = get_yaml()
    try:
        # ruamel round-trip dumper keeps comments and uses conda's indenting.
        return yaml_mod.dump(string, Dumper=yaml_mod.RoundTripDumper,
                             block_seq_indent=2, default_flow_style=False,
                             indent=4)
    except AttributeError:
        # plain pyyaml has no RoundTripDumper
        return yaml_mod.dump(string, default_flow_style=False)
# TODO: this should be done in a more extensible way
# (like files for each shell, with some registration mechanism.)
# defaults for unix shells. Note: missing "exe" entry, which should be set to
# either an executable on PATH, or a full path to an executable for a shell
# Template of shell-specific settings shared by all unix-like shells. Each
# entry is either a literal snippet of shell syntax or a path-converter
# callable (see path_identity / win_path_to_unix / unix_path_to_win above).
unix_shell_base = dict(
    binpath="/bin/",  # mind the trailing slash.
    echo="echo",
    env_script_suffix=".sh",
    nul='2>/dev/null',
    path_from=path_identity,
    path_to=path_identity,
    pathsep=":",
    printdefaultenv='echo $CONDA_DEFAULT_ENV',
    printpath="echo $PATH",
    printps1='echo $PS1',
    promptvar='PS1',
    sep="/",
    set_var='export ',
    shell_args=["-l", "-c"],
    shell_suffix="",
    slash_convert=("\\", "/"),
    source_setup="source",
    test_echo_extra="",
    var_format="${}",
)
# MSYS2 bash on Windows: unix syntax, but paths must be translated to and
# from Windows form, and scripts live under Scripts/ rather than bin/.
msys2_shell_base = dict(
    unix_shell_base,
    path_from=unix_path_to_win,
    path_to=win_path_to_unix,
    binpath="/Scripts/",  # mind the trailing slash.
)
# Per-shell syntax/configuration table used by run_in() and the activation
# machinery. Windows gets cmd.exe plus the cygwin/msys2 bash variants; all
# other platforms get the common unix shells.
if sys.platform == "win32":
    shells = {
        # "powershell.exe": dict(
        #    echo="echo",
        #    test_echo_extra=" .",
        #    var_format="${var}",
        #    binpath="/bin/",  # mind the trailing slash.
        #    source_setup="source",
        #    nul='2>/dev/null',
        #    set_var='export ',
        #    shell_suffix=".ps",
        #    env_script_suffix=".ps",
        #    printps1='echo $PS1',
        #    printdefaultenv='echo $CONDA_DEFAULT_ENV',
        #    printpath="echo %PATH%",
        #    exe="powershell.exe",
        #    path_from=path_identity,
        #    path_to=path_identity,
        #    slash_convert=("/", "\\"),
        # ),
        "cmd.exe": dict(
            echo="@echo",
            var_format="%{}%",
            binpath="\\Scripts\\",  # mind the trailing slash.
            source_setup="call",
            test_echo_extra="",
            nul='1>NUL 2>&1',
            set_var='set ',
            shell_suffix=".bat",
            env_script_suffix=".bat",
            printps1="@echo %PROMPT%",
            promptvar="PROMPT",
            # parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file  # NOQA
            printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
                            'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
                            'echo()',
            printpath="@echo %PATH%",
            exe="cmd.exe",
            shell_args=["/d", "/c"],
            path_from=path_identity,
            path_to=path_identity,
            slash_convert=("/", "\\"),
            sep="\\",
            pathsep=";",
        ),
        "cygwin": dict(
            unix_shell_base,
            exe="bash.exe",
            binpath="/Scripts/",  # mind the trailing slash.
            path_from=cygwin_path_to_win,
            path_to=win_path_to_cygwin
        ),
        # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
        # entry instead. The only major difference is that it handle's cygwin's /cygdrive
        # filesystem root.
        "bash.exe": dict(
            msys2_shell_base, exe="bash.exe",
        ),
        "sh.exe": dict(
            msys2_shell_base, exe="sh.exe",
        ),
    }
else:
    shells = {
        "bash": dict(
            unix_shell_base, exe="bash",
        ),
        "zsh": dict(
            unix_shell_base, exe="zsh",
        ),
        # fish separates PATH entries with spaces, not colons.
        "fish": dict(
            unix_shell_base, exe="fish",
            pathsep=" ",
        ),
    }
<|code_end|>
| conda/cli/main_config.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, error_and_exit, exception_and_exit,
stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump, yaml_bool
# User-facing help text for `conda config`; interpolated into the argparse
# description/epilog in configure_parser() below.
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class CouldntParse(NotImplementedError):
    """Raised when the existing .condarc cannot be edited in place."""
    def __init__(self, reason):
        message = """Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s""" % reason
        self.args = [message]
class SingleValueKey(Completer):
    """Completer for config keys that take a single boolean/string value."""
    def _get_items(self):
        scalar_keys = rc_bool_keys + rc_string_keys
        # Also complete the YAML spellings of booleans.
        return scalar_keys + ['yes', 'no', 'on', 'off', 'true', 'false']
class ListKey(Completer):
    """Completer for configuration keys whose value is a list."""
    def _get_items(self):
        return rc_list_keys
class BoolOrListKey(Completer):
    """Completer accepting either boolean or list configuration keys."""
    def __contains__(self, other):
        # Membership delegates to the (possibly cached) completion items.
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Attach the ``conda config`` sub-command and all of its options to
    *sub_parsers* (the argparse sub-parsers action object)."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # Where to write: system condarc, an explicit file, or the user condarc.
    # TODO: use argparse.FileType
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
                                user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar=('KEY'),
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to a list key. The default
behavior is to prepend.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for ``conda config``; in JSON mode, expected errors are
    reported as JSON instead of a traceback."""
    try:
        execute_config(args, parser)
    except (CouldntParse, NotImplementedError) as e:
        if not args.json:
            raise
        exception_and_exit(e, json=True)
def execute_config(args, parser):
    """Implement ``conda config``: load the target .condarc, apply the
    requested --get/--add/--set/--remove/--remove-key actions, and write the
    file back (reporting results as JSON when --json was given)."""
    json_warnings = []
    json_get = {}

    # Resolve which condarc file we operate on.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh)
    else:
        rc_config = {}

    # add `defaults` channel if creating new condarc file or channel key doesn't exist currently
    if 'channels' not in rc_config:
        # now check to see if user wants to modify channels at all
        if any('channels' in item[0] for item in args.add):
            # don't need to insert defaults if it's already in args
            if not ['channels', 'defaults'] in args.add:
                args.add.insert(0, ['channels', 'defaults'])

    # Get
    if args.get is not None:
        # A bare --get means "get every configured key".
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                    continue
            if key not in rc_config:
                continue
            if args.json:
                json_get[key] = rc_config[key]
                continue
            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # Add
    for key, item in args.add:
        if key not in rc_list_keys:
            error_and_exit("key must be one of %s, not %r" %
                           (', '.join(rc_list_keys), key), json=args.json,
                           error_type="ValueError")
        if not isinstance(rc_config.get(key, []), list):
            bad = rc_config[key].__class__.__name__
            raise CouldntParse("key %r should be a list, not %s." % (key, bad))
        if key == 'default_channels' and rc_path != sys_rc_path:
            msg = "'default_channels' is only configurable for system installs"
            raise NotImplementedError(msg)
        if item in rc_config.get(key, []):
            # Right now, all list keys should not contain duplicates
            message = "Skipping %s: %s, item already exists" % (key, item)
            if not args.json:
                print(message, file=sys.stderr)
            else:
                json_warnings.append(message)
            continue
        # Prepend, matching git-config-like "most recent wins" semantics.
        rc_config.setdefault(key, []).insert(0, item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        if key in set_bools:
            itemb = yaml_bool(item)
            if itemb is None:
                error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
                               json=args.json, error_type="TypeError")
            rc_config[key] = itemb
        elif key in set_strings:
            rc_config[key] = item
        else:
            error_and_exit("Error key must be one of %s, not %s" %
                           (', '.join(set_bools | set_strings), key), json=args.json,
                           error_type="ValueError")

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        if item not in rc_config[key]:
            error_and_exit("%r is not in the %r key of the config file" %
                           (item, key), json=args.json, error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))
    if args.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
conda/connection.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import requests
import tempfile
from io import BytesIO
from logging import getLogger
from . import __version__ as VERSION
from .compat import urlparse, StringIO
from .config import platform as config_platform, ssl_verify, get_proxy_servers
from .utils import gnu_get_libc_version, yaml_bool
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
               "requests/{requests_ver} "
               "{python}/{py_ver} "
               "{system}/{kernel} {dist}/{ver}")
glibc_ver = gnu_get_libc_version()
if config_platform == 'linux':
    # NOTE(review): platform.linux_distribution() was removed in Python 3.8;
    # confirm the supported interpreter range or switch to the `distro`
    # package there.
    distinfo = platform.linux_distribution()
    dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
    dist = 'OSX'
    ver = platform.mac_ver()[0]
else:
    # Windows and anything else: fall back to generic system/version info.
    dist = platform.system()
    ver = platform.version()
user_agent = _user_agent.format(conda_ver=VERSION,
                                requests_ver=requests.__version__,
                                python=platform.python_implementation(),
                                py_ver=platform.python_version(),
                                system=platform.system(), kernel=platform.release(),
                                dist=dist, ver=ver)
if glibc_ver:
    # glibc version is only available (and meaningful) on Linux.
    user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests Session preconfigured for conda: proxy servers, HTTP(S)
    retries, extra URL schemes (file://, ftp://, s3://), conda's User-Agent
    and the configured SSL verification setting."""

    timeout = None

    def __init__(self, *args, **kwargs):
        max_retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        configured_proxies = get_proxy_servers()
        if configured_proxies:
            self.proxies = configured_proxies
        # disable .netrc file. for reference, see
        # https://github.com/Anaconda-Platform/anaconda-client/pull/298
        self.auth = NullAuth()
        # Configure retries
        if max_retries:
            retry_adapter = requests.adapters.HTTPAdapter(max_retries=max_retries)
            self.mount("http://", retry_adapter)
            self.mount("https://", retry_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
        # Enable s3:// urls
        self.mount("s3://", S3Adapter())
        self.headers['User-Agent'] = user_agent
        self.verify = yaml_bool(ssl_verify, ssl_verify)
class NullAuth(requests.auth.AuthBase):
    '''force requests to ignore the ``.netrc``

    Some sites do not support regular authentication, but we still
    want to store credentials in the ``.netrc`` file and submit them
    as form elements. Without this, requests would otherwise use the
    .netrc which leads, on some sites, to a 401 error.

    https://github.com/kennethreitz/requests/issues/2773

    Use with::

        requests.get(url, auth=NullAuth())
    '''
    def __call__(self, r):
        # Leave the prepared request untouched: no auth header is attached.
        return r
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves ``s3://`` URLs via boto: the object is
    downloaded to a temporary file which becomes the response body."""

    def __init__(self):
        super(S3Adapter, self).__init__()
        self._temp_file = None  # path of the downloaded object; removed in close()

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        # stream/timeout/verify/cert/proxies are part of the adapter
        # interface but are not used for S3 downloads.
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto

            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser

            def get(self, section, name, default=None, **kw):
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except:
                    val = default
                return val
            Config.get = get
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_S3_info(request.url)

        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)

        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp

        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            # Download to a temp file and expose it as the raw body; the
            # caller closes it via resp.close.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        # Clean up the temp file created by the last send(), if any.
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """
    Convert a S3 url to a tuple of bucket and key

    Raises AssertionError if the URL scheme is not ``s3``.
    """
    parsed_url = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed_url.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    # NOTE(review): key is the raw URL path as urllib3 parses it (likely
    # including the leading '/') -- confirm against the S3 key naming used.
    bucket, key = parsed_url.host, parsed_url.path
    return bucket, key
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves ``file://`` URLs directly from the local
    filesystem (no network round-trip)."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            # NOTE(review): resp.raw is set to the exception object (not a
            # file-like) and no headers are set on this path; callers appear
            # to check status_code first -- confirm.
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        # Nothing to release; the response owns the open file handle.
        pass
def url_to_path(url):
    """
    Convert a ``file:`` URL into a local filesystem path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    # Drop the scheme and any leading slashes, then decode %xx escapes.
    path = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if re.match('^([a-z])[:|]', path, re.I):
        # Windows drive spelled "c:..." or "c|..." in the URL.
        path = path[0] + ':' + path[2:]
    elif not path.startswith(r'\\'):
        # Not a Windows UNC share: restore the leading slash.
        path = '/' + path
    return path
# Matches a leading Windows drive letter in either "c:" or "c|" (file-URL) form.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''
    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Get the method and attempt to find the function to call.
        # (Unsupported methods raise KeyError here.)
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))

        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''

        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

                # Decode the base64 encoded string.  b64decode returns bytes
                # on Python 3; turn it back into text before splitting so
                # this works on both Python 2 and 3.
                decoded = base64.b64decode(encoded)
                if isinstance(decoded, bytes):
                    decoded = decoded.decode('utf-8')

                # The string is of the form 'username:password'.  Split on
                # the FIRST colon only, so passwords containing ':' survive.
                username, _, password = decoded.partition(':')
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse.urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    '''Returns a callback suitable for use by the FTP library. This callback
    will repeatedly save data into the variable provided to this function. This
    variable should be a file-like structure.'''
    def callback(chunk):
        # Accumulate every chunk the FTP library hands us.
        variable.write(chunk)

    return callback
class AuthError(Exception):
    '''Denotes an error with authentication.'''
def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    # Text responses (LIST/NLST output) are decoded as ascii.
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    # encoding=None leaves the payload as raw bytes.
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()

    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status lines look like "226 Transfer complete."; the leading
    # integer is the FTP status code.
    response.status_code = int(code.split()[0])

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # parse_multipart returns *bytes* chunks for file parts on Python 3, so
    # join with b'' (joining bytes with a text '' raises TypeError there);
    # on Python 2, b'' is str so behavior is unchanged.
    buf.write(b''.join(filedata))
    buf.seek(0)
    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
"""
Convert a :class:`.Url` into a url
The input can be any iterable that gives ['scheme', 'auth', 'host',
'port', 'path', 'query', 'fragment']. Unused items should be None.
This function should more or less round-trip with :func:`.parse_url`. The
returned url may not be exactly the same as the url inputted to
:func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
with a blank port).
Example: ::
>>> Url = parse_url('http://google.com/mail/')
>>> unparse_url(Url)
'http://google.com/mail/'
>>> unparse_url(['http', 'username:password', 'host.com', 80,
... '/path', 'query', 'fragment'])
'http://username:password@host.com:80/path?query#fragment'
"""
scheme, auth, host, port, path, query, fragment = U
url = ''
# We use "is not None" we want things to happen with empty strings (or 0 port)
if scheme is not None:
url = scheme + '://'
if auth is not None:
url += auth + '@'
if host is not None:
url += host
if port is not None:
url += ':' + str(port)
if path is not None:
url += path
if query is not None:
url += '?' + query
if fragment is not None:
url += '#' + fragment
return url
conda/utils.py
from __future__ import print_function, division, absolute_import
import logging
import sys
import hashlib
import collections
from functools import partial
from os.path import abspath, isdir, join
import os
import re
import subprocess
import tempfile
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
# collections.Hashable was an alias of collections.abc.Hashable that was
# removed in Python 3.10; import from the canonical location, with a
# fallback for Python 2.
try:
    from collections.abc import Hashable as _Hashable
except ImportError:  # Python 2
    from collections import Hashable as _Hashable


class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kw):
        # Build a hashable cache key from the positional arguments.
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # Lists are normalized to tuples so they can be cached.
                newargs.append(tuple(arg))
            elif not isinstance(arg, _Hashable):
                # uncacheable. a dict, for instance.
                # better to not cache than blow up.
                return self.func(*args, **kw)
            else:
                newargs.append(arg)
        key = (tuple(newargs), frozenset(sorted(kw.items())))
        if key in self.cache:
            return self.cache[key]
        value = self.func(*args, **kw)
        self.cache[key] = value
        return value
# For instance methods only
class memoize(object):  # 577452
    """Method decorator that caches results on the instance itself, keyed by
    (function, positional args, keyword args)."""

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, objtype=None):
        # Accessed on the class: hand back the raw function.
        if obj is None:
            return self.func
        # Accessed on an instance: bind the instance as the first argument.
        return partial(self, obj)

    def __call__(self, *args, **kw):
        instance = args[0]
        try:
            cache = instance.__cache
        except AttributeError:
            cache = instance.__cache = {}
        key = (self.func, args[1:], frozenset(sorted(kw.items())))
        if key not in cache:
            cache[key] = self.func(*args, **kw)
        return cache[key]
@memoized
def gnu_get_libc_version():
    """
    If on linux, get installed version of glibc, otherwise return None
    """
    if not sys.platform.startswith('linux'):
        return None

    from ctypes import CDLL, cdll, c_char_p

    cdll.LoadLibrary('libc.so.6')
    libc = CDLL('libc.so.6')
    f = libc.gnu_get_libc_version
    # gnu_get_libc_version() returns a C string; declare the restype so
    # ctypes hands back bytes instead of a raw pointer value.
    f.restype = c_char_p
    return f()
def can_open(file):
    """
    Return True if the given ``file`` can be opened for writing
    """
    try:
        # Append mode: probes writability without truncating existing data.
        with open(file, "ab"):
            pass
        return True
    except IOError:
        stderrlog.info("Unable to open %s\n" % file)
        return False
def can_open_all(files):
    """
    Return True if all of the provided ``files`` can be opened
    """
    # all() short-circuits on the first unopenable file, just like the
    # original explicit loop did.
    return all(can_open(f) for f in files)
def can_open_all_files_in_prefix(prefix, files):
    """
    Returns True if all ``files`` at a given ``prefix`` can be opened
    """
    full_paths = (os.path.join(prefix, f) for f in files)
    return can_open_all(full_paths)
def try_write(dir_path):
    """Return True if *dir_path* is an existing directory we can write to."""
    if not isdir(dir_path):
        return False
    # Probe writability by creating a real file, see #2151; the pid keeps
    # concurrent conda processes from colliding.
    probe = join(dir_path, '.conda-try-write-%d' % os.getpid())
    try:
        with open(probe, mode='wb') as fh:
            fh.write(b'This is a test file.\n')
        os.unlink(probe)
        return True
    except (IOError, OSError):
        return False
def hashsum_file(path, mode='md5'):
    """Return the hex digest of the file at *path* using algorithm *mode*."""
    digest = hashlib.new(mode)
    with open(path, 'rb') as stream:
        # Stream the file in 256KB blocks so large files stay cheap.
        for block in iter(lambda: stream.read(262144), b''):
            digest.update(block)
    return digest.hexdigest()
def md5_file(path):
    """Return the MD5 hex digest of the file at *path*."""
    return hashsum_file(path, 'md5')
def url_path(path):
path = abspath(path)
if sys.platform == 'win32':
path = '/' + path.replace(':', '|').replace('\\', '/')
return 'file://%s' % path
def run_in(command, shell, cwd=None, env=None):
    """Run *command* under the given *shell* (a key of ``shells`` or a shell
    dict) and return ``[stdout, stderr]`` as text with normalized newlines."""
    if hasattr(shell, "keys"):
        # A shell dict was passed; reduce it to its executable key.
        shell = shell["exe"]
    if shell == 'cmd.exe':
        # cmd.exe needs the command written out to a temporary .bat script.
        cmd_script = tempfile.NamedTemporaryFile(suffix='.bat', mode='wt', delete=False)
        cmd_script.write(command)
        cmd_script.close()
        cmd_bits = [shells[shell]["exe"]] + shells[shell]["shell_args"] + [cmd_script.name]
        try:
            p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                 cwd=cwd, env=env)
            stdout, stderr = p.communicate()
        finally:
            os.unlink(cmd_script.name)
    elif shell == 'powershell':
        raise NotImplementedError
    else:
        # POSIX-style shells take the (path-translated) command inline.
        cmd_bits = ([shells[shell]["exe"]] + shells[shell]["shell_args"] +
                    [translate_stream(command, shells[shell]["path_to"])])
        p = subprocess.Popen(cmd_bits, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = p.communicate()
    # Decode, normalize CRLF, and drop the trailing newline from each stream.
    streams = [u"%s" % stream.decode('utf-8').replace('\r\n', '\n').rstrip("\n")
               for stream in (stdout, stderr)]
    return streams
def path_identity(path):
    """No-op path converter: hand *path* back unchanged."""
    return path
def win_path_to_unix(path, root_prefix=""):
"""Convert a path or ;-separated string of paths into a unix representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
def _translation(found_path):
found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/")
return root_prefix + "/" + found
path = re.sub(path_re, _translation, path).replace(";/", ":/")
return path
def unix_path_to_win(path, root_prefix=""):
"""Convert a path or :-separated string of paths into a Windows representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
if len(path) > 1 and (";" in path or (path[1] == ":" and path.count(":") == 1)):
# already a windows path
return path.replace("/", "\\")
path_re = root_prefix + r'(/[a-zA-Z]/(?:(?![:\s]/)[^:*?"<>])*)'
def _translation(found_path):
group = found_path.group(0)
return "{0}:{1}".format(group[len(root_prefix)+1],
group[len(root_prefix)+2:].replace("/", "\\"))
translation = re.sub(path_re, _translation, path)
translation = re.sub(":([a-zA-Z]):\\\\",
lambda match: ";" + match.group(0)[1] + ":\\",
translation)
return translation
# curry cygwin functions
def win_path_to_cygwin(path):
    # Same as win_path_to_unix, but rooted at cygwin's /cygdrive mount.
    return win_path_to_unix(path, "/cygdrive")

def cygwin_path_to_win(path):
    # Inverse of win_path_to_cygwin.
    return unix_path_to_win(path, "/cygdrive")
def translate_stream(stream, translator):
    """Apply *translator* to every line of *stream* and rejoin with newlines."""
    lines = stream.split("\n")
    return "\n".join(map(translator, lines))
def human_bytes(n):
    """
    Return the number of bytes n in more human readable form.
    """
    # Walk up the unit ladder, dividing by 1024 at each step.
    if n < 1024:
        return '%d B' % n
    kb = n / 1024
    if kb < 1024:
        return '%d KB' % round(kb)
    mb = kb / 1024
    if mb < 1024:
        return '%.1f MB' % mb
    return '%.2f GB' % (mb / 1024)
# This is necessary for Windows, for linking the environment, and for printing the correct
# activation instructions on Windows, depending on the shell type. It would be great to
# get rid of it, but I don't know how to otherwise detect which shell is used to create
# or install conda packages.
def find_parent_shell(path=False, max_stack_depth=10):
    """return process name or path of parent.  Default is to return only name of process."""
    try:
        import psutil
    except ImportError:
        stderrlog.warn("No psutil available.\n"
                       "To proceed, please conda install psutil")
        return None
    process = psutil.Process()
    pname = process.parent().name().lower()
    stack_depth = 0
    # Walk up past intermediate conda/python/test processes to reach the
    # shell that actually launched us (bounded by max_stack_depth).
    while (any(proc in pname for proc in ["conda", "python", "py.test"]) and
            stack_depth < max_stack_depth):
        if process:
            process = process.parent()
            pname = process.parent().name().lower()
            stack_depth += 1
        else:
            # fallback defaults to system default
            if sys.platform == 'win32':
                return 'cmd.exe'
            else:
                return 'bash'
    # NOTE(review): process.parent() can return None at the top of the
    # process tree, which would make .exe()/.name() raise -- confirm.
    if path:
        return process.parent().exe()
    return process.parent().name()
@memoized
def get_yaml():
    """Import and return the first available YAML implementation.

    Preference order: ruamel_yaml, ruamel.yaml, plain PyYAML.  Exits the
    process if none is installed.  Memoized so the import dance runs once.
    """
    try:
        import ruamel_yaml as yaml
    except ImportError:
        try:
            import ruamel.yaml as yaml
        except ImportError:
            try:
                import yaml
            except ImportError:
                sys.exit("No yaml library available.\n"
                         "To proceed, please conda install ruamel_yaml")
    return yaml
# Restores YAML 1.1 boolean flexibility.
yaml_bool_ = {
    'true': True, 'yes': True, 'on': True,
    'false': False, 'no': False, 'off': False
}

def yaml_bool(s, passthrough=None):
    """Interpret *s* as a YAML 1.1 boolean word.

    Real bools pass straight through; unrecognized words and non-string
    values yield *passthrough*.
    """
    if type(s) is bool:
        return s
    try:
        lowered = s.lower()
    except AttributeError:
        # Not a string-like value.
        return passthrough
    return yaml_bool_.get(lowered, passthrough)
def yaml_load(filehandle):
    """Parse YAML from *filehandle*, round-tripping comments when the
    installed yaml library supports it (ruamel)."""
    yaml = get_yaml()
    try:
        return yaml.load(filehandle, Loader=yaml.RoundTripLoader, version="1.2")
    except AttributeError:
        # Plain PyYAML has no RoundTripLoader.
        return yaml.load(filehandle)
def yaml_dump(string):
    """Serialize *string* (any YAML-serializable object) to YAML text,
    preserving comments/layout when the library supports round-tripping."""
    yaml = get_yaml()
    try:
        return yaml.dump(string, Dumper=yaml.RoundTripDumper,
                         block_seq_indent=2, default_flow_style=False,
                         indent=4)
    except AttributeError:
        # Plain PyYAML has no RoundTripDumper.
        return yaml.dump(string, default_flow_style=False)
# TODO: this should be done in a more extensible way
# (like files for each shell, with some registration mechanism.)

# defaults for unix shells.  Note: missing "exe" entry, which should be set to
# either an executable on PATH, or a full path to an executable for a shell
unix_shell_base = dict(
    binpath="/bin/",  # mind the trailing slash.
    echo="echo",
    env_script_suffix=".sh",
    nul='2>/dev/null',
    path_from=path_identity,
    path_to=path_identity,
    pathsep=":",
    printdefaultenv='echo $CONDA_DEFAULT_ENV',
    printpath="echo $PATH",
    printps1='echo $PS1',
    promptvar='PS1',
    sep="/",
    set_var='export ',
    shell_args=["-l", "-c"],
    shell_suffix="",
    slash_convert=("\\", "/"),
    source_setup="source",
    test_echo_extra="",
    var_format="${}",
)

# msys2 shells run on Windows, so paths must be translated both ways and
# scripts live under Scripts/ rather than bin/.
msys2_shell_base = dict(
    unix_shell_base,
    path_from=unix_path_to_win,
    path_to=win_path_to_unix,
    binpath="/Scripts/",  # mind the trailing slash.
)
# Table of supported shells for the current platform, keyed by shell
# name/executable.  Each entry is built on the base dicts above.
if sys.platform == "win32":
    shells = {
        # "powershell.exe": dict(
        #    echo="echo",
        #    test_echo_extra=" .",
        #    var_format="${var}",
        #    binpath="/bin/",  # mind the trailing slash.
        #    source_setup="source",
        #    nul='2>/dev/null',
        #    set_var='export ',
        #    shell_suffix=".ps",
        #    env_script_suffix=".ps",
        #    printps1='echo $PS1',
        #    printdefaultenv='echo $CONDA_DEFAULT_ENV',
        #    printpath="echo %PATH%",
        #    exe="powershell.exe",
        #    path_from=path_identity,
        #    path_to=path_identity,
        #    slash_convert = ("/", "\\"),
        # ),
        "cmd.exe": dict(
            echo="@echo",
            var_format="%{}%",
            binpath="\\Scripts\\",  # mind the trailing slash.
            source_setup="call",
            test_echo_extra="",
            nul='1>NUL 2>&1',
            set_var='set ',
            shell_suffix=".bat",
            env_script_suffix=".bat",
            printps1="@echo %PROMPT%",
            promptvar="PROMPT",
            # parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file  # NOQA
            printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
                            'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
                            'echo()',
            printpath="@echo %PATH%",
            exe="cmd.exe",
            shell_args=["/d", "/c"],
            path_from=path_identity,
            path_to=path_identity,
            slash_convert=("/", "\\"),
            sep="\\",
            pathsep=";",
        ),
        "cygwin": dict(
            unix_shell_base,
            exe="bash.exe",
            binpath="/Scripts/",  # mind the trailing slash.
            path_from=cygwin_path_to_win,
            path_to=win_path_to_cygwin
        ),
        # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
        # entry instead. The only major difference is that it handle's cygwin's /cygdrive
        # filesystem root.
        "bash.exe": dict(
            msys2_shell_base, exe="bash.exe",
        ),
        "sh.exe": dict(
            msys2_shell_base, exe="sh.exe",
        ),
    }
else:
    shells = {
        "bash": dict(
            unix_shell_base, exe="bash",
        ),
        "zsh": dict(
            unix_shell_base, exe="zsh",
        ),
        "fish": dict(
            unix_shell_base, exe="fish",
            pathsep=" ",
        ),
    }
| conda/cli/main_config.py
--- a/conda/cli/main_config.py
+++ b/conda/cli/main_config.py
@@ -13,7 +13,7 @@
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
-from ..utils import yaml_load, yaml_dump
+from ..utils import yaml_load, yaml_dump, yaml_bool
descr = """
Modify configuration values in .condarc. This is modeled after the git
@@ -289,14 +289,14 @@ def execute_config(args, parser):
set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
for key, item in args.set:
# Check key and value
- yamlitem = yaml_load(item)
if key in set_bools:
- if not isinstance(yamlitem, bool):
+ itemb = yaml_bool(item)
+ if itemb is None:
error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
json=args.json, error_type="TypeError")
- rc_config[key] = yamlitem
+ rc_config[key] = itemb
elif key in set_strings:
- rc_config[key] = yamlitem
+ rc_config[key] = item
else:
error_and_exit("Error key must be one of %s, not %s" %
(', '.join(set_bools | set_strings), key), json=args.json,
conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -22,7 +22,7 @@
from . import __version__ as VERSION
from .compat import urlparse, StringIO
from .config import platform as config_platform, ssl_verify, get_proxy_servers
-from .utils import gnu_get_libc_version
+from .utils import gnu_get_libc_version, yaml_bool
RETRIES = 3
@@ -110,7 +110,7 @@ def __init__(self, *args, **kwargs):
self.headers['User-Agent'] = user_agent
- self.verify = ssl_verify
+ self.verify = yaml_bool(ssl_verify, ssl_verify)
class NullAuth(requests.auth.AuthBase):
conda/utils.py
--- a/conda/utils.py
+++ b/conda/utils.py
@@ -292,6 +292,20 @@ def get_yaml():
return yaml
+# Restores YAML 1.1 boolean flexibility.
+yaml_bool_ = {
+ 'true': True, 'yes': True, 'on': True,
+ 'false': False, 'no': False, 'off': False
+}
+def yaml_bool(s, passthrough=None):
+ if type(s) is bool:
+ return s
+ try:
+ return yaml_bool_.get(s.lower(), passthrough)
+ except AttributeError:
+ return passthrough
+
+
def yaml_load(filehandle):
yaml = get_yaml()
try: |
Basic access authentication channels broken in 4.1.0
Previously it was possible to have a channel like:
https://user:password@myserver.com
or even have the user and password saved in `.netrc`. conda 4.1.0 and up ignore the credentials and give a 401 error (causing everything to break for users).
```
$ conda install conda=4.1.0
Using Anaconda Cloud api site https://api.anaconda.org
Fetching package metadata ...........Error: HTTPError: 401 Client Error: Unauthorized for url:https://user:password@myserver.com
```
conda=4.1 no longer respects HTTP_PROXY
My proxy variables are properly set:
```
$ set https_proxy
HTTPS_PROXY=http://myproxy:8080
```
`conda=4.0.8` successfully picks up my `%HTTPS_PROXY%` and installs `conda=4.1`:
```
$ conda install conda=4.1
Fetching package metadata: ..........
Solving package specifications: .........
The following packages will be UPDATED:
conda: 4.0.8-py27_0 defaults --> 4.1.0-py27_0 defaults
```
now trying to revert back to `conda=4.0`:
```
$ conda install conda=4.0
Fetching package metadata .........
Could not connect to https://repo.continuum.io/pkgs/free/win-64/
Connection error: HTTPSConnectionPool(host='repo.continuum.io', port=443): Max retries exceeded with url...
```
proxy settings no longer work. Configuring `proxy_servers` in `.condarc` restores connectivity.
| conda/connection.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import

import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import tempfile
from io import BytesIO
from logging import getLogger

import requests

from . import __version__ as VERSION
from .compat import urlparse, StringIO
from .config import platform as config_platform, ssl_verify, get_proxy_servers
from .utils import gnu_get_libc_version, yaml_bool
RETRIES = 3  # default number of automatic HTTP(S) retries

log = getLogger(__name__)
stderrlog = getLogger('stderrlog')

# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
               "requests/{requests_ver} "
               "{python}/{py_ver} "
               "{system}/{kernel} {dist}/{ver}")

glibc_ver = gnu_get_libc_version()
if config_platform == 'linux':
    # NOTE(review): platform.linux_distribution() was removed in Python 3.8
    # -- confirm the supported interpreter range for this module.
    distinfo = platform.linux_distribution()
    dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
    dist = 'OSX'
    ver = platform.mac_ver()[0]
else:
    dist = platform.system()
    ver = platform.version()

user_agent = _user_agent.format(conda_ver=VERSION,
                                requests_ver=requests.__version__,
                                python=platform.python_implementation(),
                                py_ver=platform.python_version(),
                                system=platform.system(), kernel=platform.release(),
                                dist=dist, ver=ver)
if glibc_ver:
    user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """A requests Session pre-configured for conda: proxy servers, retrying
    HTTP(S) adapters, extra adapters for file://, ftp:// and s3:// URLs,
    conda's User-Agent string, and the user's ssl_verify setting."""

    timeout = None  # default per-request timeout (None = wait indefinitely)

    def __init__(self, *args, **kwargs):
        # 'retries' is our own keyword; pop it before requests.Session sees it.
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)
        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies
        self.auth = NullAuth()  # disable .netrc file. for reference, see
        # https://github.com/Anaconda-Platform/anaconda-client/pull/298
        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)
        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())
        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())
        # Enable s3:// urls
        self.mount("s3://", S3Adapter())
        self.headers['User-Agent'] = user_agent
        # BUG FIX: ssl_verify may come from .condarc as a YAML boolean word
        # ("true", "off", ...) rather than a real bool; coerce those, and
        # pass anything else (e.g. a CA bundle path string) through unchanged.
        self.verify = yaml_bool(ssl_verify, ssl_verify)
class NullAuth(requests.auth.AuthBase):
    '''force requests to ignore the ``.netrc``

    Some sites do not support regular authentication, but we still
    want to store credentials in the ``.netrc`` file and submit them
    as form elements. Without this, requests would otherwise use the
    .netrc which leads, on some sites, to a 401 error.

    https://github.com/kennethreitz/requests/issues/2773

    Use with::

        requests.get(url, auth=NullAuth())
    '''
    def __call__(self, r):
        # No-op: leave the prepared request exactly as built.
        return r
class S3Adapter(requests.adapters.BaseAdapter):
    """Serve ``s3://`` URLs via boto; the object is downloaded to a temp
    file that backs the response body."""

    def __init__(self):
        super(S3Adapter, self).__init__()
        self._temp_file = None  # removed in close()

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto

            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser

            def get(self, section, name, default=None, **kw):
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except:
                    val = default
                return val
            Config.get = get
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_S3_info(request.url)

        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)

        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp

        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            # Download into a temp file; the caller closes it via resp.close.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """Split an s3:// *url* into its (bucket, key) components."""
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    # The host component is the bucket name; the path is the object key.
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves file:// URLs from the local filesystem."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        # Map the file:// URL onto a local path; a missing/unreadable file
        # becomes a 404 whose raw body is the OSError itself.
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            # Mirror the headers an HTTP server would send for this file.
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            # Stream the file itself as the raw response body.
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close
        return resp

    def close(self):
        # No persistent state to dispose of.
        pass
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    # Strip the scheme and any leading slashes, then URL-decode.
    path = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(path):
        # Windows drive spec: 'c|/...' or 'c:/...' -> 'c:/...'
        path = path[0] + ':' + path[2:]
    elif not path.startswith(r'\\'):
        # if not a Windows UNC path
        path = '/' + path
    return path
# Matches a Windows drive spec at the start of a path, e.g. 'c:' or 'c|'.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''

    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Dispatch to the handler for this FTP verb; a KeyError here means
        # an unsupported method.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

                # Decode the base64 encoded string; b64decode returns bytes
                # on Python 3, so decode back to text before splitting.
                decoded = base64.b64decode(encoded).decode('utf-8')

                # The string is of the form 'username:password'. Split on the
                # first colon only so passwords may themselves contain colons.
                username, _, password = decoded.partition(':')

            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        parsed = urlparse.urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it. Guard with
        # startswith so an empty path does not raise IndexError.
        if path.startswith('/'):
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    """Return a callback suitable for use by the FTP library.

    Each chunk the FTP library passes to the callback is written to
    *variable*, which should be a file-like object.
    """
    def _sink(chunk):
        variable.write(chunk)
    return _sink
# Raised by FTPAdapter when the Authorization header is not HTTP Basic.
class AuthError(Exception):
    '''Denotes an error with authentication.'''
    pass
def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    # Directory listings (LIST/NLST) are treated as ASCII text.
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    # encoding=None leaves the payload undecoded for the caller.
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()
    response.encoding = encoding
    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status strings look like '226 Transfer complete.'; the leading
    # integer is used as the response status code.
    response.status_code = int(code.split()[0])
    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)
    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # parse_multipart yields bytes chunks, so join with a bytes separator;
    # ''.join(filedata) raises TypeError on Python 3.
    buf.write(b''.join(filedata))
    buf.seek(0)

    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ... '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U

    # A component is emitted whenever it "is not None": empty strings (and
    # port 0) must still be rendered; only None means "omit this part".
    pieces = []
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
<|code_end|>
| conda/connection.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import base64
import cgi
import email
import ftplib
import mimetypes
import os
import platform
import re
import requests
import tempfile
from io import BytesIO
from logging import getLogger
from . import __version__ as VERSION
from .compat import urlparse, StringIO
from .config import platform as config_platform, ssl_verify, get_proxy_servers
from .utils import gnu_get_libc_version
# Default number of HTTP(S) retry attempts used by CondaSession.
RETRIES = 3

log = getLogger(__name__)
stderrlog = getLogger('stderrlog')

# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
               "requests/{requests_ver} "
               "{python}/{py_ver} "
               "{system}/{kernel} {dist}/{ver}")

glibc_ver = gnu_get_libc_version()
if config_platform == 'linux':
    distinfo = platform.linux_distribution()
    dist, ver = distinfo[0], distinfo[1]
elif config_platform == 'osx':
    dist = 'OSX'
    ver = platform.mac_ver()[0]
else:
    # Fall back to whatever the platform module reports (e.g. Windows).
    dist = platform.system()
    ver = platform.version()

user_agent = _user_agent.format(conda_ver=VERSION,
                                requests_ver=requests.__version__,
                                python=platform.python_implementation(),
                                py_ver=platform.python_version(),
                                system=platform.system(), kernel=platform.release(),
                                dist=dist, ver=ver)
# gnu_get_libc_version() is truthy only on glibc-based systems.
if glibc_ver:
    user_agent += " glibc/{}".format(glibc_ver)
# Modified from code in pip/download.py:
# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
class CondaSession(requests.Session):
    """requests.Session pre-configured for conda: proxy servers from config,
    HTTP(S) retries, and adapters for file://, ftp:// and s3:// URLs."""

    # Class-wide default timeout; None means requests' own default applies.
    timeout = None

    def __init__(self, *args, **kwargs):
        # ``retries`` is conda-specific, so pop it before delegating the
        # remaining arguments to requests.Session.
        retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        proxies = get_proxy_servers()
        if proxies:
            self.proxies = proxies

        # Configure retries
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # Enable ftp:// urls
        self.mount("ftp://", FTPAdapter())

        # Enable s3:// urls
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = user_agent

        self.verify = ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter that lets requests fetch s3:// URLs via boto.

    A successful ``send`` downloads the S3 key into a temporary file and
    exposes it as the response body; ``close`` removes that file.
    """

    def __init__(self):
        super(S3Adapter, self).__init__()
        # Path of the temp file holding the downloaded key, if any.
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Fetch ``request.url`` from S3 and return a requests Response.

        Returns a 404 response when boto is not installed, when the
        bucket/key cannot be listed, or when the key does not exist.
        """
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto

            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser

            def get(self, section, name, default=None, **kw):
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except Exception:
                    # Narrowed from a bare ``except``: fall back to the
                    # default on real errors only, without also swallowing
                    # KeyboardInterrupt/SystemExit.
                    val = default
                return val

            Config.get = get
        except ImportError:
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_S3_info(request.url)

        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)

        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp

        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            # Download to a temp file and stream it back as the raw body.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        # Clean up the temp file created by a successful send().
        if self._temp_file:
            os.remove(self._temp_file)
def url_to_S3_info(url):
    """Split an s3:// *url* into its (bucket, key) components."""
    parsed = requests.packages.urllib3.util.url.parse_url(url)
    assert parsed.scheme == 's3', (
        "You can only use s3: urls (not %r)" % url)
    # The host component is the bucket name; the path is the object key.
    return parsed.host, parsed.path
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves file:// URLs from the local filesystem."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        # Map the file:// URL onto a local path; a missing/unreadable file
        # becomes a 404 whose raw body is the OSError itself.
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            # Mirror the headers an HTTP server would send for this file.
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            # Stream the file itself as the raw response body.
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close
        return resp

    def close(self):
        # No persistent state to dispose of.
        pass
def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)
    # Strip the scheme and any leading slashes, then URL-decode.
    path = urlparse.unquote(url[len('file:'):].lstrip('/'))
    if _url_drive_re.match(path):
        # Windows drive spec: 'c|/...' or 'c:/...' -> 'c:/...'
        path = path[0] + ':' + path[2:]
    elif not path.startswith(r'\\'):
        # if not a Windows UNC path
        path = '/' + path
    return path
# Matches a Windows drive spec at the start of a path, e.g. 'c:' or 'c|'.
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''

    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Dispatch to the handler for this FTP verb; a KeyError here means
        # an unsupported method.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''
        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

                # Decode the base64 encoded string; b64decode returns bytes
                # on Python 3, so decode back to text before splitting.
                decoded = base64.b64decode(encoded).decode('utf-8')

                # The string is of the form 'username:password'. Split on the
                # first colon only so passwords may themselves contain colons.
                username, _, password = decoded.partition(':')

            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        parsed = urlparse.urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it. Guard with
        # startswith so an empty path does not raise IndexError.
        if path.startswith('/'):
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    """Return a callback suitable for use by the FTP library.

    Each chunk the FTP library passes to the callback is written to
    *variable*, which should be a file-like object.
    """
    def _sink(chunk):
        variable.write(chunk)
    return _sink
# Raised by FTPAdapter when the Authorization header is not HTTP Basic.
class AuthError(Exception):
    '''Denotes an error with authentication.'''
    pass
def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    # Directory listings (LIST/NLST) are treated as ASCII text.
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    # encoding=None leaves the payload undecoded for the caller.
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Builds a response object from the data returned by ftplib, using the
    specified encoding.'''
    response = requests.Response()
    response.encoding = encoding
    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # ftplib status strings look like '226 Transfer complete.'; the leading
    # integer is used as the response status code.
    response.status_code = int(code.split()[0])
    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)
    # Run the response hook.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    # parse_multipart yields bytes chunks, so join with a bytes separator;
    # ''.join(filedata) raises TypeError on Python 3.
    buf.write(b''.join(filedata))
    buf.seek(0)

    return buf
# Taken from urllib3 (actually
# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to
# requests.packages.urllib3 we can just use that.
def unparse_url(U):
    """
    Convert a :class:`.Url` into a url

    The input can be any iterable that gives ['scheme', 'auth', 'host',
    'port', 'path', 'query', 'fragment']. Unused items should be None.

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port).

    Example: ::

        >>> Url = parse_url('http://google.com/mail/')
        >>> unparse_url(Url)
        'http://google.com/mail/'
        >>> unparse_url(['http', 'username:password', 'host.com', 80,
        ... '/path', 'query', 'fragment'])
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = U

    # A component is emitted whenever it "is not None": empty strings (and
    # port 0) must still be rendered; only None means "omit this part".
    pieces = []
    if scheme is not None:
        pieces.append(scheme + '://')
    if auth is not None:
        pieces.append(auth + '@')
    if host is not None:
        pieces.append(host)
    if port is not None:
        pieces.append(':' + str(port))
    if path is not None:
        pieces.append(path)
    if query is not None:
        pieces.append('?' + query)
    if fragment is not None:
        pieces.append('#' + fragment)
    return ''.join(pieces)
| conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -90,8 +90,6 @@ def __init__(self, *args, **kwargs):
proxies = get_proxy_servers()
if proxies:
self.proxies = proxies
- self.auth = NullAuth() # disable .netrc file. for reference, see
- # https://github.com/Anaconda-Platform/anaconda-client/pull/298
# Configure retries
if retries:
@@ -112,22 +110,6 @@ def __init__(self, *args, **kwargs):
self.verify = ssl_verify
-
-class NullAuth(requests.auth.AuthBase):
- '''force requests to ignore the ``.netrc``
- Some sites do not support regular authentication, but we still
- want to store credentials in the ``.netrc`` file and submit them
- as form elements. Without this, requests would otherwise use the
- .netrc which leads, on some sites, to a 401 error.
- https://github.com/kennethreitz/requests/issues/2773
- Use with::
- requests.get(url, auth=NullAuth())
- '''
-
- def __call__(self, r):
- return r
-
-
class S3Adapter(requests.adapters.BaseAdapter):
def __init__(self): |
conda update conda doesn't get latest conda-env
It's annoying we even have this problem, but...
```
root@default:~ # conda update conda
Fetching package metadata: ......
.Solving package specifications: .........
Package plan for installation in environment /usr/local:
The following packages will be downloaded:
package | build
---------------------------|-----------------
conda-env-2.5.0 | py27_0 28 KB
conda-4.1.2 | py27_0 198 KB
------------------------------------------------------------
Total: 226 KB
The following NEW packages will be INSTALLED:
ruamel_yaml: 0.11.7-py27_0
The following packages will be UPDATED:
conda: 4.0.5-py27_0 --> 4.1.2-py27_0
conda-env: 2.4.5-py27_0 --> 2.5.0-py27_0
Proceed ([y]/n)? y
Fetching packages ...
conda-env-2.5. 100% |#########################################################################################| Time: 0:00:00 587.12 kB/s
conda-4.1.2-py 100% |#########################################################################################| Time: 0:00:00 994.90 kB/s
Extracting packages ...
[ COMPLETE ]|############################################################################################################| 100%
Unlinking packages ...
[ COMPLETE ]|############################################################################################################| 100%
Linking packages ...
[ COMPLETE ]|############################################################################################################| 100%
root@default:~ # conda update conda-env
Fetching package metadata .........
Solving package specifications: ..........
Package plan for installation in environment /usr/local:
The following packages will be downloaded:
package | build
---------------------------|-----------------
conda-env-2.5.1 | py27_0 26 KB
The following packages will be UPDATED:
conda-env: 2.5.0-py27_0 --> 2.5.1-py27_0
Proceed ([y]/n)? y
Fetching packages ...
conda-env-2.5. 100% |#########################################################################################| Time: 0:00:00 569.65 kB/s
Extracting packages ...
[ COMPLETE ]|############################################################################################################| 100%
Unlinking packages ...
[ COMPLETE ]|############################################################################################################| 100%
Linking packages ...
[ COMPLETE ]|############################################################################################################| 100%
```
conda update conda doesn't get latest conda-env
It's annoying we even have this problem, but...
```
root@default:~ # conda update conda
Fetching package metadata: ......
.Solving package specifications: .........
Package plan for installation in environment /usr/local:
The following packages will be downloaded:
package | build
---------------------------|-----------------
conda-env-2.5.0 | py27_0 28 KB
conda-4.1.2 | py27_0 198 KB
------------------------------------------------------------
Total: 226 KB
The following NEW packages will be INSTALLED:
ruamel_yaml: 0.11.7-py27_0
The following packages will be UPDATED:
conda: 4.0.5-py27_0 --> 4.1.2-py27_0
conda-env: 2.4.5-py27_0 --> 2.5.0-py27_0
Proceed ([y]/n)? y
Fetching packages ...
conda-env-2.5. 100% |#########################################################################################| Time: 0:00:00 587.12 kB/s
conda-4.1.2-py 100% |#########################################################################################| Time: 0:00:00 994.90 kB/s
Extracting packages ...
[ COMPLETE ]|############################################################################################################| 100%
Unlinking packages ...
[ COMPLETE ]|############################################################################################################| 100%
Linking packages ...
[ COMPLETE ]|############################################################################################################| 100%
root@default:~ # conda update conda-env
Fetching package metadata .........
Solving package specifications: ..........
Package plan for installation in environment /usr/local:
The following packages will be downloaded:
package | build
---------------------------|-----------------
conda-env-2.5.1 | py27_0 26 KB
The following packages will be UPDATED:
conda-env: 2.5.0-py27_0 --> 2.5.1-py27_0
Proceed ([y]/n)? y
Fetching packages ...
conda-env-2.5. 100% |#########################################################################################| Time: 0:00:00 569.65 kB/s
Extracting packages ...
[ COMPLETE ]|############################################################################################################| 100%
Unlinking packages ...
[ COMPLETE ]|############################################################################################################| 100%
Linking packages ...
[ COMPLETE ]|############################################################################################################| 100%
```
| conda/plan.py
<|code_start|>
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from logging import getLogger
from os.path import abspath, basename, dirname, join, exists
from . import instructions as inst
from .config import (always_copy as config_always_copy, channel_priority,
show_channel_urls as config_show_channel_urls,
root_dir, allow_softlinks, default_python, auto_update_conda,
track_features, foreign, url_channel, canonical_channel_name)
from .exceptions import CondaException
from .history import History
from .install import (dist2quad, LINK_HARD, link_name_map, name_dist, is_fetched,
is_extracted, is_linked, find_new_location, dist2filename, LINK_COPY,
LINK_SOFT, try_hard_link, rm_rf)
from .resolve import MatchSpec, Resolve, Package
from .utils import md5_file, human_bytes
# For backwards compatibility
log = getLogger(__name__)
def print_dists(dists_extras):
fmt = " %-27s|%17s"
print(fmt % ('package', 'build'))
print(fmt % ('-' * 27, '-' * 17))
for dist, extra in dists_extras:
dist = dist2quad(dist)
line = fmt % (dist[0]+'-'+dist[1], dist[2])
if extra:
line += extra
print(line)
def display_actions(actions, index, show_channel_urls=None):
if show_channel_urls is None:
show_channel_urls = config_show_channel_urls
def channel_str(rec):
if 'schannel' in rec:
return rec['schannel']
if 'url' in rec:
return url_channel(rec['url'])[1]
if 'channel' in rec:
return canonical_channel_name(rec['channel'])
return '<unknown>'
def channel_filt(s):
if show_channel_urls is False:
return ''
if show_channel_urls is None and s == 'defaults':
return ''
return s
if actions.get(inst.FETCH):
print("\nThe following packages will be downloaded:\n")
disp_lst = []
for dist in actions[inst.FETCH]:
info = index[dist + '.tar.bz2']
extra = '%15s' % human_bytes(info['size'])
schannel = channel_filt(channel_str(info))
if schannel:
extra += ' ' + schannel
disp_lst.append((dist, extra))
print_dists(disp_lst)
if index and len(actions[inst.FETCH]) > 1:
num_bytes = sum(index[dist + '.tar.bz2']['size']
for dist in actions[inst.FETCH])
print(' ' * 4 + '-' * 60)
print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))
# package -> [oldver-oldbuild, newver-newbuild]
packages = defaultdict(lambda: list(('', '')))
features = defaultdict(lambda: list(('', '')))
channels = defaultdict(lambda: list(('', '')))
records = defaultdict(lambda: list((None, None)))
linktypes = {}
for arg in actions.get(inst.LINK, []):
dist, lt, shortcuts = inst.split_linkarg(arg)
fkey = dist + '.tar.bz2'
rec = index[fkey]
pkg = rec['name']
channels[pkg][1] = channel_str(rec)
packages[pkg][1] = rec['version'] + '-' + rec['build']
records[pkg][1] = Package(fkey, rec)
linktypes[pkg] = lt
features[pkg][1] = rec.get('features', '')
for arg in actions.get(inst.UNLINK, []):
dist, lt, shortcuts = inst.split_linkarg(arg)
fkey = dist + '.tar.bz2'
rec = index.get(fkey)
if rec is None:
pkg, ver, build, schannel = dist2quad(dist)
rec = dict(name=pkg, version=ver, build=build, channel=None,
schannel='<unknown>',
build_number=int(build) if build.isdigit() else 0)
pkg = rec['name']
channels[pkg][0] = channel_str(rec)
packages[pkg][0] = rec['version'] + '-' + rec['build']
records[pkg][0] = Package(fkey, rec)
features[pkg][0] = rec.get('features', '')
# Put a minimum length here---. .--For the :
# v v
new = {p for p in packages if not packages[p][0]}
removed = {p for p in packages if not packages[p][1]}
# New packages are actually listed in the left-hand column,
# so let's move them over there
for pkg in new:
for var in (packages, features, channels, records):
var[pkg] = var[pkg][::-1]
if packages:
maxpkg = max(len(p) for p in packages) + 1
maxoldver = max(len(p[0]) for p in packages.values())
maxnewver = max(len(p[1]) for p in packages.values())
maxoldfeatures = max(len(p[0]) for p in features.values())
maxnewfeatures = max(len(p[1]) for p in features.values())
maxoldchannels = max(len(channel_filt(p[0])) for p in channels.values())
maxnewchannels = max(len(channel_filt(p[1])) for p in channels.values())
updated = set()
downgraded = set()
channeled = set()
oldfmt = {}
newfmt = {}
for pkg in packages:
# That's right. I'm using old-style string formatting to generate a
# string with new-style string formatting.
oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
if maxoldchannels:
oldfmt[pkg] += ' {channels[0]:<%s}' % maxoldchannels
if features[pkg][0]:
oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
lt = linktypes.get(pkg, LINK_HARD)
lt = '' if lt == LINK_HARD else (' (%s)' % link_name_map[lt])
if pkg in removed or pkg in new:
oldfmt[pkg] += lt
continue
newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
if maxnewchannels:
newfmt[pkg] += ' {channels[1]:<%s}' % maxnewchannels
if features[pkg][1]:
newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
newfmt[pkg] += lt
P0 = records[pkg][0]
P1 = records[pkg][1]
pri0 = P0.priority
pri1 = P1.priority
if pri0 is None or pri1 is None:
pri0 = pri1 = 1
try:
if str(P1.version) == 'custom':
newver = str(P0.version) != 'custom'
oldver = not newver
else:
# <= here means that unchanged packages will be put in updated
newver = P0.norm_version < P1.norm_version
oldver = P0.norm_version > P1.norm_version
except TypeError:
newver = P0.version < P1.version
oldver = P0.version > P1.version
oldbld = P0.build_number > P1.build_number
if channel_priority and pri1 < pri0 and (oldver or not newver and oldbld):
channeled.add(pkg)
elif newver:
updated.add(pkg)
elif pri1 < pri0 and (oldver or not newver and oldbld):
channeled.add(pkg)
elif oldver:
downgraded.add(pkg)
elif not oldbld:
updated.add(pkg)
else:
downgraded.add(pkg)
arrow = ' --> '
lead = ' ' * 4
def format(s, pkg):
chans = [channel_filt(c) for c in channels[pkg]]
return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
channels=chans, features=features[pkg])
if new:
print("\nThe following NEW packages will be INSTALLED:\n")
for pkg in sorted(new):
# New packages have been moved to the "old" column for display
print(format(oldfmt[pkg], pkg))
if removed:
print("\nThe following packages will be REMOVED:\n")
for pkg in sorted(removed):
print(format(oldfmt[pkg], pkg))
if updated:
print("\nThe following packages will be UPDATED:\n")
for pkg in sorted(updated):
print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
if channeled:
print("\nThe following packages will be SUPERCEDED by a higher-priority channel:\n")
for pkg in sorted(channeled):
print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
if downgraded:
print("\nThe following packages will be DOWNGRADED due to dependency conflicts:\n")
for pkg in sorted(downgraded):
print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
print()
def nothing_to_do(actions):
for op in inst.action_codes:
if actions.get(op):
return False
return True
def add_unlink(actions, dist):
if inst.UNLINK not in actions:
actions[inst.UNLINK] = []
actions[inst.UNLINK].append(dist)
def plan_from_actions(actions):
if 'op_order' in actions and actions['op_order']:
op_order = actions['op_order']
else:
op_order = inst.action_codes
assert inst.PREFIX in actions and actions[inst.PREFIX]
res = [('PREFIX', '%s' % actions[inst.PREFIX])]
if sys.platform == 'win32':
# Always link/unlink menuinst first on windows in case a subsequent
# package tries to import it to create/remove a shortcut
for op in (inst.UNLINK, inst.FETCH, inst.EXTRACT, inst.LINK):
if op in actions:
pkgs = []
for pkg in actions[op]:
if 'menuinst' in pkg:
res.append((op, pkg))
else:
pkgs.append(pkg)
actions[op] = pkgs
for op in op_order:
if op not in actions:
continue
if not actions[op]:
continue
if '_' not in op:
res.append((inst.PRINT, '%sing packages ...' % op.capitalize()))
elif op.startswith('RM_'):
res.append((inst.PRINT, 'Pruning %s packages from the cache ...' % op[3:].lower()))
if op in inst.progress_cmds:
res.append((inst.PROGRESS, '%d' % len(actions[op])))
for arg in actions[op]:
res.append((op, arg))
return res
# force_linked_actions has now been folded into this function, and is enabled by
# supplying an index and setting force=True
def ensure_linked_actions(dists, prefix, index=None, force=False,
always_copy=False, shortcuts=False):
actions = defaultdict(list)
actions[inst.PREFIX] = prefix
actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
inst.EXTRACT, inst.UNLINK, inst.LINK)
for dist in dists:
fetched_in = is_fetched(dist)
extracted_in = is_extracted(dist)
if fetched_in and index is not None:
# Test the MD5, and possibly re-fetch
fn = dist + '.tar.bz2'
try:
if md5_file(fetched_in) != index[fn]['md5']:
# RM_FETCHED now removes the extracted data too
actions[inst.RM_FETCHED].append(dist)
# Re-fetch, re-extract, re-link
fetched_in = extracted_in = None
force = True
except KeyError:
sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
if not force and is_linked(prefix, dist):
continue
if extracted_in and force:
# Always re-extract in the force case
actions[inst.RM_EXTRACTED].append(dist)
extracted_in = None
# Otherwise we need to extract, and possibly fetch
if not extracted_in and not fetched_in:
# If there is a cache conflict, clean it up
fetched_in, conflict = find_new_location(dist)
fetched_in = join(fetched_in, dist2filename(dist))
if conflict is not None:
actions[inst.RM_FETCHED].append(conflict)
actions[inst.FETCH].append(dist)
if not extracted_in:
actions[inst.EXTRACT].append(dist)
fetched_dist = extracted_in or fetched_in[:-8]
fetched_dir = dirname(fetched_dist)
try:
# Determine what kind of linking is necessary
if not extracted_in:
# If not already extracted, create some dummy
# data to test with
rm_rf(fetched_dist)
ppath = join(fetched_dist, 'info')
os.makedirs(ppath)
index_json = join(ppath, 'index.json')
with open(index_json, 'w'):
pass
if config_always_copy or always_copy:
lt = LINK_COPY
elif try_hard_link(fetched_dir, prefix, dist):
lt = LINK_HARD
elif allow_softlinks and sys.platform != 'win32':
lt = LINK_SOFT
else:
lt = LINK_COPY
actions[inst.LINK].append('%s %d %s' % (dist, lt, shortcuts))
except (OSError, IOError):
actions[inst.LINK].append('%s %d %s' % (dist, LINK_COPY, shortcuts))
finally:
if not extracted_in:
# Remove the dummy data
try:
rm_rf(fetched_dist)
except (OSError, IOError):
pass
return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
return abspath(prefix) == abspath(root_dir)
def add_defaults_to_specs(r, linked, specs, update=False):
# TODO: This should use the pinning mechanism. But don't change the API:
# cas uses it.
if r.explicit(specs):
return
log.debug('H0 specs=%r' % specs)
linked = [d if d.endswith('.tar.bz2') else d + '.tar.bz2' for d in linked]
names_linked = {r.index[fn]['name']: fn for fn in linked if fn in r.index}
mspecs = list(map(MatchSpec, specs))
for name, def_ver in [('python', default_python),
# Default version required, but only used for Python
('lua', None)]:
if any(s.name == name and not s.is_simple() for s in mspecs):
# if any of the specifications mention the Python/Numpy version,
# we don't need to add the default spec
log.debug('H1 %s' % name)
continue
depends_on = {s for s in mspecs if r.depends_on(s, name)}
any_depends_on = bool(depends_on)
log.debug('H2 %s %s' % (name, any_depends_on))
if not any_depends_on:
# if nothing depends on Python/Numpy AND the Python/Numpy is not
# specified, we don't need to add the default spec
log.debug('H2A %s' % name)
continue
if any(s.is_exact() for s in depends_on):
# If something depends on Python/Numpy, but the spec is very
# explicit, we also don't need to add the default spec
log.debug('H2B %s' % name)
continue
if name in names_linked:
# if Python/Numpy is already linked, we add that instead of the
# default
log.debug('H3 %s' % name)
fkey = names_linked[name]
info = r.index[fkey]
ver = '.'.join(info['version'].split('.', 2)[:2])
spec = '%s %s* (target=%s)' % (info['name'], ver, fkey)
specs.append(spec)
continue
if name == 'python' and def_ver.startswith('3.'):
# Don't include Python 3 in the specs if this is the Python 3
# version of conda.
continue
if def_ver is not None:
specs.append('%s %s*' % (name, def_ver))
log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
pinfile = join(prefix, 'conda-meta', 'pinned')
if not exists(pinfile):
return []
with open(pinfile) as f:
return [i for i in f.read().strip().splitlines() if i and not i.strip().startswith('#')]
def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False,
pinned=True, minimal_hint=False, update_deps=True, prune=False,
shortcuts=False):
r = Resolve(index)
linked = r.installed
if auto_update_conda and is_root_prefix(prefix):
specs.append('conda')
if pinned:
pinned_specs = get_pinned_specs(prefix)
log.debug("Pinned specs=%s" % pinned_specs)
specs += pinned_specs
must_have = {}
if track_features:
specs.extend(x + '@' for x in track_features)
pkgs = r.install(specs, linked, update_deps=update_deps)
for fn in pkgs:
dist = fn[:-8]
name = name_dist(dist)
if not name or only_names and name not in only_names:
continue
must_have[name] = dist
if is_root_prefix(prefix):
for name in foreign:
if name in must_have:
del must_have[name]
elif basename(prefix).startswith('_'):
# anything (including conda) can be installed into environments
# starting with '_', mainly to allow conda-build to build conda
pass
else:
# disallow conda from being installed into all other environments
if 'conda' in must_have or 'conda-env' in must_have:
sys.exit("Error: 'conda' can only be installed into the "
"root environment")
smh = r.dependency_sort(must_have)
actions = ensure_linked_actions(
smh, prefix,
index=index if force else None,
force=force, always_copy=always_copy,
shortcuts=shortcuts)
if actions[inst.LINK]:
actions[inst.SYMLINK_CONDA] = [root_dir]
for fkey in sorted(linked):
dist = fkey[:-8]
name = name_dist(dist)
replace_existing = name in must_have and dist != must_have[name]
prune_it = prune and dist not in smh
if replace_existing or prune_it:
add_unlink(actions, dist)
return actions
def remove_actions(prefix, specs, index, force=False, pinned=True):
r = Resolve(index)
linked = r.installed
if force:
mss = list(map(MatchSpec, specs))
nlinked = {r.package_name(fn): fn[:-8]
for fn in linked
if not any(r.match(ms, fn) for ms in mss)}
else:
add_defaults_to_specs(r, linked, specs, update=True)
nlinked = {r.package_name(fn): fn[:-8] for fn in r.remove(specs, linked)}
if pinned:
pinned_specs = get_pinned_specs(prefix)
log.debug("Pinned specs=%s" % pinned_specs)
linked = {r.package_name(fn): fn[:-8] for fn in linked}
actions = ensure_linked_actions(r.dependency_sort(nlinked), prefix)
for old_fn in reversed(r.dependency_sort(linked)):
dist = old_fn + '.tar.bz2'
name = r.package_name(dist)
if old_fn == nlinked.get(name, ''):
continue
if pinned and any(r.match(ms, dist) for ms in pinned_specs):
msg = "Cannot remove %s becaue it is pinned. Use --no-pin to override."
raise RuntimeError(msg % dist)
if name == 'conda' and name not in nlinked:
if any(s.split(' ', 1)[0] == 'conda' for s in specs):
sys.exit("Error: 'conda' cannot be removed from the root environment")
else:
msg = ("Error: this 'remove' command cannot be executed because it\n"
"would require removing 'conda' dependencies")
sys.exit(msg)
add_unlink(actions, old_fn)
return actions
def remove_features_actions(prefix, index, features):
r = Resolve(index)
linked = r.installed
actions = defaultdict(list)
actions[inst.PREFIX] = prefix
_linked = [d + '.tar.bz2' for d in linked]
to_link = []
for dist in sorted(linked):
fn = dist + '.tar.bz2'
if fn not in index:
continue
if r.track_features(fn).intersection(features):
add_unlink(actions, dist)
if r.features(fn).intersection(features):
add_unlink(actions, dist)
subst = r.find_substitute(_linked, features, fn)
if subst:
to_link.append(subst[:-8])
if to_link:
actions.update(ensure_linked_actions(to_link, prefix))
return actions
def revert_actions(prefix, revision=-1):
h = History(prefix)
h.update()
try:
state = h.get_state(revision)
except IndexError:
sys.exit("Error: no such revision: %d" % revision)
curr = h.get_state()
if state == curr:
return {}
actions = ensure_linked_actions(state, prefix)
for dist in curr - state:
add_unlink(actions, dist)
return actions
# ---------------------------- EXECUTION --------------------------
def execute_actions(actions, index=None, verbose=False):
plan = plan_from_actions(actions)
with History(actions[inst.PREFIX]):
inst.execute_instructions(plan, index, verbose)
def update_old_plan(old_plan):
"""
Update an old plan object to work with
`conda.instructions.execute_instructions`
"""
plan = []
for line in old_plan:
if line.startswith('#'):
continue
if ' ' not in line:
raise CondaException(
"The instruction '%s' takes at least one argument" % line
)
instruction, arg = line.split(' ', 1)
plan.append((instruction, arg))
return plan
def execute_plan(old_plan, index=None, verbose=False):
"""
Deprecated: This should `conda.instructions.execute_instructions` instead
"""
plan = update_old_plan(old_plan)
inst.execute_instructions(plan, index, verbose)
if __name__ == '__main__':
# for testing new revert_actions() only
from pprint import pprint
pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
<|code_end|>
| conda/plan.py
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import os
import sys
from collections import defaultdict
from logging import getLogger
from os.path import abspath, basename, dirname, join, exists
from . import instructions as inst
from .config import (always_copy as config_always_copy, channel_priority,
show_channel_urls as config_show_channel_urls,
root_dir, allow_softlinks, default_python, auto_update_conda,
track_features, foreign, url_channel, canonical_channel_name)
from .exceptions import CondaException
from .history import History
from .install import (dist2quad, LINK_HARD, link_name_map, name_dist, is_fetched,
is_extracted, is_linked, find_new_location, dist2filename, LINK_COPY,
LINK_SOFT, try_hard_link, rm_rf)
from .resolve import MatchSpec, Resolve, Package
from .utils import md5_file, human_bytes
# For backwards compatibility
log = getLogger(__name__)
def print_dists(dists_extras):
fmt = " %-27s|%17s"
print(fmt % ('package', 'build'))
print(fmt % ('-' * 27, '-' * 17))
for dist, extra in dists_extras:
dist = dist2quad(dist)
line = fmt % (dist[0]+'-'+dist[1], dist[2])
if extra:
line += extra
print(line)
def display_actions(actions, index, show_channel_urls=None):
if show_channel_urls is None:
show_channel_urls = config_show_channel_urls
def channel_str(rec):
if 'schannel' in rec:
return rec['schannel']
if 'url' in rec:
return url_channel(rec['url'])[1]
if 'channel' in rec:
return canonical_channel_name(rec['channel'])
return '<unknown>'
def channel_filt(s):
if show_channel_urls is False:
return ''
if show_channel_urls is None and s == 'defaults':
return ''
return s
if actions.get(inst.FETCH):
print("\nThe following packages will be downloaded:\n")
disp_lst = []
for dist in actions[inst.FETCH]:
info = index[dist + '.tar.bz2']
extra = '%15s' % human_bytes(info['size'])
schannel = channel_filt(channel_str(info))
if schannel:
extra += ' ' + schannel
disp_lst.append((dist, extra))
print_dists(disp_lst)
if index and len(actions[inst.FETCH]) > 1:
num_bytes = sum(index[dist + '.tar.bz2']['size']
for dist in actions[inst.FETCH])
print(' ' * 4 + '-' * 60)
print(" " * 43 + "Total: %14s" % human_bytes(num_bytes))
# package -> [oldver-oldbuild, newver-newbuild]
packages = defaultdict(lambda: list(('', '')))
features = defaultdict(lambda: list(('', '')))
channels = defaultdict(lambda: list(('', '')))
records = defaultdict(lambda: list((None, None)))
linktypes = {}
for arg in actions.get(inst.LINK, []):
dist, lt, shortcuts = inst.split_linkarg(arg)
fkey = dist + '.tar.bz2'
rec = index[fkey]
pkg = rec['name']
channels[pkg][1] = channel_str(rec)
packages[pkg][1] = rec['version'] + '-' + rec['build']
records[pkg][1] = Package(fkey, rec)
linktypes[pkg] = lt
features[pkg][1] = rec.get('features', '')
for arg in actions.get(inst.UNLINK, []):
dist, lt, shortcuts = inst.split_linkarg(arg)
fkey = dist + '.tar.bz2'
rec = index.get(fkey)
if rec is None:
pkg, ver, build, schannel = dist2quad(dist)
rec = dict(name=pkg, version=ver, build=build, channel=None,
schannel='<unknown>',
build_number=int(build) if build.isdigit() else 0)
pkg = rec['name']
channels[pkg][0] = channel_str(rec)
packages[pkg][0] = rec['version'] + '-' + rec['build']
records[pkg][0] = Package(fkey, rec)
features[pkg][0] = rec.get('features', '')
# Put a minimum length here---. .--For the :
# v v
new = {p for p in packages if not packages[p][0]}
removed = {p for p in packages if not packages[p][1]}
# New packages are actually listed in the left-hand column,
# so let's move them over there
for pkg in new:
for var in (packages, features, channels, records):
var[pkg] = var[pkg][::-1]
if packages:
maxpkg = max(len(p) for p in packages) + 1
maxoldver = max(len(p[0]) for p in packages.values())
maxnewver = max(len(p[1]) for p in packages.values())
maxoldfeatures = max(len(p[0]) for p in features.values())
maxnewfeatures = max(len(p[1]) for p in features.values())
maxoldchannels = max(len(channel_filt(p[0])) for p in channels.values())
maxnewchannels = max(len(channel_filt(p[1])) for p in channels.values())
updated = set()
downgraded = set()
channeled = set()
oldfmt = {}
newfmt = {}
for pkg in packages:
# That's right. I'm using old-style string formatting to generate a
# string with new-style string formatting.
oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
if maxoldchannels:
oldfmt[pkg] += ' {channels[0]:<%s}' % maxoldchannels
if features[pkg][0]:
oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
lt = linktypes.get(pkg, LINK_HARD)
lt = '' if lt == LINK_HARD else (' (%s)' % link_name_map[lt])
if pkg in removed or pkg in new:
oldfmt[pkg] += lt
continue
newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
if maxnewchannels:
newfmt[pkg] += ' {channels[1]:<%s}' % maxnewchannels
if features[pkg][1]:
newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
newfmt[pkg] += lt
P0 = records[pkg][0]
P1 = records[pkg][1]
pri0 = P0.priority
pri1 = P1.priority
if pri0 is None or pri1 is None:
pri0 = pri1 = 1
try:
if str(P1.version) == 'custom':
newver = str(P0.version) != 'custom'
oldver = not newver
else:
# <= here means that unchanged packages will be put in updated
newver = P0.norm_version < P1.norm_version
oldver = P0.norm_version > P1.norm_version
except TypeError:
newver = P0.version < P1.version
oldver = P0.version > P1.version
oldbld = P0.build_number > P1.build_number
if channel_priority and pri1 < pri0 and (oldver or not newver and oldbld):
channeled.add(pkg)
elif newver:
updated.add(pkg)
elif pri1 < pri0 and (oldver or not newver and oldbld):
channeled.add(pkg)
elif oldver:
downgraded.add(pkg)
elif not oldbld:
updated.add(pkg)
else:
downgraded.add(pkg)
arrow = ' --> '
lead = ' ' * 4
def format(s, pkg):
chans = [channel_filt(c) for c in channels[pkg]]
return lead + s.format(pkg=pkg + ':', vers=packages[pkg],
channels=chans, features=features[pkg])
if new:
print("\nThe following NEW packages will be INSTALLED:\n")
for pkg in sorted(new):
# New packages have been moved to the "old" column for display
print(format(oldfmt[pkg], pkg))
if removed:
print("\nThe following packages will be REMOVED:\n")
for pkg in sorted(removed):
print(format(oldfmt[pkg], pkg))
if updated:
print("\nThe following packages will be UPDATED:\n")
for pkg in sorted(updated):
print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
if channeled:
print("\nThe following packages will be SUPERCEDED by a higher-priority channel:\n")
for pkg in sorted(channeled):
print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
if downgraded:
print("\nThe following packages will be DOWNGRADED due to dependency conflicts:\n")
for pkg in sorted(downgraded):
print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
print()
def nothing_to_do(actions):
for op in inst.action_codes:
if actions.get(op):
return False
return True
def add_unlink(actions, dist):
if inst.UNLINK not in actions:
actions[inst.UNLINK] = []
actions[inst.UNLINK].append(dist)
def plan_from_actions(actions):
if 'op_order' in actions and actions['op_order']:
op_order = actions['op_order']
else:
op_order = inst.action_codes
assert inst.PREFIX in actions and actions[inst.PREFIX]
res = [('PREFIX', '%s' % actions[inst.PREFIX])]
if sys.platform == 'win32':
# Always link/unlink menuinst first on windows in case a subsequent
# package tries to import it to create/remove a shortcut
for op in (inst.UNLINK, inst.FETCH, inst.EXTRACT, inst.LINK):
if op in actions:
pkgs = []
for pkg in actions[op]:
if 'menuinst' in pkg:
res.append((op, pkg))
else:
pkgs.append(pkg)
actions[op] = pkgs
for op in op_order:
if op not in actions:
continue
if not actions[op]:
continue
if '_' not in op:
res.append((inst.PRINT, '%sing packages ...' % op.capitalize()))
elif op.startswith('RM_'):
res.append((inst.PRINT, 'Pruning %s packages from the cache ...' % op[3:].lower()))
if op in inst.progress_cmds:
res.append((inst.PROGRESS, '%d' % len(actions[op])))
for arg in actions[op]:
res.append((op, arg))
return res
# force_linked_actions has now been folded into this function, and is enabled by
# supplying an index and setting force=True
def ensure_linked_actions(dists, prefix, index=None, force=False,
always_copy=False, shortcuts=False):
actions = defaultdict(list)
actions[inst.PREFIX] = prefix
actions['op_order'] = (inst.RM_FETCHED, inst.FETCH, inst.RM_EXTRACTED,
inst.EXTRACT, inst.UNLINK, inst.LINK)
for dist in dists:
fetched_in = is_fetched(dist)
extracted_in = is_extracted(dist)
if fetched_in and index is not None:
# Test the MD5, and possibly re-fetch
fn = dist + '.tar.bz2'
try:
if md5_file(fetched_in) != index[fn]['md5']:
# RM_FETCHED now removes the extracted data too
actions[inst.RM_FETCHED].append(dist)
# Re-fetch, re-extract, re-link
fetched_in = extracted_in = None
force = True
except KeyError:
sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
if not force and is_linked(prefix, dist):
continue
if extracted_in and force:
# Always re-extract in the force case
actions[inst.RM_EXTRACTED].append(dist)
extracted_in = None
# Otherwise we need to extract, and possibly fetch
if not extracted_in and not fetched_in:
# If there is a cache conflict, clean it up
fetched_in, conflict = find_new_location(dist)
fetched_in = join(fetched_in, dist2filename(dist))
if conflict is not None:
actions[inst.RM_FETCHED].append(conflict)
actions[inst.FETCH].append(dist)
if not extracted_in:
actions[inst.EXTRACT].append(dist)
fetched_dist = extracted_in or fetched_in[:-8]
fetched_dir = dirname(fetched_dist)
try:
# Determine what kind of linking is necessary
if not extracted_in:
# If not already extracted, create some dummy
# data to test with
rm_rf(fetched_dist)
ppath = join(fetched_dist, 'info')
os.makedirs(ppath)
index_json = join(ppath, 'index.json')
with open(index_json, 'w'):
pass
if config_always_copy or always_copy:
lt = LINK_COPY
elif try_hard_link(fetched_dir, prefix, dist):
lt = LINK_HARD
elif allow_softlinks and sys.platform != 'win32':
lt = LINK_SOFT
else:
lt = LINK_COPY
actions[inst.LINK].append('%s %d %s' % (dist, lt, shortcuts))
except (OSError, IOError):
actions[inst.LINK].append('%s %d %s' % (dist, LINK_COPY, shortcuts))
finally:
if not extracted_in:
# Remove the dummy data
try:
rm_rf(fetched_dist)
except (OSError, IOError):
pass
return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
return abspath(prefix) == abspath(root_dir)
def add_defaults_to_specs(r, linked, specs, update=False):
# TODO: This should use the pinning mechanism. But don't change the API:
# cas uses it.
if r.explicit(specs):
return
log.debug('H0 specs=%r' % specs)
linked = [d if d.endswith('.tar.bz2') else d + '.tar.bz2' for d in linked]
names_linked = {r.index[fn]['name']: fn for fn in linked if fn in r.index}
mspecs = list(map(MatchSpec, specs))
for name, def_ver in [('python', default_python),
# Default version required, but only used for Python
('lua', None)]:
if any(s.name == name and not s.is_simple() for s in mspecs):
# if any of the specifications mention the Python/Numpy version,
# we don't need to add the default spec
log.debug('H1 %s' % name)
continue
depends_on = {s for s in mspecs if r.depends_on(s, name)}
any_depends_on = bool(depends_on)
log.debug('H2 %s %s' % (name, any_depends_on))
if not any_depends_on:
# if nothing depends on Python/Numpy AND the Python/Numpy is not
# specified, we don't need to add the default spec
log.debug('H2A %s' % name)
continue
if any(s.is_exact() for s in depends_on):
# If something depends on Python/Numpy, but the spec is very
# explicit, we also don't need to add the default spec
log.debug('H2B %s' % name)
continue
if name in names_linked:
# if Python/Numpy is already linked, we add that instead of the
# default
log.debug('H3 %s' % name)
fkey = names_linked[name]
info = r.index[fkey]
ver = '.'.join(info['version'].split('.', 2)[:2])
spec = '%s %s* (target=%s)' % (info['name'], ver, fkey)
specs.append(spec)
continue
if name == 'python' and def_ver.startswith('3.'):
# Don't include Python 3 in the specs if this is the Python 3
# version of conda.
continue
if def_ver is not None:
specs.append('%s %s*' % (name, def_ver))
log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
pinfile = join(prefix, 'conda-meta', 'pinned')
if not exists(pinfile):
return []
with open(pinfile) as f:
return [i for i in f.read().strip().splitlines() if i and not i.strip().startswith('#')]
def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False,
pinned=True, minimal_hint=False, update_deps=True, prune=False,
shortcuts=False):
r = Resolve(index)
linked = r.installed
if auto_update_conda and is_root_prefix(prefix):
specs.append('conda')
specs.append('conda-env')
if pinned:
pinned_specs = get_pinned_specs(prefix)
log.debug("Pinned specs=%s" % pinned_specs)
specs += pinned_specs
must_have = {}
if track_features:
specs.extend(x + '@' for x in track_features)
pkgs = r.install(specs, linked, update_deps=update_deps)
for fn in pkgs:
dist = fn[:-8]
name = name_dist(dist)
if not name or only_names and name not in only_names:
continue
must_have[name] = dist
if is_root_prefix(prefix):
for name in foreign:
if name in must_have:
del must_have[name]
elif basename(prefix).startswith('_'):
# anything (including conda) can be installed into environments
# starting with '_', mainly to allow conda-build to build conda
pass
else:
# disallow conda from being installed into all other environments
if 'conda' in must_have or 'conda-env' in must_have:
sys.exit("Error: 'conda' can only be installed into the "
"root environment")
smh = r.dependency_sort(must_have)
actions = ensure_linked_actions(
smh, prefix,
index=index if force else None,
force=force, always_copy=always_copy,
shortcuts=shortcuts)
if actions[inst.LINK]:
actions[inst.SYMLINK_CONDA] = [root_dir]
for fkey in sorted(linked):
dist = fkey[:-8]
name = name_dist(dist)
replace_existing = name in must_have and dist != must_have[name]
prune_it = prune and dist not in smh
if replace_existing or prune_it:
add_unlink(actions, dist)
return actions
def remove_actions(prefix, specs, index, force=False, pinned=True):
r = Resolve(index)
linked = r.installed
if force:
mss = list(map(MatchSpec, specs))
nlinked = {r.package_name(fn): fn[:-8]
for fn in linked
if not any(r.match(ms, fn) for ms in mss)}
else:
add_defaults_to_specs(r, linked, specs, update=True)
nlinked = {r.package_name(fn): fn[:-8] for fn in r.remove(specs, linked)}
if pinned:
pinned_specs = get_pinned_specs(prefix)
log.debug("Pinned specs=%s" % pinned_specs)
linked = {r.package_name(fn): fn[:-8] for fn in linked}
actions = ensure_linked_actions(r.dependency_sort(nlinked), prefix)
for old_fn in reversed(r.dependency_sort(linked)):
dist = old_fn + '.tar.bz2'
name = r.package_name(dist)
if old_fn == nlinked.get(name, ''):
continue
if pinned and any(r.match(ms, dist) for ms in pinned_specs):
msg = "Cannot remove %s becaue it is pinned. Use --no-pin to override."
raise RuntimeError(msg % dist)
if name == 'conda' and name not in nlinked:
if any(s.split(' ', 1)[0] == 'conda' for s in specs):
sys.exit("Error: 'conda' cannot be removed from the root environment")
else:
msg = ("Error: this 'remove' command cannot be executed because it\n"
"would require removing 'conda' dependencies")
sys.exit(msg)
add_unlink(actions, old_fn)
return actions
def remove_features_actions(prefix, index, features):
    """Build an actions dict that unlinks every installed package providing
    or tracking one of *features*, relinking a feature-free substitute
    build where ``Resolve.find_substitute`` can locate one.
    """
    r = Resolve(index)
    linked = r.installed

    actions = defaultdict(list)
    actions[inst.PREFIX] = prefix
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            # installed package unknown to the index; nothing we can decide
            continue
        if r.track_features(fn).intersection(features):
            add_unlink(actions, dist)
        if r.features(fn).intersection(features):
            add_unlink(actions, dist)
            # try to replace the removed package with an equivalent build
            # that does not carry the unwanted feature
            subst = r.find_substitute(_linked, features, fn)
            if subst:
                to_link.append(subst[:-8])

    if to_link:
        actions.update(ensure_linked_actions(to_link, prefix))
    return actions
def revert_actions(prefix, revision=-1):
    """Compute the actions that restore *prefix* to a recorded history
    revision; returns an empty dict when already at that state."""
    history = History(prefix)
    history.update()
    try:
        target_state = history.get_state(revision)
    except IndexError:
        sys.exit("Error: no such revision: %d" % revision)
    current_state = history.get_state()
    if target_state == current_state:
        return {}
    actions = ensure_linked_actions(target_state, prefix)
    for dist in current_state - target_state:
        add_unlink(actions, dist)
    return actions
# ---------------------------- EXECUTION --------------------------

def execute_actions(actions, index=None, verbose=False):
    """Translate an actions dict into an instruction plan and execute it,
    recording the whole transaction in the target environment's history."""
    plan = plan_from_actions(actions)
    with History(actions[inst.PREFIX]):
        inst.execute_instructions(plan, index, verbose)
def update_old_plan(old_plan):
    """
    Update an old plan object to work with
    `conda.instructions.execute_instructions`.

    Comment lines (starting with '#') are dropped; every other line must be
    "INSTRUCTION ARG..." and becomes an (instruction, arg) tuple.
    """
    converted = []
    for entry in old_plan:
        if entry.startswith('#'):
            continue  # skip comment lines
        instruction, sep, arg = entry.partition(' ')
        if not sep:
            raise CondaException(
                "The instruction '%s' takes at least one argument" % entry
            )
        converted.append((instruction, arg))
    return converted
def execute_plan(old_plan, index=None, verbose=False):
    """
    Deprecated: use `conda.instructions.execute_instructions` instead.
    """
    # convert the legacy textual plan, then delegate to the new executor
    plan = update_old_plan(old_plan)
    inst.execute_instructions(plan, index, verbose)
if __name__ == '__main__':
    # for testing new revert_actions() only
    # usage: python plan.py REVISION  (prints the computed actions dict)
    from pprint import pprint
    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
| conda/plan.py
--- a/conda/plan.py
+++ b/conda/plan.py
@@ -442,6 +442,7 @@ def install_actions(prefix, index, specs, force=False, only_names=None, always_c
if auto_update_conda and is_root_prefix(prefix):
specs.append('conda')
+ specs.append('conda-env')
if pinned:
pinned_specs = get_pinned_specs(prefix) |
source activate without arguments breaks source activate/deactivate commands
Running `source activate` without any further arguments on Linux with the latest release makes it so that further activate/deactivate commands do not work. For example:
```
ihenriksen@ubuntu:~$ source activate
prepending /home/ihenriksen/miniconda2/envs/bin to PATH
ihenriksen@ubuntu:~$ source activate py27
bash: activate: No such file or directory
ihenriksen@ubuntu:~$ source deactivate
bash: deactivate: No such file or directory
ihenriksen@ubuntu:~$
```
This isn't at all a blocking issue for me, but it seems like something worth revisiting when we can.
| conda/cli/activate.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (shells, run_in)
on_win = sys.platform == "win32"
def help(command, shell):
    """Print usage text for an internal activate/deactivate command and exit.

    *command* is sys.argv[1]; it will be '..checkenv' in activate if an
    environment is already activated.  *shell* (the invoking shell's name)
    selects Windows vs. Unix wording.
    """
    windows_shell = shell in ["cmd.exe", "powershell.exe"]
    if command in ('..activate', '..checkenv'):
        if windows_shell:
            sys.exit("""Usage: activate ENV
Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
        sys.exit("""Usage: source activate ENV
Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
    if command == '..deactivate':
        if windows_shell:
            sys.exit("""Usage: deactivate
Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH.""")
        sys.exit("""Usage: source deactivate
Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
    sys.exit("No help available for command %s" % sys.argv[1])
def prefix_from_arg(arg, shelldict):
    'Returns a platform-native path'
    # MSYS2 converts Unix paths to Windows paths with unix seps,
    # so we must check for the drive identifier too.
    looks_like_path = shelldict['sep'] in arg and not re.match('[a-zA-Z]:', arg)
    if looks_like_path:
        # strip is removing " marks, not \ - look carefully
        native_path = shelldict['path_from'](arg)
        candidate = abspath(native_path.strip("\""))
        if not isdir(candidate):
            raise ValueError('could not find environment: %s' % native_path)
        return candidate
    # not a path: treat the argument as an environment name
    prefix = find_prefix_name(arg.replace('/', os.path.sep))
    if prefix is None:
        raise ValueError('could not find environment: %s' % arg)
    return prefix
def binpath_from_arg(arg, shelldict):
    """Return the list of shell-native binary directories for the
    environment named (or located) by *arg*."""
    # prefix comes back as a platform-native path
    prefix = prefix_from_arg(arg, shelldict=shelldict)
    if sys.platform == 'win32':
        subdirs = [
            prefix.rstrip("\\"),
            os.path.join(prefix, 'Library', 'mingw-w64', 'bin'),
            os.path.join(prefix, 'Library', 'usr', 'bin'),
            os.path.join(prefix, 'Library', 'bin'),
            os.path.join(prefix, 'Scripts'),
        ]
    else:
        subdirs = [os.path.join(prefix, 'bin')]
    # convert each path to the shell's native representation
    convert = shelldict['path_to']
    return [convert(p) for p in subdirs]
def pathlist_to_str(paths, escape_backslashes=True):
    """
    Format a path list, e.g., of bin paths to be added or removed,
    for user-friendly output.
    """
    joined = ' and '.join(paths)
    if on_win and escape_backslashes:
        # escape for printing to console - ends up as single \
        return re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', joined)
    return joined.replace("\\\\", "\\")
def get_path(shelldict):
    """Get path using a subprocess call.

    os.getenv path isn't good for us, since bash on windows has a wildly
    different path from Windows.

    This returns PATH in the native representation of the shell - not
    necessarily the native representation of the platform.
    """
    # run_in returns (stdout, stderr); stdout holds the shell's own PATH
    return run_in(shelldict["printpath"], shelldict)[0]
def main():
    """Dispatch the internal ``..activate`` / ``..checkenv`` / ``..setps1``
    commands invoked by the activate/deactivate shell wrappers.

    argv layout: [script, command, shell, env_name_or_path].  The resulting
    PATH (or prompt) value is printed to stdout for the shell script to
    consume.
    """
    from conda.config import root_env_name, root_dir, changeps1
    import conda.install

    if '-h' in sys.argv or '--help' in sys.argv:
        # all execution paths sys.exit at end.
        help(sys.argv[1], sys.argv[2])

    shell = sys.argv[2]
    shelldict = shells[shell]
    if sys.argv[1] == '..activate':
        path = get_path(shelldict)
        # Require an explicit environment argument.  Defaulting to the root
        # environment here used to prepend a bogus ``envs/bin`` entry to
        # PATH, which broke all subsequent activate/deactivate commands.
        if len(sys.argv) == 4:
            binpath = binpath_from_arg(sys.argv[3], shelldict=shelldict)
            rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
        else:
            sys.exit("Error: ..activate expected exactly two arguments: shell and env name")
        pathlist_str = pathlist_to_str(binpath)
        sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))

        # Clear the root path if it is present
        if rootpath:
            path = path.replace(shelldict['pathsep'].join(rootpath), "")
        path = path.lstrip()
        # prepend our new entries onto the existing path and make sure that the separator is native
        path = shelldict['pathsep'].join(binpath + [path, ])
        # Clean up any doubled-up path separators
        path = path.replace(shelldict['pathsep'] * 2, shelldict['pathsep'])

    # deactivation is handled completely in shell scripts - it restores backups of env variables.
    # It is done in shell scripts because they handle state much better than we can here.

    elif sys.argv[1] == '..checkenv':
        if len(sys.argv) < 4:
            # an env argument is mandatory; silently assuming root led to the
            # broken no-argument ``source activate`` behavior
            sys.exit("Invalid arguments to checkenv. Need shell and env name/path")
        if len(sys.argv) > 4:
            sys.exit("Error: did not expect more than one argument.")
        if sys.argv[3].lower() == root_env_name.lower():
            # no need to check root env and try to install a symlink there
            sys.exit(0)
        # this should throw an error and exit if the env or path can't be found.
        try:
            prefix = prefix_from_arg(sys.argv[3], shelldict=shelldict)
        except ValueError as e:
            sys.exit(getattr(e, 'message', e))

        # Make sure an env always has the conda symlink
        try:
            conda.install.symlink_conda(prefix, root_dir, shell)
        except (IOError, OSError) as e:
            if e.errno == errno.EPERM or e.errno == errno.EACCES:
                msg = ("Cannot activate environment {0}.\n"
                       "User does not have write access for conda symlinks."
                       .format(sys.argv[2]))
                sys.exit(msg)
            raise
        sys.exit(0)

    elif sys.argv[1] == '..setps1':
        # path is a bit of a misnomer here. It is the prompt setting. However, it is returned
        # below by printing. That is why it is named "path"
        # DO NOT use os.getenv for this. On Windows especially, it shows cmd.exe settings
        # for bash shells. This method uses the shell directly.
        path = os.getenv(shelldict['promptvar'], '')
        # failsafes
        if not path:
            if shelldict['exe'] == 'cmd.exe':
                path = '$P$G'
        # strip off previous prefix, if any:
        path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
        env_path = sys.argv[3]
        if changeps1 and env_path:
            path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)

    else:
        # This means there is a bug in main.py
        raise ValueError("unexpected command")

    # This print is actually what sets the PATH or PROMPT variable. The shell
    # script gets this value, and finishes the job.
    print(path)
if __name__ == '__main__':
    # entry point used by the activate/deactivate shell wrappers
    main()
<|code_end|>
| conda/cli/activate.py
from __future__ import print_function, division, absolute_import
import errno
import os
from os.path import isdir, abspath
import re
import sys
from conda.cli.common import find_prefix_name
from conda.utils import (shells, run_in)
on_win = sys.platform == "win32"
def help(command, shell):
# sys.argv[1] will be ..checkenv in activate if an environment is already
# activated
# get grandparent process name to see which shell we're using
if command in ('..activate', '..checkenv'):
if shell in ["cmd.exe", "powershell.exe"]:
sys.exit("""Usage: activate ENV
Adds the 'Scripts' and 'Library\\bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
else:
sys.exit("""Usage: source activate ENV
Adds the 'bin' directory of the environment ENV to the front of PATH.
ENV may either refer to just the name of the environment, or the full
prefix path.""")
elif command == '..deactivate':
if shell in ["cmd.exe", "powershell.exe"]:
sys.exit("""Usage: deactivate
Removes the environment prefix, 'Scripts' and 'Library\\bin' directory
of the environment ENV from the front of PATH.""")
else:
sys.exit("""Usage: source deactivate
Removes the 'bin' directory of the environment activated with 'source
activate' from PATH. """)
else:
sys.exit("No help available for command %s" % sys.argv[1])
def prefix_from_arg(arg, shelldict):
'Returns a platform-native path'
# MSYS2 converts Unix paths to Windows paths with unix seps
# so we must check for the drive identifier too.
if shelldict['sep'] in arg and not re.match('[a-zA-Z]:', arg):
# strip is removing " marks, not \ - look carefully
native_path = shelldict['path_from'](arg)
if isdir(abspath(native_path.strip("\""))):
prefix = abspath(native_path.strip("\""))
else:
raise ValueError('could not find environment: %s' % native_path)
else:
prefix = find_prefix_name(arg.replace('/', os.path.sep))
if prefix is None:
raise ValueError('could not find environment: %s' % arg)
return prefix
def binpath_from_arg(arg, shelldict):
# prefix comes back as platform-native path
prefix = prefix_from_arg(arg, shelldict=shelldict)
if sys.platform == 'win32':
paths = [
prefix.rstrip("\\"),
os.path.join(prefix, 'Library', 'mingw-w64', 'bin'),
os.path.join(prefix, 'Library', 'usr', 'bin'),
os.path.join(prefix, 'Library', 'bin'),
os.path.join(prefix, 'Scripts'),
]
else:
paths = [
os.path.join(prefix, 'bin'),
]
# convert paths to shell-native paths
return [shelldict['path_to'](path) for path in paths]
def pathlist_to_str(paths, escape_backslashes=True):
"""
Format a path list, e.g., of bin paths to be added or removed,
for user-friendly output.
"""
path = ' and '.join(paths)
if on_win and escape_backslashes:
# escape for printing to console - ends up as single \
path = re.sub(r'(?<!\\)\\(?!\\)', r'\\\\', path)
else:
path = path.replace("\\\\", "\\")
return path
def get_path(shelldict):
"""Get path using a subprocess call.
os.getenv path isn't good for us, since bash on windows has a wildly different
path from Windows.
This returns PATH in the native representation of the shell - not necessarily
the native representation of the platform
"""
return run_in(shelldict["printpath"], shelldict)[0]
def main():
from conda.config import root_env_name, root_dir, changeps1
import conda.install
if '-h' in sys.argv or '--help' in sys.argv:
# all execution paths sys.exit at end.
help(sys.argv[1], sys.argv[2])
shell = sys.argv[2]
shelldict = shells[shell]
if sys.argv[1] == '..activate':
path = get_path(shelldict)
if len(sys.argv) == 4:
binpath = binpath_from_arg(sys.argv[3], shelldict=shelldict)
rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
else:
sys.exit("Error: ..activate expected exactly two arguments: shell and env name")
pathlist_str = pathlist_to_str(binpath)
sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))
# Clear the root path if it is present
if rootpath:
path = path.replace(shelldict['pathsep'].join(rootpath), "")
# prepend our new entries onto the existing path and make sure that the separator is native
path = shelldict['pathsep'].join(binpath + [path, ])
# deactivation is handled completely in shell scripts - it restores backups of env variables.
# It is done in shell scripts because they handle state much better than we can here.
elif sys.argv[1] == '..checkenv':
if len(sys.argv) < 4:
sys.exit("Invalid arguments to checkenv. Need shell and env name/path")
if len(sys.argv) > 4:
sys.exit("Error: did not expect more than one argument.")
if sys.argv[3].lower() == root_env_name.lower():
# no need to check root env and try to install a symlink there
sys.exit(0)
# this should throw an error and exit if the env or path can't be found.
try:
prefix = prefix_from_arg(sys.argv[3], shelldict=shelldict)
except ValueError as e:
sys.exit(getattr(e, 'message', e))
# Make sure an env always has the conda symlink
try:
conda.install.symlink_conda(prefix, root_dir, shell)
except (IOError, OSError) as e:
if e.errno == errno.EPERM or e.errno == errno.EACCES:
msg = ("Cannot activate environment {0}, not have write access to conda symlink"
.format(sys.argv[2]))
sys.exit(msg)
raise
sys.exit(0)
elif sys.argv[1] == '..setps1':
# path is a bit of a misnomer here. It is the prompt setting. However, it is returned
# below by printing. That is why it is named "path"
# DO NOT use os.getenv for this. One Windows especially, it shows cmd.exe settings
# for bash shells. This method uses the shell directly.
path = os.getenv(shelldict['promptvar'], '')
# failsafes
if not path:
if shelldict['exe'] == 'cmd.exe':
path = '$P$G'
# strip off previous prefix, if any:
path = re.sub(".*\(\(.*\)\)\ ", "", path, count=1)
env_path = sys.argv[3]
if changeps1 and env_path:
path = "(({0})) {1}".format(os.path.split(env_path)[-1], path)
else:
# This means there is a bug in main.py
raise ValueError("unexpected command")
# This print is actually what sets the PATH or PROMPT variable. The shell
# script gets this value, and finishes the job.
print(path)
if __name__ == '__main__':
main()
| conda/cli/activate.py
--- a/conda/cli/activate.py
+++ b/conda/cli/activate.py
@@ -119,14 +119,11 @@ def main():
shelldict = shells[shell]
if sys.argv[1] == '..activate':
path = get_path(shelldict)
- if len(sys.argv) == 3 or sys.argv[3].lower() == root_env_name.lower():
- binpath = binpath_from_arg(root_env_name, shelldict=shelldict)
- rootpath = None
- elif len(sys.argv) == 4:
+ if len(sys.argv) == 4:
binpath = binpath_from_arg(sys.argv[3], shelldict=shelldict)
rootpath = binpath_from_arg(root_env_name, shelldict=shelldict)
else:
- sys.exit("Error: did not expect more than one argument")
+ sys.exit("Error: ..activate expected exactly two arguments: shell and env name")
pathlist_str = pathlist_to_str(binpath)
sys.stderr.write("prepending %s to PATH\n" % shelldict['path_to'](pathlist_str))
@@ -142,7 +139,7 @@ def main():
elif sys.argv[1] == '..checkenv':
if len(sys.argv) < 4:
- sys.argv.append(root_env_name)
+ sys.exit("Invalid arguments to checkenv. Need shell and env name/path")
if len(sys.argv) > 4:
sys.exit("Error: did not expect more than one argument.")
if sys.argv[3].lower() == root_env_name.lower(): |
Consider allowing conda repodata to be fetched with HTTP deflate/gzip transfer compression (plain `repodata.json`) rather than requiring the bzip2-compressed `repodata.json.bz2`.
| conda/fetch.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import sys
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel)
from .connection import CondaSession, unparse_url, RETRIES
from .install import add_cached_package, find_new_location, package_cache, dist2pair
from .lock import Locked
from .utils import memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Return the repodata cache directory inside the first package dir,
    creating it when possible."""
    path = join(pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        # directory already exists (or cannot be created); ignore either way
        pass
    return path
def cache_fn_url(url):
    """Return the local cache filename for a channel URL: the first 8 hex
    characters of the URL's MD5 digest, plus '.json'."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy the response header *http_key* into *d* under *dict_key*,
    skipping missing or empty header values."""
    header_value = resp.headers.get(http_key)
    if not header_value:
        return
    d[dict_key] = header_value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator factory: run the wrapped function, then emit *msg* plus the
    call arguments on the 'dotupdate' logger, preserving the wrapped
    function's metadata."""
    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return wrapper
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Download (or revalidate via ETag/Last-Modified) the repodata index
    for channel *url*.

    Returns the parsed repodata dict, the cached copy on a 304 response or
    connection failure, or None for ignorable channels (e.g. a missing
    /noarch/ directory).
    """
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        cache = {'packages': {}}

    if use_cache:
        return cache

    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    if 'repo.continuum.io' in url:
        # the primary repo always serves a bzip2-compressed index
        filename = 'repodata.json.bz2'
    else:
        # other servers may only host plain repodata.json; ask for HTTP
        # transfer compression (gzip/deflate) so the download stays small
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'

    try:
        resp = session.get(url + filename, headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            if filename.endswith('.bz2'):
                json_str = bz2.decompress(resp.content).decode('utf-8')
            else:
                # requests transparently decodes transfer-compressed content
                json_str = resp.content.decode('utf-8')
            cache = json.loads(json_str)
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        # report the filename that was actually requested
        raise RuntimeError("Invalid index file: %s%s: %s" %
                           (remove_binstar_tokens(url), filename, e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                    .split(DEFAULT_CHANNEL_ALIAS)[1] \
                    .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)

        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None

        elif (e.response.status_code == 401 and
                rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.SSLError as e:
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        # cache write failures are non-fatal; we still return the data
        pass

    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        sys.exit("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    # credentials are memoized, so the user is only prompted once per scheme
    username, passwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with a '<username>:<password>@' auth component inserted;
    the password is URL-quoted before embedding."""
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    parts[1] = username + ':' + urllib_quote(passwd, '')
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials for *scheme*; memoized so
    each scheme only prompts once per process."""
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """Augment *index* with metadata for locally cached packages that are
    missing from the channel repodata (e.g. locally built packages), so the
    solver can still consider them.
    """
    # map schannel -> priority; channels we do not know get sorted last
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        if fkey in index or not info['dirs']:
            # already indexed, or no extracted copy to read metadata from
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        # reconstruct the package URL from the best available source
        if info['urls']:
            url = info['urls'][0]
        elif 'url' in meta:
            url = meta['url']
        elif 'channel' in meta:
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        if url.rsplit('/', 1)[-1] != fname:
            # URL does not actually point at this package file; skip it
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        if 'link' in meta:
            del meta['link']
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Append 'pip' to the dependency list of every python 2.x / 3.x
    package record in *index* (mutates the records in place)."""
    for record in index.values():
        is_python = record['name'] == 'python'
        if is_python and record['version'].startswith(('2.', '3.')):
            record.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """Fetch and merge repodata for every channel in *channel_urls*.

    *channel_urls* may be a dict {url: (schannel, priority)} or an iterable
    of URLs (then prioritized in listed order).  Channels are fetched
    concurrently when concurrent.futures is available.  Returns the merged
    {key: package_record} index (mutating *index* if given).
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
  - %s
""" % (url, '\n  - '.join(allowed_channels)))

    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in iterkeys(channel_urls)]
    else:
        try:
            urls = tuple(channel_urls)
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)

    for channel, repodata in repodatas:
        if repodata is None:
            # unreachable/ignorable channel (e.g. missing noarch directory)
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        for fn, info in iteritems(new_index):
            # stamp each record with its provenance so the solver can
            # prioritize between channels later
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info

    stdoutlog.info('\n')

    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    session = session or CondaSession()

    fn = info['fn']
    url = info.get('url')
    if url is None:
        # fall back to constructing the URL from the channel record
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from .signature import verify, SignatureError
        fn2 = fn + '.sig'
        # a sig value of '.' means the signature lives in the same channel
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """Stream *url* to *dst_path*, optionally verifying its MD5 checksum.

    Data is written to ``dst_path + '.part'`` and renamed on success.
    Retries (up to *retries*, defaulting to RETRIES) on connection resets
    and checksum mismatches.  When *urlstxt* is true, the URL is recorded
    in the package cache.  Raises RuntimeError on unrecoverable failures.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            # announce the download to the progress-bar handlers
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))
        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """
    Context manager to handle downloads to a tempfile
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        # a plain filesystem path is used directly; no temp dir is created
        if '://' not in self.url:
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from .console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/fetch.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import sys
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel)
from .connection import CondaSession, unparse_url, RETRIES
from .install import add_cached_package, find_new_location, package_cache, dist2pair
from .lock import Locked
from .utils import memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
cache_dir = join(pkgs_dirs[0], 'cache')
try:
os.makedirs(cache_dir)
except OSError:
pass
return cache_dir
def cache_fn_url(url):
md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
return '%s.json' % (md5[:8],)
def add_http_value_to_dict(resp, http_key, d, dict_key):
value = resp.headers.get(http_key)
if value:
d[dict_key] = value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator factory: after each call of the wrapped function, log
    `msg` together with the call arguments on the 'dotupdate' logger."""
    def __init__(self, msg):
        self.msg = msg
    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return wrapper
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """
    Fetch and locally cache the repodata for the channel at `url`.

    Returns the parsed repodata dict, the (possibly stale) cached copy on
    connection problems, or None for missing/forbidden noarch directories.
    Stored Etag / Last-Modified headers are replayed so an unchanged index
    answers 304 and the cached copy is reused.
    Raises RuntimeError for invalid index files and unexpected HTTP errors.
    """
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # Missing or corrupt cache file -- start from an empty index.
        cache = {'packages': {}}
    if use_cache:
        return cache
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)
    session = session or CondaSession()
    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]
    # repo.continuum.io and local file:// channels only provide the
    # pre-compressed repodata.json.bz2; requesting the plain repodata.json
    # from a file:// channel would 404 even though the channel is valid.
    # Other hosts get plain JSON with transport-level compression.
    if 'repo.continuum.io' in url or url.startswith("file://"):
        filename = 'repodata.json.bz2'
    else:
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'
    try:
        resp = session.get(url + filename, headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:  # 304 == cached copy is still current
            if filename.endswith('.bz2'):
                json_str = bz2.decompress(resp.content).decode('utf-8')
            else:
                json_str = resp.content.decode('utf-8')
            cache = json.loads(json_str)
        add_http_value_to_dict(resp, 'Etag', cache, '_etag')
        add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
    except ValueError as e:
        raise RuntimeError("Invalid index file: {0}{1}: {2}"
                           .format(remove_binstar_tokens(url), filename, e))
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                       .split(DEFAULT_CHANNEL_ALIAS)[1] \
                       .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None
        elif (e.response.status_code == 401 and
              rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry with the token stripped from the url.
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))
        log.debug(msg)
        raise RuntimeError(msg)
    except requests.exceptions.SSLError as e:
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)
    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)
    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass
    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.

    `url` is only used to determine the scheme (http/https) whose proxy
    needs credentials; the proxy URL itself comes from `session.proxies`.
    Exits the process if no proxy is configured for that scheme.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        sys.exit("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    # The prompt helper is memoized, so the user is asked once per scheme.
    username, passwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return `url` rewritten so its auth component is 'username:passwd',
    with the password percent-quoted."""
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    quoted_pw = urllib_quote(passwd, '')
    parts[1] = '%s:%s' % (username, quoted_pw)
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    # Memoized: the user is prompted at most once per proxy scheme.
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """
    Add packages that exist only in the local package cache to `index`,
    in place.  `priorities` maps channel url -> (canonical channel name,
    priority); packages from channels not listed there get a priority one
    above the current maximum.
    """
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        # Skip packages already indexed or with no extracted directory.
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        # Reconstruct the package url from the best source available.
        if info['urls']:
            url = info['urls'][0]
        elif 'url' in meta:
            url = meta['url']
        elif 'channel' in meta:
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        # The recorded url must end in the expected file name.
        if url.rsplit('/', 1)[-1] != fname:
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        if 'link' in meta:
            del meta['link']
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Make every Python 2.x/3.x package record in `index` depend on pip
    (modifies the records in place)."""
    for meta in index.values():
        is_python = meta['name'] == 'python'
        if is_python and meta['version'].startswith(('2.', '3.')):
            meta.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """
    Build (or extend) a package index by fetching repodata for every
    channel in `channel_urls`.

    `channel_urls` is either a sequence of urls (normalized through
    prioritize_channels) or a dict mapping url -> (canonical name,
    priority).  With `use_cache`, only locally cached repodata is read;
    with `unknown`, packages found only in the local package cache are
    added too.  Returns the index dict (fn/key -> package record).
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
  - %s
""" % (url, '\n  - '.join(allowed_channels)))
    # Fetch channels in parallel when concurrent.futures is available,
    # falling back to a sequential loop otherwise.
    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in iterkeys(channel_urls)]
    else:
        try:
            urls = tuple(channel_urls)
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)
    for channel, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            # Only non-default channels get a 'channel::' key prefix.
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info
    stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`

    `info` is an index record; its 'md5' entry is used to verify the
    download.  If the record carries a 'sig' entry, the detached
    signature is fetched as well and the package is verified, exiting
    the process on failure.
    '''
    session = session or CondaSession()
    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        # fn[:-8] strips the '.tar.bz2' extension to get the dist name.
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)
    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from .signature import verify, SignatureError
        fn2 = fn + '.sig'
        # A sig value of '.' means the signature lives next to the package.
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """
    Download `url` to `dst_path`, streaming into '<dst_path>.part' and
    renaming it into place only once the transfer completes.
    When `md5` is given the payload is verified against it; connection
    resets and md5 mismatches are retried up to `retries` times (default
    RETRIES).  With `urlstxt`, the finished file is registered in the
    package cache.  Raises RuntimeError on unrecoverable failures.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)
    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))
        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))
        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))
        if size:
            getLogger('fetch.stop').info(None)
        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))
        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))
        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """
    Context manager that downloads a URL into a temporary directory and
    yields the local path; a plain local path is passed through untouched.
    Any temporary directory is removed on exit.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose
    def __enter__(self):
        # A bare path (no scheme) is already local -- nothing to fetch.
        if '://' not in self.url:
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from .console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst
    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -69,18 +69,9 @@ def func(*args, **kwargs):
return res
return func
+
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
- if not ssl_verify:
- try:
- from requests.packages.urllib3.connectionpool import InsecureRequestWarning
- except ImportError:
- pass
- else:
- warnings.simplefilter('ignore', InsecureRequestWarning)
-
- session = session or CondaSession()
-
cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
try:
with open(cache_path) as f:
@@ -91,31 +82,50 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
if use_cache:
return cache
+ if not ssl_verify:
+ try:
+ from requests.packages.urllib3.connectionpool import InsecureRequestWarning
+ except ImportError:
+ pass
+ else:
+ warnings.simplefilter('ignore', InsecureRequestWarning)
+
+ session = session or CondaSession()
+
headers = {}
if "_etag" in cache:
headers["If-None-Match"] = cache["_etag"]
if "_mod" in cache:
headers["If-Modified-Since"] = cache["_mod"]
+ if 'repo.continuum.io' in url:
+ filename = 'repodata.json.bz2'
+ else:
+ headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
+ headers['Content-Type'] = 'application/json'
+ filename = 'repodata.json'
+
try:
- resp = session.get(url + 'repodata.json.bz2',
- headers=headers, proxies=session.proxies)
+ resp = session.get(url + filename, headers=headers, proxies=session.proxies)
resp.raise_for_status()
if resp.status_code != 304:
- cache = json.loads(bz2.decompress(resp.content).decode('utf-8'))
+ if filename.endswith('.bz2'):
+ json_str = bz2.decompress(resp.content).decode('utf-8')
+ else:
+ json_str = resp.content.decode('utf-8')
+ cache = json.loads(json_str)
add_http_value_to_dict(resp, 'Etag', cache, '_etag')
add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
except ValueError as e:
- raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" %
- (remove_binstar_tokens(url), e))
+ raise RuntimeError("Invalid index file: {0}{1}: {2}"
+ .format(remove_binstar_tokens(url), filename, e))
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
- return fetch_repodata(url, cache_dir=cache_dir,
- use_cache=use_cache, session=session)
+ return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
if e.response.status_code == 404:
if url.startswith(DEFAULT_CHANNEL_ALIAS):
@@ -161,8 +171,7 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
if "407" in str(e): # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
- return fetch_repodata(url, cache_dir=cache_dir,
- use_cache=use_cache, session=session)
+ return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url)) |
file:// URLs don't work anymore with conda 4.1.3
Conda 4.1.3 does not work anymore with **file://** URLs:
```
(E:\Anaconda3) C:\Windows\system32>conda update --override-channels --channel file:///A:/pkgs/free --all
Fetching package metadata ....Error: Could not find URL: file:///A:/pkgs/free/win-64/
```
But `A:\pkgs\free\win-64` really exists:
```
(E:\Anaconda3) C:\Windows\system32>dir A:\pkgs\free\win-64
Volume in drive A is Software
Volume Serial Number is 4546-3CD9
Directory of A:\pkgs\free\win-64
06/24/2016 12:31 AM <DIR> .
01/23/2016 06:27 PM <DIR> ..
06/24/2016 12:28 AM 259,605 repodata.json.bz2
07/07/2015 12:54 AM 85,764 argcomplete-0.9.0-py34_0.tar.bz2
```
Before upgrading from 4.0.8-py35_0 everything worked fine. The same happened to the Linux version.
| conda/fetch.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import sys
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel)
from .connection import CondaSession, unparse_url, RETRIES
from .install import add_cached_package, find_new_location, package_cache, dist2pair
from .lock import Locked
from .utils import memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Return the repodata cache directory (<first pkgs_dir>/cache),
    creating it on demand."""
    path = join(pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        # Already exists (or not creatable) -- hand back the path either way.
        pass
    return path
def cache_fn_url(url):
    """Map a channel URL to its local cache file name: the first 8 hex
    characters of the URL's md5 digest, plus a '.json' suffix."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy the response header `http_key` into `d` under `dict_key`,
    skipping missing or empty header values."""
    header = resp.headers.get(http_key)
    if header:
        d[dict_key] = header
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator factory: after each call of the wrapped function, log
    `msg` together with the call arguments on the 'dotupdate' logger."""
    def __init__(self, msg):
        self.msg = msg
    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return wrapper
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """
    Fetch and locally cache the repodata for the channel at `url`.

    Returns the parsed repodata dict, the (possibly stale) cached copy on
    connection problems, or None for missing/forbidden noarch directories.
    Stored Etag / Last-Modified headers are replayed so an unchanged index
    answers 304 and the cached copy is reused.
    Raises RuntimeError for invalid index files and unexpected HTTP errors.
    """
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # Missing or corrupt cache file -- start from an empty index.
        cache = {'packages': {}}
    if use_cache:
        return cache
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)
    session = session or CondaSession()
    headers = {}
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]
    # repo.continuum.io and local file:// channels only provide the
    # pre-compressed repodata.json.bz2; requesting the plain repodata.json
    # from a file:// channel would 404 even though the channel is valid.
    # Other hosts get plain JSON with transport-level compression.
    if 'repo.continuum.io' in url or url.startswith("file://"):
        filename = 'repodata.json.bz2'
    else:
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'
    try:
        resp = session.get(url + filename, headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:  # 304 == cached copy is still current
            if filename.endswith('.bz2'):
                json_str = bz2.decompress(resp.content).decode('utf-8')
            else:
                json_str = resp.content.decode('utf-8')
            cache = json.loads(json_str)
        add_http_value_to_dict(resp, 'Etag', cache, '_etag')
        add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
    except ValueError as e:
        raise RuntimeError("Invalid index file: {0}{1}: {2}"
                           .format(remove_binstar_tokens(url), filename, e))
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                       .split(DEFAULT_CHANNEL_ALIAS)[1] \
                       .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None
        elif (e.response.status_code == 401 and
              rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry with the token stripped from the url.
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))
        log.debug(msg)
        raise RuntimeError(msg)
    except requests.exceptions.SSLError as e:
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)
    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)
    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass
    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.

    `url` is only used to determine the scheme (http/https) whose proxy
    needs credentials; the proxy URL itself comes from `session.proxies`.
    Exits the process if no proxy is configured for that scheme.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        sys.exit("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    # The prompt helper is memoized, so the user is asked once per scheme.
    username, passwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return `url` rewritten so its auth component is 'username:passwd',
    with the password percent-quoted."""
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    quoted_pw = urllib_quote(passwd, '')
    parts[1] = '%s:%s' % (username, quoted_pw)
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    # Memoized: the user is prompted at most once per proxy scheme.
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """
    Add packages that exist only in the local package cache to `index`,
    in place.  `priorities` maps channel url -> (canonical channel name,
    priority); packages from channels not listed there get a priority one
    above the current maximum.
    """
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        # Skip packages already indexed or with no extracted directory.
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        # Reconstruct the package url from the best source available.
        if info['urls']:
            url = info['urls'][0]
        elif meta.get('url'):
            url = meta['url']
        elif meta.get('channel'):
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        # The recorded url must end in the expected file name.
        if url.rsplit('/', 1)[-1] != fname:
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        if 'link' in meta:
            del meta['link']
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Make every Python 2.x/3.x package record in `index` depend on pip
    (modifies the records in place)."""
    for meta in index.values():
        is_python = meta['name'] == 'python'
        if is_python and meta['version'].startswith(('2.', '3.')):
            meta.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """
    Build (or extend) a package index by fetching repodata for every
    channel in `channel_urls`.

    `channel_urls` is either a sequence of urls (normalized through
    prioritize_channels) or a dict mapping url -> (canonical name,
    priority).  With `use_cache`, only locally cached repodata is read;
    with `unknown`, packages found only in the local package cache are
    added too.  Returns the index dict (fn/key -> package record).
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
  - %s
""" % (url, '\n  - '.join(allowed_channels)))
    # Fetch channels in parallel when concurrent.futures is available,
    # falling back to a sequential loop otherwise.
    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in iterkeys(channel_urls)]
    else:
        try:
            urls = tuple(channel_urls)
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)
    for channel, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            # Only non-default channels get a 'channel::' key prefix.
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info
    stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`

    `info` is an index record; its 'md5' entry is used to verify the
    download.  If the record carries a 'sig' entry, the detached
    signature is fetched as well and the package is verified, exiting
    the process on failure.
    '''
    session = session or CondaSession()
    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        # fn[:-8] strips the '.tar.bz2' extension to get the dist name.
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)
    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from .signature import verify, SignatureError
        fn2 = fn + '.sig'
        # A sig value of '.' means the signature lives next to the package.
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """
    Download `url` to `dst_path`, streaming into '<dst_path>.part' and
    renaming it into place only once the transfer completes.
    When `md5` is given the payload is verified against it; connection
    resets and md5 mismatches are retried up to `retries` times (default
    RETRIES).  With `urlstxt`, the finished file is registered in the
    package cache.  Raises RuntimeError on unrecoverable failures.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)
    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))
        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))
        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))
        if size:
            getLogger('fetch.stop').info(None)
        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))
        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))
        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """
    Context manager that downloads a URL into a temporary directory and
    yields the local path; a plain local path is passed through untouched.
    Any temporary directory is removed on exit.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose
    def __enter__(self):
        # A bare path (no scheme) is already local -- nothing to fetch.
        if '://' not in self.url:
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from .console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst
    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/fetch.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import sys
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel)
from .connection import CondaSession, unparse_url, RETRIES
from .install import add_cached_package, find_new_location, package_cache, dist2pair
from .lock import Locked
from .utils import memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Return the repodata cache directory (<first pkgs_dir>/cache),
    creating it on demand."""
    path = join(pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        # Already exists (or not creatable) -- hand back the path either way.
        pass
    return path
def cache_fn_url(url):
    """Map a channel URL to its local cache file name: the first 8 hex
    characters of the URL's md5 digest, plus a '.json' suffix."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy the response header `http_key` into `d` under `dict_key`,
    skipping missing or empty header values."""
    header = resp.headers.get(http_key)
    if header:
        d[dict_key] = header
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator factory: after each call of the wrapped function, log
    `msg` together with the call arguments on the 'dotupdate' logger."""
    def __init__(self, msg):
        self.msg = msg
    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return wrapper
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (or reuse a cached copy of) the repodata index for ``url``.

    Returns the parsed repodata dict, or None for a missing/forbidden
    noarch directory or an empty result.  Uses HTTP validators stored in
    the local cache for conditional requests.
    """
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # Missing or corrupt cache file: start from an empty index.
        cache = {'packages': {}}

    if use_cache:
        return cache

    if not ssl_verify:
        # Silence urllib3's warning when the user explicitly disabled
        # certificate verification in their configuration.
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    headers = {}
    # Send the cached validators so the server can answer 304 Not Modified.
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    if 'repo.continuum.io' in url or url.startswith("file://"):
        # These sources are known to serve a bz2-compressed index.
        filename = 'repodata.json.bz2'
    else:
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'

    try:
        resp = session.get(url + filename, headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            if filename.endswith('.bz2'):
                json_str = bz2.decompress(resp.content).decode('utf-8')
            else:
                json_str = resp.content.decode('utf-8')
            cache = json.loads(json_str)
            # Remember the new validators for the next conditional request.
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise RuntimeError("Invalid index file: {0}{1}: {2}"
                           .format(remove_binstar_tokens(url), filename, e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again with the now-authenticated proxy.
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                    .split(DEFAULT_CHANNEL_ALIAS)[1] \
                    .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)

        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None

        elif (e.response.status_code == 401 and
                rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry once without the (apparently rejected) embedded token.
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))

        log.debug(msg)
        raise RuntimeError(msg)

    except requests.exceptions.SSLError as e:
        # SSL errors are reported but not fatal; fall through and return
        # whatever cached data we have.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise RuntimeError(msg)

    # Record the (token-stripped) source URL and persist the cache;
    # failure to write the cache is non-fatal.
    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        # No proxy is configured for this scheme at all; nothing to fix.
        sys.exit("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    # Embed the credentials directly in the proxy URL for this scheme.
    username, passwd = get_proxy_username_and_pass(scheme)
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return ``url`` with ``username:passwd`` inserted as its auth component."""
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    # Percent-encode the password so reserved characters survive in the URL.
    quoted = urllib_quote(passwd, '')
    parts[1] = username + ':' + quoted
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials for ``scheme``.

    Memoized so the user is asked at most once per scheme per process.
    """
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """Add locally cached packages that are missing from ``index``.

    ``priorities`` maps channel URL -> (schannel, priority); cached packages
    whose channel is unknown get a priority below every configured channel.
    """
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        # Skip packages already indexed or with no extracted directory.
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        # Reconstruct the package URL from the best available source.
        if info['urls']:
            url = info['urls'][0]
        elif meta.get('url'):
            url = meta['url']
        elif meta.get('channel'):
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        # Sanity checks: the URL must end in this filename and resolve to
        # the same named channel the dist key claims.
        if url.rsplit('/', 1)[-1] != fname:
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        if 'link' in meta:
            # Install-time linking info is irrelevant in an index record.
            del meta['link']
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Append 'pip' to the dependency list of every Python 2.x/3.x record
    in ``index`` (mutates the records in place)."""
    for info in itervalues(index):
        is_python = info['name'] == 'python'
        if is_python and info['version'].startswith(('2.', '3.')):
            info.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """Fetch and merge repodata for all ``channel_urls`` into one index.

    Channels are fetched concurrently when concurrent.futures is available.
    Each package record is annotated with its channel, schannel, priority,
    and full URL.  Returns the (possibly caller-supplied) index dict.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            sys.exit("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
- %s
""" % (url, '\n - '.join(allowed_channels)))

    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in iterkeys(channel_urls)]
    else:
        try:
            urls = tuple(channel_urls)
            # One session per worker; CondaSession is constructed per submit.
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)

    for channel, repodata in repodatas:
        if repodata is None:
            # Channel was unreachable or had no index (e.g. missing noarch).
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            # Non-default channels get a 'schannel::fn' key to disambiguate.
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info

    stdoutlog.info('\n')

    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    Fetch a package given by `info` and store it into `dst_dir`.

    Verifies the download against info['md5'], and, when info['sig'] is set,
    also downloads and checks the detached signature, exiting the process on
    any signature failure.
    '''
    session = session or CondaSession()

    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        # fn[:-8] strips the '.tar.bz2' extension to get the dist name.
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from .signature import verify, SignatureError

        fn2 = fn + '.sig'
        # A sig value of '.' means the signature lives in the package channel;
        # otherwise it names the signature's base URL.
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except SignatureError as e:
            sys.exit(str(e))
        sys.exit("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    """Download ``url`` to ``dst_path`` via a '.part' temp file.

    Optionally verifies the MD5 digest, retries on connection resets and
    digest mismatches, and records the source URL in urls.txt when
    ``urlstxt`` is set.  Raises RuntimeError on unrecoverable failures.
    """
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    # Hold the directory lock for the whole transfer so concurrent conda
    # processes do not write the same package at once.
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            # Feed the progress-bar handlers (truncated name + total size).
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            # NOTE(review): 104 is Linux's ECONNRESET; other platforms use a
            # different value (errno.ECONNRESET) — confirm intended scope.
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                              % (url, h.hexdigest(), md5))

        # Atomically promote the completed '.part' file to its final name.
        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """Context manager that downloads ``url`` into a fresh temporary
    directory and removes the directory on exit.

    A plain local path (one without '://') is handed back unchanged and
    nothing is created or cleaned up.
    """

    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # Already a local file; no temp dir is created.
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from .console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -98,7 +98,7 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
if "_mod" in cache:
headers["If-Modified-Since"] = cache["_mod"]
- if 'repo.continuum.io' in url:
+ if 'repo.continuum.io' in url or url.startswith("file://"):
filename = 'repodata.json.bz2'
else:
headers['Accept-Encoding'] = 'gzip, deflate, compress, identity' |
conda list misses pip-installed wheels
As of conda 4.1, `conda list` no longer captures Python packages that were pip-installed from wheels.
https://www.python.org/dev/peps/pep-0427/#id14
CC @ilanschnell
| conda/egg_info.py
<|code_start|>
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function
from io import open
import os
import re
import sys
from os.path import isdir, isfile, join
from .compat import itervalues
from .install import linked_data
from .misc import rel_path
def get_site_packages_dir(installed_pkgs):
    """Return the prefix-relative site-packages path for the installed Python.

    ``installed_pkgs`` maps dist keys to package info dicts; the first record
    named 'python' determines the layout.  Returns None when no Python
    package is present.
    """
    for info in installed_pkgs.values():
        if info['name'] == 'python':
            if sys.platform == 'win32':
                stdlib_dir = 'Lib'
            else:
                # Use the full major.minor component; slicing the first three
                # characters would map '3.10.x' to the wrong 'python3.1' dir.
                py_ver = '.'.join(info['version'].split('.')[:2])
                stdlib_dir = 'lib/python%s' % py_ver
            return join(stdlib_dir, 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """Yield the metadata file path for each .egg/.egg-info entry in ``sp_dir``."""
    for entry in os.listdir(sp_dir):
        if not entry.endswith(('.egg', '.egg-info')):
            continue
        full = join(sp_dir, entry)
        if isfile(full):
            # Single-file egg-info: the entry itself is the metadata.
            yield full
        elif isdir(full):
            # Directory form: metadata lives in PKG-INFO, possibly nested.
            candidates = [join(full, 'PKG-INFO'),
                          join(full, 'EGG-INFO', 'PKG-INFO')]
            for meta_path in candidates:
                if isfile(meta_path):
                    yield meta_path
# Matches 'Key: value' metadata lines (case-insensitive key).
pat = re.compile(r'(\w+):\s*(\S+)', re.I)

def parse_egg_info(path):
    """
    Parse an .egg-info file and return its canonical distribution name,
    '<name>-<version>-<egg_info>', or None if name/version are missing.
    """
    info = {}
    # Use a context manager so the file handle is closed promptly instead
    # of leaking until garbage collection.
    with open(path, encoding='utf-8') as fh:
        for line in fh:
            line = line.strip()
            m = pat.match(line)
            if m:
                key = m.group(1).lower()
                info[key] = m.group(2)
    try:
        return '%(name)s-%(version)s-<egg_info>' % info
    except KeyError:
        pass
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default only untracked (not conda-installed) .egg-info files are
    reported; pass ``all_pkgs=True`` to include everything.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        return set()

    # Collect every file already owned by a conda package.
    conda_files = set()
    for info in itervalues(installed_pkgs):
        conda_files.update(info.get('files', []))

    result = set()
    for path in get_egg_info_files(join(prefix, sp_dir)):
        tracked = rel_path(prefix, path) in conda_files
        if tracked and not all_pkgs:
            continue
        try:
            dist = parse_egg_info(path)
        except UnicodeDecodeError:
            dist = None
        if dist:
            result.add(dist)
    return result
if __name__ == '__main__':
    # Manual smoke test: print the untracked egg-info dists for this Python.
    from pprint import pprint
    pprint(get_egg_info(sys.prefix))
<|code_end|>
| conda/egg_info.py
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function
from io import open
import os
import re
import sys
from os.path import isdir, isfile, join
from .compat import itervalues
from .install import linked_data
from .misc import rel_path
def get_site_packages_dir(installed_pkgs):
    """Return the prefix-relative site-packages path for the installed Python.

    ``installed_pkgs`` maps dist keys to package info dicts; the first record
    named 'python' determines the layout.  Returns None when no Python
    package is present.
    """
    for info in installed_pkgs.values():
        if info['name'] == 'python':
            if sys.platform == 'win32':
                stdlib_dir = 'Lib'
            else:
                # Use the full major.minor component; slicing the first three
                # characters would map '3.10.x' to the wrong 'python3.1' dir.
                py_ver = '.'.join(info['version'].split('.')[:2])
                stdlib_dir = 'lib/python%s' % py_ver
            return join(stdlib_dir, 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """Yield the metadata file path for each .egg/.egg-info/.dist-info
    entry in ``sp_dir`` (covers both setuptools and wheel installs)."""
    for entry in os.listdir(sp_dir):
        if not entry.endswith(('.egg', '.egg-info', '.dist-info')):
            continue
        full = join(sp_dir, entry)
        if isfile(full):
            # Single-file egg-info: the entry itself is the metadata.
            yield full
        elif isdir(full):
            # Directory form: PKG-INFO for eggs, METADATA for dist-info.
            candidates = [join(full, 'PKG-INFO'),
                          join(full, 'EGG-INFO', 'PKG-INFO'),
                          join(full, 'METADATA')]
            for meta_path in candidates:
                if isfile(meta_path):
                    yield meta_path
# Matches 'Key: value' metadata lines (case-insensitive key).
pat = re.compile(r'(\w+):\s*(\S+)', re.I)

def parse_egg_info(path):
    """
    Parse an .egg-info file and return its canonical distribution name,
    '<name>-<version>-<pip>', or None if name/version are missing.
    """
    info = {}
    # Use a context manager so the file handle is closed promptly instead
    # of leaking until garbage collection.
    with open(path, encoding='utf-8') as fh:
        for line in fh:
            line = line.strip()
            m = pat.match(line)
            if m:
                key = m.group(1).lower()
                info[key] = m.group(2)
    try:
        return '%(name)s-%(version)s-<pip>' % info
    except KeyError:
        pass
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default only untracked (not conda-installed) .egg-info files are
    reported; pass ``all_pkgs=True`` to include everything.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        return set()

    # Collect every file already owned by a conda package.
    conda_files = set()
    for info in itervalues(installed_pkgs):
        conda_files.update(info.get('files', []))

    result = set()
    for path in get_egg_info_files(join(prefix, sp_dir)):
        tracked = rel_path(prefix, path) in conda_files
        if tracked and not all_pkgs:
            continue
        try:
            dist = parse_egg_info(path)
        except UnicodeDecodeError:
            dist = None
        if dist:
            result.add(dist)
    return result
if __name__ == '__main__':
    # Manual smoke test: print the untracked pip-installed dists for this Python.
    from pprint import pprint
    pprint(get_egg_info(sys.prefix))
| conda/egg_info.py
--- a/conda/egg_info.py
+++ b/conda/egg_info.py
@@ -29,14 +29,15 @@ def get_site_packages_dir(installed_pkgs):
def get_egg_info_files(sp_dir):
for fn in os.listdir(sp_dir):
- if not fn.endswith(('.egg', '.egg-info')):
+ if not fn.endswith(('.egg', '.egg-info', '.dist-info')):
continue
path = join(sp_dir, fn)
if isfile(path):
yield path
elif isdir(path):
for path2 in [join(path, 'PKG-INFO'),
- join(path, 'EGG-INFO', 'PKG-INFO')]:
+ join(path, 'EGG-INFO', 'PKG-INFO'),
+ join(path, 'METADATA')]:
if isfile(path2):
yield path2
@@ -54,7 +55,7 @@ def parse_egg_info(path):
key = m.group(1).lower()
info[key] = m.group(2)
try:
- return '%(name)s-%(version)s-<egg_info>' % info
+ return '%(name)s-%(version)s-<pip>' % info
except KeyError:
pass
return None |
Would be nice if conda config --get channels listed the channels in priority order
As far as I can tell, it currently lists them in reverse priority order (lowest-priority channel first).
| conda/cli/main_config.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, error_and_exit, exception_and_exit,
stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class CouldntParse(NotImplementedError):
    """Raised when the existing .condarc cannot be edited in place."""

    def __init__(self, reason):
        message = """Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s""" % reason
        self.args = [message]
class SingleValueKey(Completer):
    """Tab-completion choices for --set: scalar config keys plus the YAML
    boolean literals."""

    def _get_items(self):
        bool_words = ['yes', 'no', 'on', 'off', 'true', 'false']
        return rc_bool_keys + rc_string_keys + bool_words
class ListKey(Completer):
    # Tab-completion choices for list-valued config keys (e.g. 'channels').
    def _get_items(self):
        # Return the shared constant itself; callers treat it as read-only.
        return rc_list_keys
class BoolOrListKey(Completer):
    # Completion choices for --get: both list-valued and boolean config keys.
    def __contains__(self, other):
        # Membership test used by argparse's 'choices' validation.
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Register the 'conda config' sub-command and all of its options."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # Where to write: system condarc, an explicit file, or the user condarc.
    # TODO: use argparse.FileType
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
                                user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar=('KEY'),
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.
To add to the end of the list, use --append.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--append",
        nargs=2,
        action="append",
        help="""Add one configuration value to a list key. The default
behavior is to prepend.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for 'conda config': run the command and, in JSON mode,
    convert expected errors into structured JSON output."""
    try:
        execute_config(args, parser)
    except (CouldntParse, NotImplementedError) as exc:
        if not args.json:
            raise
        exception_and_exit(exc, json=True)
def execute_config(args, parser):
    """Apply the parsed 'conda config' action(s) to the selected .condarc."""
    json_warnings = []
    json_get = {}

    # Resolve which condarc file this invocation operates on.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            # Bare --get: report every key present in the file.
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                # Keys in rc_other are recognized but not printable here.
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if args.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                for item in reversed(rc_config.get(key, [])):
                    # Use repr so that it can be pasted back in to conda config --add
                    print("--add", key, repr(item))

    # Add, append
    for arg, prepend in zip((args.add, args.append), (True, False)):
        for key, item in arg:
            # 'channels' implicitly contains 'defaults' until configured.
            if key == 'channels' and key not in rc_config:
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                error_and_exit("key must be one of %s, not %r" %
                               (', '.join(rc_list_keys), key), json=args.json,
                               error_type="ValueError")
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParse("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "front" if prepend else "back")
                # Drop the existing occurrence(s); the insert below re-adds it.
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not args.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
                               json=args.json, error_type="TypeError")
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            error_and_exit("Error key must be one of %s, not %s" %
                           (', '.join(set_bools | set_strings), key), json=args.json,
                           error_type="ValueError")

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                error_and_exit("key %r is not in the config file" % key, json=args.json,
                               error_type="KeyError")
            # Removing from an unconfigured 'channels' starts from the default.
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            error_and_exit("%r is not in the %r key of the config file" %
                           (item, key), json=args.json, error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if args.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
<|code_end|>
| conda/cli/main_config.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, error_and_exit, exception_and_exit,
stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class CouldntParse(NotImplementedError):
    """Raised when the existing .condarc cannot be edited in place."""

    def __init__(self, reason):
        message = """Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s""" % reason
        self.args = [message]
class SingleValueKey(Completer):
    """Tab-completion choices for --set: scalar config keys plus the YAML
    boolean literals."""

    def _get_items(self):
        bool_words = ['yes', 'no', 'on', 'off', 'true', 'false']
        return rc_bool_keys + rc_string_keys + bool_words
class ListKey(Completer):
    # Tab-completion choices for list-valued config keys (e.g. 'channels').
    def _get_items(self):
        # Return the shared constant itself; callers treat it as read-only.
        return rc_list_keys
class BoolOrListKey(Completer):
    # Completion choices for --get: both list-valued and boolean config keys.
    def __contains__(self, other):
        # Membership test used by argparse's 'choices' validation.
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Register the 'conda config' sub-command and all of its options."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # Where to write: system condarc, an explicit file, or the user condarc.
    # TODO: use argparse.FileType
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
                                user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar=('KEY'),
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.
To add to the end of the list, use --append.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--append",
        nargs=2,
        action="append",
        help="""Add one configuration value to a list key. The default
behavior is to prepend.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Run ``conda config``; in --json mode report known config errors as JSON."""
    try:
        execute_config(args, parser)
    except (CouldntParse, NotImplementedError) as err:
        if not args.json:
            raise
        exception_and_exit(err, json=True)
def execute_config(args, parser):
    """Apply the requested operations (--get/--add/--append/--set/--remove/
    --remove-key) to the selected .condarc file.

    In --json mode, messages are collected in ``json_warnings``/``json_get``
    and emitted once at the end instead of being printed as we go.
    """
    json_warnings = []
    json_get = {}

    # Pick the target rc file: system-wide, explicit --file, or the user's.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            # bare --get means "show everything that is set"
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue
            if args.json:
                json_get[key] = rc_config[key]
                continue
            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(reversed(items)):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              " # lowest priority" if q == 0 else " # highest priority")
                    else:
                        print("--add", key, repr(item))

    # Add, append
    for arg, prepend in zip((args.add, args.append), (True, False)):
        for key, item in arg:
            if key == 'channels' and key not in rc_config:
                # adding to an unset channels list starts from the default
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                error_and_exit("key must be one of %s, not %r" %
                               (', '.join(rc_list_keys), key), json=args.json,
                               error_type="ValueError")
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParse("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "front" if prepend else "back")
                # drop the existing occurrence so the re-insert moves it
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not args.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item),
                               json=args.json, error_type="TypeError")
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            error_and_exit("Error key must be one of %s, not %s" %
                           (', '.join(set_bools | set_strings), key), json=args.json,
                           error_type="ValueError")

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                error_and_exit("key %r is not in the config file" % key, json=args.json,
                               error_type="KeyError")
            # removing from an unset channels list starts from the default
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            error_and_exit("%r is not in the %r key of the config file" %
                           (item, key), json=args.json, error_type="KeyError")
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            error_and_exit("key %r is not in the config file" % key, json=args.json,
                           error_type="KeyError")
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if args.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
| conda/cli/main_config.py
--- a/conda/cli/main_config.py
+++ b/conda/cli/main_config.py
@@ -257,13 +257,19 @@ def execute_config(args, parser):
if isinstance(rc_config[key], (bool, string_types)):
print("--set", key, rc_config[key])
- else:
+ else: # assume the key is a list-type
# Note, since conda config --add prepends, these are printed in
# the reverse order so that entering them in this order will
# recreate the same file
- for item in reversed(rc_config.get(key, [])):
+ items = rc_config.get(key, [])
+ numitems = len(items)
+ for q, item in enumerate(reversed(items)):
# Use repr so that it can be pasted back in to conda config --add
- print("--add", key, repr(item))
+ if key == "channels" and q in (0, numitems-1):
+ print("--add", key, repr(item),
+ " # lowest priority" if q == 0 else " # highest priority")
+ else:
+ print("--add", key, repr(item))
# Add, append
for arg, prepend in zip((args.add, args.append), (True, False)): |
conda throws error if allow_other_channels setting is used
The feature to lock down what channels your users are allowed to use stopped working
http://conda.pydata.org/docs/install/central.html#allow-other-channels-allow-other-channels
Reproduced this error in Windows 10 and OS X 10.11.5, if you use this setting in the systemwide .condarc file.
```
$ cat /Users/jenns/anaconda/.condarc
allow_other_channels: False
channels:
- defaults
```
```
$ conda info
Traceback (most recent call last):
File "/Users/jenns/anaconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/Users/jenns/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 61, in main
from conda.cli import conda_argparse
File "/Users/jenns/anaconda/lib/python2.7/site-packages/conda/cli/conda_argparse.py", line 15, in <module>
from .common import add_parser_help
File "/Users/jenns/anaconda/lib/python2.7/site-packages/conda/cli/common.py", line 12, in <module>
from ..config import (envs_dirs, default_prefix, platform, update_dependencies,
File "/Users/jenns/anaconda/lib/python2.7/site-packages/conda/config.py", line 331, in <module>
allowed_channels = get_allowed_channels()
File "/Users/jenns/anaconda/lib/python2.7/site-packages/conda/config.py", line 329, in get_allowed_channels
return normalize_urls(base_urls)
File "/Users/jenns/anaconda/lib/python2.7/site-packages/conda/config.py", line 253, in normalize_urls
urls = get_rc_urls() + urls
File "/Users/jenns/anaconda/lib/python2.7/site-packages/conda/config.py", line 197, in get_rc_urls
if rc.get('channels') is None:
AttributeError: 'NoneType' object has no attribute 'get'
```
| conda/config.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from .compat import urlparse, string_types
from .utils import try_write, yaml_load
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))
# ----- operating system and architecture -----
_sys_map = {
'linux2': 'linux',
'linux': 'linux',
'darwin': 'osx',
'win32': 'win',
'openbsd5': 'openbsd',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if force_32bit:
bits = 32
if platform == 'linux' and machine() in non_x86_linux_machines:
arch_name = machine()
subdir = 'linux-%s' % arch_name
else:
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'auto_update_conda',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
'root_dir',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
rc = root_dir = root_writable = BINSTAR_TOKEN_PAT = channel_alias = None
def get_rc_path():
    """Locate the active .condarc.

    $CONDARC wins (a value of a single space disables rc loading entirely);
    otherwise the first existing file among the user rc and the system rc.
    Returns None when nothing applies.
    """
    env_path = os.getenv('CONDARC')
    if env_path == ' ':
        return None
    if env_path:
        return env_path
    for candidate in (user_rc_path, sys_rc_path):
        if isfile(candidate):
            return candidate
    return None
rc_path = get_rc_path()
def load_condarc_(path):
    """Parse the YAML rc file at *path*; {} when *path* is unset, missing,
    or the file is empty."""
    if path and isfile(path):
        with open(path) as f:
            return yaml_load(f) or {}
    return {}
sys_rc = load_condarc_(sys_rc_path) if isfile(sys_rc_path) else {}
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_env_name = 'root'
def _default_envs_dirs():
    """Default environment directories; user-writable locations are tried
    first when the root prefix is not writable."""
    if root_writable:
        return [join(root_dir, 'envs')]
    # ~/envs for backwards compatibility
    return ['~/.conda/envs', '~/envs', join(root_dir, 'envs')]
def _pathsep_env(name):
    """Split the os.pathsep-separated environment variable *name* into a
    list of paths, expanding the literal entry 'DEFAULTS' to the configured
    (or default) envs directories."""
    value = os.getenv(name)
    if value is None:
        return []
    paths = []
    for entry in value.split(os.pathsep):
        if entry == 'DEFAULTS':
            paths.extend(rc.get('envs_dirs') or _default_envs_dirs())
        else:
            paths.append(entry)
    return paths
def pkgs_dir_from_envs_dir(envs_dir):
    """Package-cache directory for *envs_dir*: the root cache for the root
    envs directory, otherwise a '.pkgs' subdirectory of *envs_dir*."""
    if abspath(envs_dir) != abspath(join(root_dir, 'envs')):
        return join(envs_dir, '.pkgs')
    return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
# ----- channels -----
# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
    """Return the local conda-build channel URL(s), if conda-build is
    installed and its build root exists.  Results are memoized in the
    module-level ``local_channel`` list."""
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    if clear_cache:
        from .fetch import fetch_index
        fetch_index.cache = {}
    if local_channel:
        return local_channel
    from os.path import exists
    from .utils import url_path
    try:
        # conda-build is optional; without it there is no local channel
        from conda_build.config import croot
        if exists(croot):
            local_channel.append(url_path(croot))
    except ImportError:
        pass
    return local_channel
defaults_ = ['https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro']
def get_default_urls(merged=False):
    """Default channel URLs.

    A 'default_channels' entry in the system rc overrides the built-in
    defaults; with *merged* the built-ins are appended (without duplicates)
    after the configured ones.
    """
    if 'default_channels' not in sys_rc:
        return defaults_
    configured = sys_rc['default_channels']
    if not merged:
        return configured
    merged_urls = list(configured)
    merged_urls.extend(c for c in defaults_ if c not in merged_urls)
    return merged_urls
def get_rc_urls():
    """Return the channel list from the loaded rc file, or [] if there is none.

    ``rc`` can still be None at import time (e.g. no user rc exists but a
    system rc with ``allow_other_channels: False`` triggers channel
    normalization); guard against that instead of raising AttributeError.

    Raises RuntimeError if the rc channels contain the reserved name 'system'.
    """
    if rc is None or rc.get('channels') is None:
        return []
    if 'system' in rc['channels']:
        raise RuntimeError("system cannot be used in .condarc")
    return rc['channels']
def is_url(url):
    """Truthy if *url* parses as a URL (has a network location) or uses the
    file: scheme; falsy (None) for an empty/None input."""
    if not url:
        return
    parsed = urlparse.urlparse(url)
    return parsed.netloc != "" or parsed.scheme == "file"
def binstar_channel_alias(channel_alias):
    """Return *channel_alias*, appending the current binstar/anaconda token
    segment ('t/<token>/') when token injection is enabled in the rc.

    file:// aliases are returned unchanged; a missing binstar client is not
    an error.
    """
    if channel_alias.startswith('file:/'):
        return channel_alias
    # 'add_anaconda_token' is the new name; fall back to the legacy
    # 'add_binstar_token', then the module default.
    if rc.get('add_anaconda_token',
              rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
        try:
            from binstar_client.utils import get_binstar
            bs = get_binstar()
            # the API domain maps onto the conda channel domain
            bs_domain = bs.domain.replace("api", "conda").rstrip('/') + '/'
            if channel_alias.startswith(bs_domain) and bs.token:
                channel_alias += 't/%s/' % bs.token
        except ImportError:
            log.debug("Could not import binstar")
            pass
        except Exception as e:
            # best effort: never fail channel resolution over token lookup
            stderrlog.info("Warning: could not import binstar_client (%s)" % e)
    return channel_alias
def hide_binstar_tokens(url):
    """Mask any embedded binstar token in *url* with 't/<TOKEN>/'."""
    masked = BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)
    return masked


def remove_binstar_tokens(url):
    """Strip any embedded binstar token segment from *url*."""
    stripped = BINSTAR_TOKEN_PAT.sub(r'\1', url)
    return stripped
def prioritize_channels(channels):
    """Map each normalized channel URL to a (canonical name, priority) pair.

    Priority is incremented each time the canonical channel name changes
    while scanning *channels* in order, so consecutive URLs of the same
    channel share a priority.  Duplicate URLs keep their first entry.
    """
    newchans = OrderedDict()
    lastchan = None
    priority = 0
    for channel in channels:
        channel = channel.rstrip('/') + '/'
        if channel not in newchans:
            # canonical name of the base URL (platform segment stripped)
            channel_s = canonical_channel_name(channel.rsplit('/', 2)[0])
            # bump priority when we cross into a different canonical channel
            priority += channel_s != lastchan
            newchans[channel] = (channel_s, priority)
            lastchan = channel_s
    return newchans
def normalize_urls(urls, platform=None, offline_only=False):
    """Expand channel names and sentinels into fully-qualified platform URLs.

    Sentinels: "system" splices in the rc-file channels (when an rc path
    exists), "defaults"/"system" fall back to the default channels, and
    "local" expands to the conda-build channel.  Bare names are prefixed
    with the (tokenized) channel alias.  Each base URL is expanded to
    '<url>/<platform>/' and '<url>/noarch/'.  With *offline_only*, non-file
    URLs are dropped.
    """
    defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls(False))
    alias = None
    newurls = []
    while urls:
        url = urls[0]
        urls = urls[1:]
        if url == "system" and rc_path:
            # splice rc channels in place of "system" and reprocess them
            urls = get_rc_urls() + urls
            continue
        elif url in ("defaults", "system"):
            t_urls = defaults
        elif url == "local":
            t_urls = get_local_urls()
        else:
            t_urls = [url]
        for url0 in t_urls:
            url0 = url0.rstrip('/')
            if not is_url(url0):
                # bare channel name: prefix with the binstar channel alias,
                # computed lazily (token lookup can be slow)
                if alias is None:
                    alias = binstar_channel_alias(channel_alias)
                url0 = alias + url0
            if offline_only and not url0.startswith('file:'):
                continue
            for plat in (platform or subdir, 'noarch'):
                newurls.append('%s/%s/' % (url0, plat))
    return newurls
def get_channel_urls(platform=None, offline=False):
    """Resolve the configured channel list into normalized platform URLs.

    CIO_TEST overrides with the test channel set; otherwise the rc channels
    are used when present, falling back to the defaults.
    """
    if os.getenv('CIO_TEST'):
        import cio_test
        base_urls = cio_test.base_urls
    else:
        base_urls = ['system'] if 'channels' in rc else ['defaults']
    return normalize_urls(base_urls, platform, offline)
def canonical_channel_name(channel):
    """Collapse a channel URL to its short canonical name.

    Default URLs map to 'defaults', local build URLs to 'local', alias-based
    URLs to their bare channel name; http:// URLs are retried as https://.
    Unrecognized URLs are returned unchanged (with tokens stripped).
    """
    if channel is None:
        return '<unknown>'
    channel = remove_binstar_tokens(channel).rstrip('/')
    if any(channel.startswith(i) for i in get_default_urls(True)):
        return 'defaults'
    elif any(channel.startswith(i) for i in get_local_urls(clear_cache=False)):
        return 'local'
    elif channel.startswith('http://filer/'):
        return 'filer'
    elif channel.startswith(channel_alias):
        return channel.split(channel_alias, 1)[1]
    elif channel.startswith('http:/'):
        # retry as https://; fall back to the original if nothing matched
        channel2 = 'https' + channel[4:]
        channel3 = canonical_channel_name(channel2)
        return channel3 if channel3 != channel2 else channel
    else:
        return channel
def url_channel(url):
    """Split a package URL into (channel URL, canonical channel name).

    Returns '<unknown>' pairs for inputs with no path separators.
    """
    parts = (url or '').rsplit('/', 2)
    if len(parts) == 1:
        return '<unknown>', '<unknown>'
    if len(parts) == 2:
        return parts[0], parts[0]
    if url.startswith('file://') and parts[1] not in ('noarch', subdir):
        # Explicit file-based URLs are denoted with a '/' in the schannel
        channel = parts[0] + '/' + parts[1]
        schannel = channel + '/'
    else:
        channel = parts[0]
        schannel = canonical_channel_name(channel)
    return channel, schannel
# ----- allowed channels -----
def get_allowed_channels():
    """Return the normalized list of channels users may access, or None when
    unrestricted (no system rc, or allow_other_channels is true)."""
    if not isfile(sys_rc_path):
        return None
    if sys_rc.get('allow_other_channels', True):
        return None
    if 'channels' in sys_rc:
        base_urls = ['system']
    else:
        # 'defaults' is the sentinel normalize_urls() recognizes; the previous
        # value 'default' was treated as a literal channel name and wrongly
        # expanded against the channel alias.
        base_urls = ['defaults']
    return normalize_urls(base_urls)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
    """Return the rc 'proxy_servers' mapping ({} when unset); exits the
    process if the setting is present but not a mapping."""
    servers = rc.get('proxy_servers') or {}
    if not isinstance(servers, dict):
        sys.exit("Error: proxy_servers setting not a mapping")
    return servers
def load_condarc(path):
    """Load the rc file at *path* and (re)derive all module-level
    configuration globals from it.

    NOTE(review): this function publishes every local into the module
    namespace via ``globals().update(locals())`` — once early, so helpers
    such as _default_envs_dirs() see root_dir/root_writable/rc, and once at
    the end.  Its local names therefore ARE the module's configuration API.
    """
    rc = load_condarc_(path)

    # CONDA_ROOT > rc root_dir > the running prefix
    root_dir = abspath(expanduser(os.getenv('CONDA_ROOT', rc.get('root_dir', sys.prefix))))
    root_writable = try_write(root_dir)

    globals().update(locals())

    envs_dirs = [abspath(expanduser(p)) for p in (
        _pathsep_env('CONDA_ENVS_PATH') or
        rc.get('envs_dirs') or
        _default_envs_dirs()
    )]

    pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]

    _default_env = os.getenv('CONDA_DEFAULT_ENV')
    if _default_env in (None, root_env_name):
        default_prefix = root_dir
    elif os.sep in _default_env:
        # a path-like value names the prefix directly
        default_prefix = abspath(_default_env)
    else:
        # first envs dir that already contains the named env wins;
        # otherwise place it in the first envs dir
        for envs_dir in envs_dirs:
            default_prefix = join(envs_dir, _default_env)
            if isdir(default_prefix):
                break
        else:
            default_prefix = join(envs_dirs[0], _default_env)

    # ----- foreign -----
    try:
        with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
            foreign = fi.read().split()
    except IOError:
        foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']

    channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
    if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
        # locked-down installs force the system-wide alias
        channel_alias = sys_rc['channel_alias']

    channel_alias = channel_alias.rstrip('/')
    _binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
    BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))
    # strip any token already embedded in the alias itself
    channel_alias = BINSTAR_TOKEN_PAT.sub(r'\1', channel_alias + '/')

    offline = bool(rc.get('offline', False))
    add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
    always_yes = bool(rc.get('always_yes', False))
    always_copy = bool(rc.get('always_copy', False))
    changeps1 = bool(rc.get('changeps1', True))
    use_pip = bool(rc.get('use_pip', True))
    binstar_upload = rc.get('anaconda_upload',
                            rc.get('binstar_upload', None))  # None means ask
    allow_softlinks = bool(rc.get('allow_softlinks', True))
    auto_update_conda = bool(rc.get('auto_update_conda', rc.get('self_update', True)))
    # show channel URLs when displaying what is going to be downloaded
    show_channel_urls = rc.get('show_channel_urls', None)  # None means letting conda decide
    # set packages disallowed to be installed
    disallow = set(rc.get('disallow', []))
    # packages which are added to a newly created environment by default
    create_default_packages = list(rc.get('create_default_packages', []))
    update_dependencies = bool(rc.get('update_dependencies', True))
    channel_priority = bool(rc.get('channel_priority', True))
    # ssl_verify can be a boolean value or a filename string
    ssl_verify = rc.get('ssl_verify', True)

    try:
        # accept either a whitespace-separated string or a list
        track_features = rc.get('track_features', [])
        if isinstance(track_features, string_types):
            track_features = track_features.split()
        track_features = set(track_features)
    except KeyError:
        track_features = None

    globals().update(locals())
    return rc
load_condarc(rc_path)
<|code_end|>
| conda/config.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import logging
import os
import re
import sys
from collections import OrderedDict
from os.path import abspath, expanduser, isfile, isdir, join
from platform import machine
from .compat import urlparse, string_types
from .utils import try_write, yaml_load
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
default_python = '%d.%d' % sys.version_info[:2]
# CONDA_FORCE_32BIT should only be used when running conda-build (in order
# to build 32-bit packages on a 64-bit system). We don't want to mention it
# in the documentation, because it can mess up a lot of things.
force_32bit = bool(int(os.getenv('CONDA_FORCE_32BIT', 0)))
# ----- operating system and architecture -----
_sys_map = {
'linux2': 'linux',
'linux': 'linux',
'darwin': 'osx',
'win32': 'win',
'openbsd5': 'openbsd',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
platform = _sys_map.get(sys.platform, 'unknown')
bits = 8 * tuple.__itemsize__
if force_32bit:
bits = 32
if platform == 'linux' and machine() in non_x86_linux_machines:
arch_name = machine()
subdir = 'linux-%s' % arch_name
else:
arch_name = {64: 'x86_64', 32: 'x86'}[bits]
subdir = '%s-%d' % (platform, bits)
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
ADD_BINSTAR_TOKEN = True
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'auto_update_conda',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
'root_dir',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
local_channel = []
rc = root_dir = root_writable = BINSTAR_TOKEN_PAT = channel_alias = None
def get_rc_path():
path = os.getenv('CONDARC')
if path == ' ':
return None
if path:
return path
for path in user_rc_path, sys_rc_path:
if isfile(path):
return path
return None
rc_path = get_rc_path()
def load_condarc_(path):
if not path or not isfile(path):
return {}
with open(path) as f:
return yaml_load(f) or {}
sys_rc = load_condarc_(sys_rc_path) if isfile(sys_rc_path) else {}
# ----- local directories -----
# root_dir should only be used for testing, which is why don't mention it in
# the documentation, to avoid confusion (it can really mess up a lot of
# things)
root_env_name = 'root'
def _default_envs_dirs():
lst = [join(root_dir, 'envs')]
if not root_writable:
# ~/envs for backwards compatibility
lst = ['~/.conda/envs', '~/envs'] + lst
return lst
def _pathsep_env(name):
x = os.getenv(name)
if x is None:
return []
res = []
for path in x.split(os.pathsep):
if path == 'DEFAULTS':
for p in rc.get('envs_dirs') or _default_envs_dirs():
res.append(p)
else:
res.append(path)
return res
def pkgs_dir_from_envs_dir(envs_dir):
if abspath(envs_dir) == abspath(join(root_dir, 'envs')):
return join(root_dir, 'pkgs32' if force_32bit else 'pkgs')
else:
return join(envs_dir, '.pkgs')
# ----- channels -----
# Note, get_*_urls() return unnormalized urls.
def get_local_urls(clear_cache=True):
# remove the cache such that a refetch is made,
# this is necessary because we add the local build repo URL
if clear_cache:
from .fetch import fetch_index
fetch_index.cache = {}
if local_channel:
return local_channel
from os.path import exists
from .utils import url_path
try:
from conda_build.config import croot
if exists(croot):
local_channel.append(url_path(croot))
except ImportError:
pass
return local_channel
defaults_ = ['https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro']
def get_default_urls(merged=False):
if 'default_channels' in sys_rc:
res = sys_rc['default_channels']
if merged:
res = list(res)
res.extend(c for c in defaults_ if c not in res)
return res
return defaults_
def get_rc_urls():
    """Return the channel list from the loaded rc file, or [] if there is none.

    The ``rc is None`` guard protects import time: no user rc may exist while
    a system rc (e.g. allow_other_channels: False) still triggers channel
    normalization through this function.
    """
    if rc is None or rc.get('channels') is None:
        return []
    if 'system' in rc['channels']:
        raise RuntimeError("system cannot be used in .condarc")
    return rc['channels']
def is_url(url):
    """Truthy when *url* parses as a URL (non-empty netloc) or is a
    file: URL; falsy (None) for empty/None input."""
    if not url:
        return
    pieces = urlparse.urlparse(url)
    return bool(pieces.netloc) or pieces.scheme == "file"
def binstar_channel_alias(channel_alias):
if channel_alias.startswith('file:/'):
return channel_alias
if rc.get('add_anaconda_token',
rc.get('add_binstar_token', ADD_BINSTAR_TOKEN)):
try:
from binstar_client.utils import get_binstar
bs = get_binstar()
bs_domain = bs.domain.replace("api", "conda").rstrip('/') + '/'
if channel_alias.startswith(bs_domain) and bs.token:
channel_alias += 't/%s/' % bs.token
except ImportError:
log.debug("Could not import binstar")
pass
except Exception as e:
stderrlog.info("Warning: could not import binstar_client (%s)" % e)
return channel_alias
def hide_binstar_tokens(url):
return BINSTAR_TOKEN_PAT.sub(r'\1t/<TOKEN>/', url)
def remove_binstar_tokens(url):
return BINSTAR_TOKEN_PAT.sub(r'\1', url)
def prioritize_channels(channels):
newchans = OrderedDict()
lastchan = None
priority = 0
for channel in channels:
channel = channel.rstrip('/') + '/'
if channel not in newchans:
channel_s = canonical_channel_name(channel.rsplit('/', 2)[0])
priority += channel_s != lastchan
newchans[channel] = (channel_s, priority)
lastchan = channel_s
return newchans
def normalize_urls(urls, platform=None, offline_only=False):
defaults = tuple(x.rstrip('/') + '/' for x in get_default_urls(False))
alias = None
newurls = []
while urls:
url = urls[0]
urls = urls[1:]
if url == "system" and rc_path:
urls = get_rc_urls() + urls
continue
elif url in ("defaults", "system"):
t_urls = defaults
elif url == "local":
t_urls = get_local_urls()
else:
t_urls = [url]
for url0 in t_urls:
url0 = url0.rstrip('/')
if not is_url(url0):
if alias is None:
alias = binstar_channel_alias(channel_alias)
url0 = alias + url0
if offline_only and not url0.startswith('file:'):
continue
for plat in (platform or subdir, 'noarch'):
newurls.append('%s/%s/' % (url0, plat))
return newurls
def get_channel_urls(platform=None, offline=False):
if os.getenv('CIO_TEST'):
import cio_test
base_urls = cio_test.base_urls
elif 'channels' in rc:
base_urls = ['system']
else:
base_urls = ['defaults']
res = normalize_urls(base_urls, platform, offline)
return res
def canonical_channel_name(channel):
if channel is None:
return '<unknown>'
channel = remove_binstar_tokens(channel).rstrip('/')
if any(channel.startswith(i) for i in get_default_urls(True)):
return 'defaults'
elif any(channel.startswith(i) for i in get_local_urls(clear_cache=False)):
return 'local'
elif channel.startswith('http://filer/'):
return 'filer'
elif channel.startswith(channel_alias):
return channel.split(channel_alias, 1)[1]
elif channel.startswith('http:/'):
channel2 = 'https' + channel[4:]
channel3 = canonical_channel_name(channel2)
return channel3 if channel3 != channel2 else channel
else:
return channel
def url_channel(url):
parts = (url or '').rsplit('/', 2)
if len(parts) == 1:
return '<unknown>', '<unknown>'
if len(parts) == 2:
return parts[0], parts[0]
if url.startswith('file://') and parts[1] not in ('noarch', subdir):
# Explicit file-based URLs are denoted with a '/' in the schannel
channel = parts[0] + '/' + parts[1]
schannel = channel + '/'
else:
channel = parts[0]
schannel = canonical_channel_name(channel)
return channel, schannel
# ----- allowed channels -----
def get_allowed_channels():
    """Return the normalized list of channels users may access, or None when
    unrestricted (no system rc, or allow_other_channels is true)."""
    if not isfile(sys_rc_path):
        return None
    if sys_rc.get('allow_other_channels', True):
        return None
    if 'channels' in sys_rc:
        base_urls = ['system']
    else:
        # 'defaults' is the sentinel normalize_urls() recognizes; the previous
        # value 'default' was treated as a literal channel name and wrongly
        # expanded against the channel alias.
        base_urls = ['defaults']
    return normalize_urls(base_urls)
allowed_channels = get_allowed_channels()
# ----- proxy -----
def get_proxy_servers():
res = rc.get('proxy_servers') or {}
if isinstance(res, dict):
return res
sys.exit("Error: proxy_servers setting not a mapping")
def load_condarc(path):
rc = load_condarc_(path)
root_dir = abspath(expanduser(os.getenv('CONDA_ROOT', rc.get('root_dir', sys.prefix))))
root_writable = try_write(root_dir)
globals().update(locals())
envs_dirs = [abspath(expanduser(p)) for p in (
_pathsep_env('CONDA_ENVS_PATH') or
rc.get('envs_dirs') or
_default_envs_dirs()
)]
pkgs_dirs = [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in envs_dirs]
_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, root_env_name):
default_prefix = root_dir
elif os.sep in _default_env:
default_prefix = abspath(_default_env)
else:
for envs_dir in envs_dirs:
default_prefix = join(envs_dir, _default_env)
if isdir(default_prefix):
break
else:
default_prefix = join(envs_dirs[0], _default_env)
# ----- foreign -----
try:
with open(join(root_dir, 'conda-meta', 'foreign')) as fi:
foreign = fi.read().split()
except IOError:
foreign = [] if isdir(join(root_dir, 'conda-meta')) else ['python']
channel_alias = rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS)
if not sys_rc.get('allow_other_channels', True) and 'channel_alias' in sys_rc:
channel_alias = sys_rc['channel_alias']
channel_alias = channel_alias.rstrip('/')
_binstar = r'((:?%s|binstar\.org|anaconda\.org)/?)(t/[0-9a-zA-Z\-<>]{4,})/'
BINSTAR_TOKEN_PAT = re.compile(_binstar % re.escape(channel_alias))
channel_alias = BINSTAR_TOKEN_PAT.sub(r'\1', channel_alias + '/')
offline = bool(rc.get('offline', False))
add_pip_as_python_dependency = bool(rc.get('add_pip_as_python_dependency', True))
always_yes = bool(rc.get('always_yes', False))
always_copy = bool(rc.get('always_copy', False))
changeps1 = bool(rc.get('changeps1', True))
use_pip = bool(rc.get('use_pip', True))
binstar_upload = rc.get('anaconda_upload',
rc.get('binstar_upload', None)) # None means ask
allow_softlinks = bool(rc.get('allow_softlinks', True))
auto_update_conda = bool(rc.get('auto_update_conda', rc.get('self_update', True)))
# show channel URLs when displaying what is going to be downloaded
show_channel_urls = rc.get('show_channel_urls', None) # None means letting conda decide
# set packages disallowed to be installed
disallow = set(rc.get('disallow', []))
# packages which are added to a newly created environment by default
create_default_packages = list(rc.get('create_default_packages', []))
update_dependencies = bool(rc.get('update_dependencies', True))
channel_priority = bool(rc.get('channel_priority', True))
# ssl_verify can be a boolean value or a filename string
ssl_verify = rc.get('ssl_verify', True)
try:
track_features = rc.get('track_features', [])
if isinstance(track_features, string_types):
track_features = track_features.split()
track_features = set(track_features)
except KeyError:
track_features = None
globals().update(locals())
return rc
load_condarc(rc_path)
| conda/config.py
--- a/conda/config.py
+++ b/conda/config.py
@@ -194,7 +194,7 @@ def get_default_urls(merged=False):
return defaults_
def get_rc_urls():
- if rc.get('channels') is None:
+ if rc is None or rc.get('channels') is None:
return []
if 'system' in rc['channels']:
raise RuntimeError("system cannot be used in .condarc") |
conda install from tarball error?
Running into this issue when trying to install directly from a tarball.
```
Traceback (most recent call last):
File "/usr/local/bin/conda2", line 6, in <module>
sys.exit(main())
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Include the output of the command 'conda info' in your report.
File "/opt/conda2/lib/python2.7/site-packages/conda/cli/main.py", line 120, in main
exit_code = args_func(args, p)
File "/opt/conda2/lib/python2.7/site-packages/conda/cli/main.py", line 130, in args_func
exit_code = args.func(args, p)
File "/opt/conda2/lib/python2.7/site-packages/conda/cli/main_install.py", line 69, in execute
install(args, parser, 'install')
File "/opt/conda2/lib/python2.7/site-packages/conda/cli/install.py", line 196, in install
explicit(args.packages, prefix, verbose=not args.quiet)
File "/opt/conda2/lib/python2.7/site-packages/conda/misc.py", line 79, in explicit
is_file = schannel.startswith('file:') and schannel.endswith('/')
UnboundLocalError: local variable 'schannel' referenced before assignment
```
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import time
import tempfile
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, normpath)
on_win = bool(sys.platform == "win32")
try:
from conda.lock import Locked
from conda.utils import win_path_to_unix, url_path
from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
# Make sure this still works as a standalone script for the Anaconda
# installer.
pkgs_dirs = [sys.prefix]
class Locked(object):
def __init__(self, *args, **kwargs):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
def win_path_to_unix(path, root_prefix=""):
"""Convert a path or ;-separated string of paths into a unix representation
Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
"""
path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
def translation(found_path):
found = found_path.group(1).replace("\\", "/").replace(":", "")
return root_prefix + "/" + found
return re.sub(path_re, translation, path).replace(";/", ":/")
def url_path(path):
path = abspath(path)
if on_win:
path = '/' + path.replace(':', '|').replace('\\', '/')
return 'file://%s' % path
# There won't be any binstar tokens in the installer anyway
def remove_binstar_tokens(url):
return url
# A simpler version of url_channel will do
def url_channel(url):
return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
def win_hard_link(src, dst):
"Equivalent to os.link, using the win32 CreateHardLink call."
if not CreateHardLink(dst, src, None):
raise OSError('win32 hard link failed')
def win_soft_link(src, dst):
"Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
if CreateSymbolicLink is None:
raise OSError('win32 soft link not supported')
if not CreateSymbolicLink(dst, src, isdir(src)):
raise OSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
"""Special function for Windows XP where the `CreateSymbolicLink`
function is not available.
Simply creates a `.bat` file at `dst` which calls `src` together with
all command line arguments.
Works of course only with callable files, e.g. `.bat` or `.exe` files.
"""
from conda.utils import shells
try:
os.makedirs(os.path.dirname(dst))
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
pass
else:
raise
# bat file redirect
with open(dst+'.bat', 'w') as f:
f.write('@echo off\ncall "%s" %%*\n' % src)
# TODO: probably need one here for powershell at some point
# This one is for bash/cygwin/msys
# set default shell to bash.exe when not provided, as that's most common
if not shell:
shell = "bash.exe"
with open(dst, "w") as f:
f.write("#!/usr/bin/env bash \n")
if src.endswith("conda"):
f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
else:
f.write('source %s "$@"' % shells[shell]['path_to'](src))
# Make the new file executable
# http://stackoverflow.com/a/30463972/1170370
mode = os.stat(dst).st_mode
mode |= (mode & 292) >> 2 # copy R bits to X
os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """
    def handle(self, record):
        # Intentionally discard every record.
        pass

    def emit(self, record):
        # Intentionally discard every record.
        pass

    def createLock(self):
        # No I/O is performed, so no lock is needed.
        self.lock = None
log.addHandler(NullHandler())
# Link-type codes used throughout this module and recorded in conda-meta.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names for the codes above (persisted in package metadata).
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` using the requested link type.

    linktype is LINK_HARD, LINK_SOFT or LINK_COPY; on Windows the
    ctypes-based win_* helpers stand in for os.link/os.symlink.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror callback: clear the read-only bit and retry
    # the failed operation (typically os.remove/os.rmdir) once.
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    # shutil.rmtree onerror callback: downgrade common removal failures to
    # warnings instead of aborting the whole tree removal.
    if exc_info[1].errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif exc_info[1].errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def exp_backoff_fn(fn, *args, **kwargs):
    """Call fn(*args, **kwargs), retrying on Windows with exponential backoff.

    Mostly for retrying file operations that fail on Windows due to virus
    scanners holding transient handles. On non-Windows platforms the call is
    made exactly once. On Windows, EPERM/EACCES failures are retried up to 5
    times with exponentially growing jittered delays; any other OSError or
    IOError is re-raised immediately, as is the final EPERM/EACCES.

    Generalized (backward-compatibly) to forward keyword arguments, so it
    can wrap calls like open(path, 'wb') with keyword options too.
    """
    if not on_win:
        return fn(*args, **kwargs)

    import random
    max_retries = 5
    for n in range(max_retries):
        try:
            result = fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (errno.EPERM, errno.EACCES):
                if n == max_retries - 1:
                    raise
                # 2**n milliseconds base delay plus up to 1 ms of jitter.
                time.sleep(((2 ** n) + random.random()) * 1e-3)
            else:
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
"""
Completely delete path
max_retries is the number of times to retry on failure. The default is
5. This only applies to deleting a directory.
If removing path fails and trash is True, files will be moved to the trash directory.
"""
if islink(path) or isfile(path):
# Note that we have to check if the destination is a link because
# exists('/path/to/dead-link') will return False, although
# islink('/path/to/dead-link') is True.
try:
os.unlink(path)
return
except (OSError, IOError):
log.warn("Cannot remove, permission denied: {0}".format(path))
if trash and move_path_to_trash(path):
return
elif isdir(path):
# On Windows, always move to trash first.
if trash and on_win and move_path_to_trash(path, preclean=False):
return
try:
for i in range(max_retries):
try:
shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
return
except OSError as e:
if trash and move_path_to_trash(path):
return
msg = "Unable to delete %s\n%s\n" % (path, e)
if on_win:
try:
shutil.rmtree(path, onerror=_remove_readonly)
return
except OSError as e1:
msg += "Retry with onerror failed (%s)\n" % e1
p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode != 0:
msg += '%s\n%s\n' % (stdout, stderr)
else:
if not isdir(path):
return
log.debug(msg + "Retrying after %s seconds..." % i)
time.sleep(i)
# Final time. pass exceptions to caller.
shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
finally:
# If path was removed, ensure it's not in linked_data_
if not isdir(path):
delete_linked_data_any(path)
def rm_empty_dir(path):
    """Delete `path` when it is an empty directory; otherwise do nothing.

    A missing path, a non-directory, or a non-empty directory all surface
    as OSError from os.rmdir, which is deliberately swallowed.
    """
    try:
        os.rmdir(path)
    except OSError:
        # path may be absent, a file, or a directory that still has content
        pass
def yield_lines(path):
    """Yield stripped, non-empty, non-comment lines from the file at `path`.

    Lines that are blank or start with '#' (after stripping) are skipped.
    Fix: the original opened the file without a context manager, leaking
    the handle until garbage collection; `with` closes it deterministically
    when the generator is exhausted or closed.
    """
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Placeholder prefix baked into packages at build time; replaced with the
# real environment prefix at install time (see read_has_prefix/update_prefix).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # New-style line: "<placeholder> <mode> <filename>", possibly
                # quoted; shlex handles embedded spaces, quotes are stripped.
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # Old-style line holds only the filename; assume the default
                # placeholder and text mode.
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # A missing has_prefix file simply means nothing to replace.
        pass
    return res
class PaddingError(Exception):
    """Raised when a binary prefix replacement is longer than its placeholder."""
    pass
def binary_replace(data, a, b):
    """Substitute `a` with `b` inside null-terminated regions of `data`.

    Every occurrence of `a` within a null-terminated region is replaced by
    `b`, and the region is padded with null bytes so the total length of
    `data` is preserved. All arguments are bytes objects. Raises
    PaddingError when `b` is longer than `a` (negative padding).
    """
    def _patch(match):
        found = match.group()
        count = found.count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return found.replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_patch, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite shebang lines longer than 127 bytes to use /usr/bin/env.

    Kernels truncate interpreter paths beyond 127 characters, so an overlong
    text-mode shebang such as '#!/very/long/path/python -u' becomes
    '#!/usr/bin/env python -u'. Binary data and short shebangs are returned
    unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) <= 127:
        return data
    prog = interpreter.decode('utf-8').split('/')[-1]
    replacement = '#!/usr/bin/env {0}{1}'.format(prog, options.decode('utf-8'))
    return data.replace(shebang, replacement.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Replace `placeholder` with `new_prefix` inside `data`.

    'text' mode performs a plain byte-level substitution; 'binary' mode uses
    binary_replace to keep the file length constant. Binary replacement is
    skipped on Windows, where the embedded prefix is not used for anything
    like RPATH. Any other mode aborts the process via sys.exit.
    """
    placeholder_bytes = placeholder.encode('utf-8')
    prefix_bytes = new_prefix.encode('utf-8')
    if mode == 'text':
        return data.replace(placeholder_bytes, prefix_bytes)
    if mode == 'binary':
        if on_win:
            # Some Windows files do embed prefix information, but it is not
            # used at runtime, so replacement is unnecessary there.
            logging.debug("Skipping prefix replacement in binary on Windows")
            return data
        return binary_replace(data, placeholder_bytes, prefix_bytes)
    sys.exit("Invalid mode: %s" % mode)
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
if on_win:
# force all prefix replacements to forward slashes to simplify need to escape backslashes
# replace with unix-style path separators
new_prefix = new_prefix.replace('\\', '/')
path = os.path.realpath(path)
with open(path, 'rb') as fi:
original_data = data = fi.read()
data = replace_prefix(mode, data, placeholder, new_prefix)
if not on_win:
data = replace_long_shebang(mode, data)
if data == original_data:
return
st = os.lstat(path)
# Remove file before rewriting to avoid destroying hard-linked cache
os.remove(path)
with exp_backoff_fn(open, path, 'wb') as fo:
fo.write(data)
os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a distribution string into a (channel, package) pair.

    Any trailing '[features]' selector and '.tar.bz2' suffix are stripped;
    a missing 'channel::' prefix maps to the 'defaults' channel.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    pieces = dist.split('::', 1)
    channel = pieces[0] if len(pieces) == 2 else 'defaults'
    return channel, pieces[-1]

def dist2quad(dist):
    """Return (name, version, build, channel) for a distribution string.

    Missing version/build components come back as empty strings.
    """
    channel, dname = dist2pair(dist)
    pieces = dname.rsplit('-', 2) + ['', '']
    return pieces[0], pieces[1], pieces[2], channel

def dist2name(dist):
    """Return just the package name of a distribution string."""
    return dist2quad(dist)[0]

def name_dist(dist):
    """Alias of dist2name, kept for backwards compatibility."""
    return dist2name(dist)

def dist2filename(dist, suffix='.tar.bz2'):
    """Return the package filename (channel stripped) plus `suffix`."""
    return dist2pair(dist)[1] + suffix

def dist2dirname(dist):
    """Return the extraction directory name for a distribution string."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
"""
Create the conda metadata, in a given prefix, for a given package.
"""
# read info/index.json first
with open(join(info_dir, 'index.json')) as fi:
meta = json.load(fi)
# add extra info, add to our intenral cache
meta.update(extra_info)
if not meta.get('url'):
meta['url'] = read_url(dist)
# write into <env>/conda-meta/<dist>.json
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
os.makedirs(meta_dir)
with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
json.dump(meta, fo, indent=2, sort_keys=True)
if prefix in linked_data_:
load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
"""
Create cross-platform menu items (e.g. Windows Start Menu)
Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
``remove=True`` will remove the menu items.
"""
menu_files = [f for f in files
if (f.lower().startswith('menu/') and
f.lower().endswith('.json'))]
if not menu_files:
return
elif basename(abspath(prefix)).startswith('_'):
logging.warn("Environment name starts with underscore '_'. "
"Skipping menu installation.")
return
try:
import menuinst
except:
logging.warn("Menuinst could not be imported:")
logging.warn(traceback.format_exc())
return
for f in menu_files:
try:
menuinst.install(join(prefix, f), remove, prefix)
except:
stdoutlog.error("menuinst Exception:")
stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    # Scripts live at <prefix>/bin/.<name>-<action>.sh (Scripts/...bat on
    # Windows); a missing script counts as success.
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # No cmd.exe available to run the .bat file.
            return False
    else:
        # BSD-family systems may not ship bash; fall back to /bin/sh there.
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    # Return the first URL recorded for `dist` in the package cache, or
    # None when the package is unknown or has no recorded URL.
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """Return the base64-encoded content of info/icon.png, or None.

    `source_dir` is an extracted package directory. A missing or unreadable
    icon file (IOError) yields None. Fix: the file is now opened via a
    context manager so the handle is closed deterministically instead of
    leaking until garbage collection.
    """
    import base64
    try:
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        return None
def read_no_link(info_dir):
    # Collect the union of filenames listed in info/no_link and
    # info/no_softlink; such files must be copied rather than linked.
    # Missing files are simply skipped.
    res = set()
    for fn in 'no_link', 'no_softlink':
        try:
            res.update(set(yield_lines(join(info_dir, fn))))
        except IOError:
            pass
    return res
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
# do not symlink root env - this clobbers activate incorrectly.
# prefix should always be longer than, or outside the root dir.
if normpath(prefix) in normpath(root_dir):
return
if on_win:
where = 'Scripts'
symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
else:
where = 'bin'
symlink_fn = os.symlink
if not isdir(join(prefix, where)):
os.makedirs(join(prefix, where))
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
scripts = ["conda", "activate", "deactivate"]
prefix_where = join(prefix, where)
if not isdir(prefix_where):
os.makedirs(prefix_where)
for f in scripts:
root_file = join(root_dir, where, f)
prefix_file = join(prefix_where, f)
try:
# try to kill stale links if they exist
if os.path.lexists(prefix_file):
os.remove(prefix_file)
# if they're in use, they won't be killed. Skip making new symlink.
if not os.path.lexists(prefix_file):
symlink_fn(root_file, prefix_file)
except (IOError, OSError) as e:
if (os.path.lexists(prefix_file) and
(e.errno == errno.EPERM or e.errno == errno.EACCES)):
log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
.format(root_file, prefix_file))
else:
raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard links work between `pkgs_dir` and `prefix`.

    Hard-links a small known file (the package's info/index.json) into the
    prefix and reports success; the probe file and any empty prefix dir
    created for it are always cleaned up.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        # Drop the prefix again if the probe was the only thing in it.
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
"""
Adds a new package to the cache. The URL is used to determine the
package filename and channel, and the directory pdir is scanned for
both a compressed and an extracted version of that package. If
urlstxt=True, this URL will be appended to the urls.txt file in the
cache, so that subsequent runs will correctly identify the package.
"""
package_cache()
if '/' in url:
dist = url.rsplit('/', 1)[-1]
else:
dist = url
url = None
if dist.endswith('.tar.bz2'):
fname = dist
dist = dist[:-8]
else:
fname = dist + '.tar.bz2'
xpkg = join(pdir, fname)
if not overwrite and xpkg in fname_table_:
return
if not isfile(xpkg):
xpkg = None
xdir = join(pdir, dist)
if not (isdir(xdir) and
isfile(join(xdir, 'info', 'files')) and
isfile(join(xdir, 'info', 'index.json'))):
xdir = None
if not (xpkg or xdir):
return
if url:
url = remove_binstar_tokens(url)
_, schannel = url_channel(url)
prefix = '' if schannel == 'defaults' else schannel + '::'
xkey = xpkg or (xdir + '.tar.bz2')
fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
fkey = prefix + dist
rec = package_cache_.get(fkey)
if rec is None:
rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
if url and url not in rec['urls']:
rec['urls'].append(url)
if xpkg and xpkg not in rec['files']:
rec['files'].append(xpkg)
if xdir and xdir not in rec['dirs']:
rec['dirs'].append(xdir)
if urlstxt:
try:
with open(join(pdir, 'urls.txt'), 'a') as fa:
fa.write('%s\n' % url)
except IOError:
pass
def package_cache():
"""
Initializes the package cache. Each entry in the package cache
dictionary contains three lists:
- urls: the URLs used to refer to that package
- files: the full pathnames to fetched copies of that package
- dirs: the full pathnames to extracted copies of that package
Nominally there should be no more than one entry in each list, but
in theory this can handle the presence of multiple copies.
"""
if package_cache_:
return package_cache_
# Stops recursion
package_cache_['@'] = None
for pdir in pkgs_dirs:
try:
data = open(join(pdir, 'urls.txt')).read()
for url in data.split()[::-1]:
if '/' in url:
add_cached_package(pdir, url)
except IOError:
pass
if isdir(pdir):
for fn in os.listdir(pdir):
add_cached_package(pdir, fn)
del package_cache_['@']
return package_cache_
def cached_url(url):
    # Return the channel prefix ('' or 'schannel::') recorded for a cached
    # package URL, or None if the URL is not in the cache index. The bare
    # package_cache() call ensures the index has been built.
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse whichever directory currently holds it.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # Pass 0 returns only conflict-free locations (conflict is
                # None); pass 1 is reached only when every directory holds a
                # same-named package, so its occupant (prefix + dname) is
                # reported as the package to evict.
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages, i.e.
    packages for which a tarball exists in some package cache directory.
    """
    return set(dist for dist, rec in package_cache().items() if rec['files'])
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    # Return the first recorded tarball path; falling off the loop (unknown
    # dist or empty 'files' list) returns None implicitly.
    for fn in package_cache().get(dist, {}).get('files', ()):
        return fn
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both path spellings (native path and file:// URL) from the
        # filename index before removing the tarball itself.
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages, i.e.
    packages with an unpacked directory in some package cache directory
    """
    return set(dist for dist, rec in package_cache().items() if rec['dirs'])
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    # First recorded directory wins; implicit None when no record exists.
    for fn in package_cache().get(dist, {}).get('dirs', ()):
        return fn
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # The tarball is still cached: keep the record but forget the dirs.
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
"""
Extract a package, i.e. make a package available for linkage. We assume
that the compressed package is located in the packages directory.
"""
rec = package_cache()[dist]
url = rec['urls'][0]
fname = rec['files'][0]
assert url and fname
pkgs_dir = dirname(fname)
with Locked(pkgs_dir):
path = fname[:-8]
temp_path = path + '.tmp'
rm_rf(temp_path)
with tarfile.open(fname) as t:
t.extractall(path=temp_path)
rm_rf(path)
exp_backoff_fn(os.rename, temp_path, path)
if sys.platform.startswith('linux') and os.getuid() == 0:
# When extracting as root, tarfile will by restore ownership
# of extracted files. However, we want root to be the owner
# (our implementation of --no-same-owner).
for root, dirs, files in os.walk(path):
for fn in files:
p = join(root, fn)
os.lchown(p, 0, 0)
add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """Load (or install) the conda-meta record for `dist` into the cache.

    When `rec` is None the record is read from conda-meta/<dist>.json; a
    missing or inconsistent file yields None. The record is normalized
    (fn/url/channel/schannel/link fields) and stored under its canonical
    key in linked_data_[prefix].
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Ensure the per-prefix cache dict exists before inserting below.
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        # Filename recorded in the metadata disagrees with the file's name.
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # NOTE(review): `channel[0] != '<unknown>'` compares a single
        # character against a full string and is therefore always True;
        # presumably `channel != '<unknown>'` was intended — confirm.
        if not url or (url.startswith('file:') and channel[0] != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = url_channel(url)
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    # Keys for non-default channels carry an explicit 'schannel::' prefix.
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    # Remove `dist` from the in-memory linked-data cache for `prefix` and,
    # when delete=True, also unlink its conda-meta .json file on disk.
    recs = linked_data_.get(prefix)
    if recs and dist in recs:
        del recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    # Walk upward from `path`, splitting one component per iteration, until
    # a known prefix is found; then purge either the single dist (cache
    # entry + meta file) or the whole prefix from the in-memory cache.
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                delete_linked_data(path, dist)
                return True
            else:
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # Reached the filesystem root without finding a known prefix.
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # linked_data() lazily populates the per-prefix cache on first use.
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix, keyed by
    canonical dist name, lazily loaded from conda-meta/*.json.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # Strip '.json' to recover the dist name.
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    # Keys carry an explicit 'schannel::' prefix for non-default channels.
    return set(linked_data(prefix).keys())
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    # Despite the name, this returns the (truthy) metadata dict, not a bool.
    return load_meta(prefix, dist)
def delete_trash(prefix=None):
    # Best-effort sweep of every pkgs_dir's .trash folder; `prefix` is
    # accepted for API compatibility but unused. Failures are only logged.
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not isdir(trash_dir):
            continue
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    # An empty/None `f` moves the prefix itself to the trash.
    return move_path_to_trash(join(prefix, f) if f else prefix)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash. Returns True if any pkgs_dir accepted the
    rename, False if every attempt failed (caller should then fall back
    to direct removal).
    """
    # Try deleting the trash every time we use it.
    if preclean:
        delete_trash()
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # Cannot create a trash dir here; try the next pkgs_dir.
                continue
        trash_file = tempfile.mktemp(dir=trash_dir)
        try:
            os.rename(path, trash_file)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_file, e))
        else:
            log.debug("Moved to trash: %s" % (path,))
            # The moved path may have been a prefix or a dist; purge it
            # from the linked-data cache either way.
            delete_linked_data_any(path)
            if not preclean:
                # Caller skipped the sweep above; clean up just this entry.
                rm_rf(trash_file, max_retries=1, trash=False)
            return True
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
"""
Set up a package in a specified (environment) prefix. We assume that
the package has been extracted (using extract() above).
"""
index = index or {}
source_dir = is_extracted(dist)
assert source_dir is not None
pkgs_dir = dirname(source_dir)
log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
(pkgs_dir, prefix, dist, linktype))
if not run_script(source_dir, dist, 'pre-link', prefix):
sys.exit('Error: pre-link failed: %s' % dist)
info_dir = join(source_dir, 'info')
files = list(yield_lines(join(info_dir, 'files')))
has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
no_link = read_no_link(info_dir)
with Locked(prefix), Locked(pkgs_dir):
for f in files:
src = join(source_dir, f)
dst = join(prefix, f)
dst_dir = dirname(dst)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if os.path.exists(dst):
log.warn("file already exists: %r" % dst)
rm_rf(dst)
lt = linktype
if f in has_prefix_files or f in no_link or islink(src):
lt = LINK_COPY
try:
_link(src, dst, lt)
except OSError as e:
log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
(src, dst, lt, e))
for f in sorted(has_prefix_files):
placeholder, mode = has_prefix_files[f]
try:
update_prefix(join(prefix, f), prefix, placeholder, mode)
except PaddingError:
sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
(placeholder, dist))
# make sure that the child environment behaves like the parent,
# wrt user/system install on win
# This is critical for doing shortcuts correctly
if on_win:
nonadmin = join(sys.prefix, ".nonadmin")
if isfile(nonadmin):
open(join(prefix, ".nonadmin"), 'w').close()
if shortcuts:
mk_menus(prefix, files, remove=False)
if not run_script(prefix, dist, 'post-link'):
sys.exit("Error: post-link failed for: %s" % dist)
meta_dict = index.get(dist + '.tar.bz2', {})
meta_dict['url'] = read_url(dist)
try:
alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
meta_dict['files'] = list(yield_lines(alt_files_path))
os.unlink(alt_files_path)
except IOError:
meta_dict['files'] = files
meta_dict['link'] = {'source': source_dir,
'type': link_name_map.get(linktype)}
if 'icon' in meta_dict:
meta_dict['icondata'] = read_icondata(source_dir)
create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
"""
Remove a package from the specified environment, it is an error if the
package does not exist in the prefix.
"""
with Locked(prefix):
run_script(prefix, dist, 'pre-unlink')
meta = load_meta(prefix, dist)
# Always try to run this - it should not throw errors where menus do not exist
mk_menus(prefix, meta['files'], remove=True)
dst_dirs1 = set()
for f in meta['files']:
dst = join(prefix, f)
dst_dirs1.add(dirname(dst))
rm_rf(dst)
# remove the meta-file last
delete_linked_data(prefix, dist, delete=True)
dst_dirs2 = set()
for path in dst_dirs1:
while len(path) > len(prefix):
dst_dirs2.add(path)
path = dirname(path)
# in case there is nothing left
dst_dirs2.add(join(prefix, 'conda-meta'))
dst_dirs2.add(prefix)
for path in sorted(dst_dirs2, key=len, reverse=True):
rm_empty_dir(path)
def messages(prefix):
    # Print any .messages.txt left behind by package scripts to stdout,
    # then delete the file regardless of whether reading succeeded.
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # No messages were produced.
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    Within a group with no kept member, the lexically highest dist survives.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    ldists = defaultdict(set)  # map names to set of distributions
    for dist in dist_metas:
        name = name_dist(dist)
        ldists[name].add(dist)
    res = set()
    for dists in ldists.values():
        # `dists` is the group of packages with the same name
        if len(dists) == 1:
            # if there is only one package, nothing has to be removed
            continue
        if dists & keep_dists:
            # if the group has packages which are have to be kept, we just
            # take the set of packages which are in group but not in the
            # ones which have to be kept
            res.update(dists - keep_dists)
        else:
            # otherwise, we take lowest (n-1) (sorted) packages
            res.update(sorted(dists)[:-1])
    return sorted(res)
# =========================== end API functions ==========================
def main():
# This CLI is only invoked from the self-extracting shell installers
global pkgs_dirs
from optparse import OptionParser
p = OptionParser(description="conda link tool used by installer")
p.add_option('--file',
action="store",
help="path of a file containing distributions to link, "
"by default all packages extracted in the cache are "
"linked")
p.add_option('--prefix',
action="store",
default=sys.prefix,
help="prefix (defaults to %default)")
p.add_option('-v', '--verbose',
action="store_true")
opts, args = p.parse_args()
if args:
p.error('no arguments expected')
logging.basicConfig()
prefix = opts.prefix
pkgs_dir = join(prefix, 'pkgs')
pkgs_dirs = [pkgs_dir]
if opts.verbose:
print("prefix: %r" % prefix)
if opts.file:
idists = list(yield_lines(join(prefix, opts.file)))
else:
idists = sorted(extracted())
assert idists
linktype = (LINK_HARD
if try_hard_link(pkgs_dir, prefix, idists[0]) else
LINK_COPY)
if opts.verbose:
print("linktype: %s" % link_name_map[linktype])
for dist in idists:
if opts.verbose:
print("linking: %s" % dist)
link(prefix, dist, linktype)
messages(prefix)
for dist in duplicates_to_remove(linked(prefix), idists):
meta_path = join(prefix, 'conda-meta', dist + '.json')
print("WARNING: unlinking: %s" % meta_path)
try:
os.rename(meta_path, meta_path + '.bak')
except OSError:
rm_rf(meta_path)
if __name__ == '__main__':
main()
<|code_end|>
conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url, dist2filename)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs, subdir
from .fetch import fetch_index
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
from .api import get_index
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.  When exclude_self_build is True, packages carrying a
    'file_hash' entry (locally built) are skipped.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            continue
        files.update(meta['files'])
    return files
# Matches one explicit package spec: an optional directory/URL part, the
# package tarball filename, and an optional '#<md5>' fragment.
# NOTE(review): '(:?' in the last group is likely a typo for the
# non-capturing '(?:'; as written it just permits an optional literal ':'
# before the '#'.  Harmless for well-formed specs -- confirm before changing.
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None, index=None):
    """
    Install an explicit list of package specs into *prefix*.

    Each spec is a URL or path of a conda tarball, optionally suffixed
    with '#<md5>'.  Builds up an actions plan (fetch/extract/unlink/link),
    executes it, and returns the actions dict.

    Fixes relative to the previous revision:
      * ``schannel`` is now always computed via ``url_channel`` -- it was
        previously only assigned when the package was *not* found in the
        cache, leaving it unbound (NameError) or stale for cached file://
        packages.
      * the MD5 verification loop no longer raises KeyError when the index
        entry has no 'md5' field (it used to warn and then immediately
        access ``info['md5']``); the warning also gets its missing newline.
      * the loop-local channel prefix no longer shadows the ``prefix``
        parameter (renamed to ``cprefix``).
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
    linked = {name_dist(dist): dist for dist in install_linked(prefix)}
    fetch_args = fetch_args or {}
    index = index or {}
    verifies = []   # (filename, md5) pairs to check once repodata is in hand
    channels = {}   # channels whose repodata we must fetch
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                sys.exit('Error: file not found: %s' % join(url_p, fn))
            url_p = utils_url_path(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)

        # Determine the channel name from the URL; needed unconditionally,
        # both for the channel prefix and for the is_file test below.
        channel, schannel = url_channel(url)

        # See if the URL refers to a package in our cache; cprefix is the
        # channel prefix (e.g. 'mychannel::') recorded for it there.
        cprefix = pkg_path = dir_path = None
        if url.startswith('file://'):
            cprefix = cached_url(url)

        # If not cached, derive the channel prefix from the URL
        if cprefix is None:
            cprefix = '' if schannel == 'defaults' else schannel + '::'
        fn = cprefix + fn
        dist = fn[:-8]

        is_file = schannel.startswith('file:') and schannel.endswith('/')
        # Add explicit file to index so we'll see it later
        if is_file:
            index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': None}

        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache
        if pkg_path and (is_file or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                if not is_file:
                    if fn not in index or index[fn].get('not_fetched'):
                        channels[url_p + '/'] = (schannel, 0)
                    verifies.append((dist + '.tar.bz2', md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            # can't verify; warn but proceed (do NOT read info['md5'])
            sys.stderr.write('Warning: cannot lookup MD5 of: %s\n' % fn)
        elif md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n spec: %s\n repo: %s'
                % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """
    Return *path* relative to *prefix* (which must be a prefix of it),
    normalizing backslashes to forward slashes on Windows unless told not to.
    """
    relative = path[len(prefix) + 1:]
    if windows_forward_slashes and sys.platform == 'win32':
        return relative.replace('\\', '/')
    return relative
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are returned relative to *prefix*.  With
    ignore_predefined_files=True (the default), conda's own bookkeeping
    files/directories and the conda entry-point scripts in bin/ are
    excluded.  Symlinked directories are included as entries (not walked
    into by name here; os.walk handles descent).
    """
    res = set()
    prefix = abspath(prefix)
    # top-level names that belong to conda itself, not to any package
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # entry-point scripts in bin/ that conda manages
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # only the top-level bin/ gets the binignore treatment
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # record symlinked directories themselves
                    res.add(relpath(path, prefix))
    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set of) all untracked files for a given prefix, ignoring
    editor backups, macOS .DS_Store files, and .pyc files whose source
    is tracked.
    """
    known = conda_installed_files(prefix, exclude_self_build)

    def _ignorable(p):
        # editor backup files
        if p.endswith('~'):
            return True
        # macOS Finder metadata
        if sys.platform == 'darwin' and p.endswith('.DS_Store'):
            return True
        # byte-compiled versions of tracked .py sources
        return p.endswith('.pyc') and p[:-1] in known

    return {p for p in walk_prefix(prefix) - known if not _ignorable(p)}
def which_prefix(path):
    """
    Given the path to a (presumably) conda-installed file, return the
    environment prefix in which the file is located, or None if no
    enclosing prefix can be found.
    """
    current = abspath(path)
    while not isdir(join(current, 'conda-meta')):
        parent = dirname(current)
        if parent == current:
            # reached the filesystem root without finding a conda-meta dir
            return None
        current = parent
    return current
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from. Usually the iteration yields
    only one package.

    Raises RuntimeError when no enclosing conda prefix can be found.
    """
    path = abspath(path)
    prefix = which_prefix(path)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % path)
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        # a file belongs to the package whose manifest lists it
        if any(abspath(join(prefix, f)) == path for f in meta['files']):
            yield dist
def discard_conda(dists):
    """Return *dists* with any 'conda' package filtered out."""
    return [d for d in dists if name_dist(d) != 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if sys.platform != 'win32' or not exists(join(root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    with open(join(prefix, '.nonadmin'), 'w') as fo:
        fo.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        with open(join(conda_dir, 'environments.txt'), 'a') as fh:
            fh.write('%s\n' % prefix)
    except IOError:
        # the registry is purely informational; ignore write failures
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2

    Returns (actions, untracked_files).  Conda itself and everything
    depending on it is excluded; untracked files are copied with the old
    prefix rewritten to the new one in text files.
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it.  This is a
    # fixed-point iteration: keep sweeping until no new dependent package
    # is added to the filter.  NOTE: 'filter' shadows the builtin.
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}

    # Resolve URLs for packages that do not have URLs, by matching their
    # filenames against the current channel index.
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        fetch_args = fetch_args or {}
        index = get_index(**fetch_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = name_dist(dist)
            fn = dist2filename(dist)
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                # prefer the highest-priority match for this filename
                del drecs[dist]
                dist = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[dist] = r.index[dist]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, subdir, notfound)
        raise RuntimeError(msg)

    # Assemble the URL and channel list; packages not present in the
    # fetched index are marked 'not_fetched' and force a Resolve rebuild.
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist + '.tar.bz2'
        if fkey not in index:
            info['not_fetched'] = True
            index[fkey] = info
            r = None
        urls[dist] = info['url']

    if r is None:
        r = Resolve(index)
    # install order must respect dependencies
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue
        try:
            # rewrite embedded absolute paths in text files
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the explicit package files/URLs in *paths* into *prefix*."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """
    Return (binpath, env) where binpath is the Scripts/bin directory of
    *prefix* and env is a copy of the current process environment with
    that directory prepended to PATH.
    """
    # prepend the bin directory to the path
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    # guard against PATH being unset: os.getenv would return None and
    # pathsep.join would raise TypeError
    path = os.path.pathsep.join([binpath, os.getenv('PATH') or ''])
    # copy existing environment variables, but not anything with PATH in it
    env = {k: v for k, v in iteritems(os.environ) if k != 'PATH'}
    env['PATH'] = path
    return binpath, env
def make_icon_url(info):
    """Return the URL of a package's icon, or '' when no icon is recorded."""
    channel, icon = info.get('channel'), info.get('icon')
    if not (channel and icon):
        return ''
    base_url = dirname(channel)
    return '%s/icons/%s' % (base_url, icon)
def list_prefixes():
    """Yield every environment prefix conda knows about, root env last."""
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for name in sorted(os.listdir(envs_dir)):
            # hidden entries are not environments
            if name.startswith('.'):
                continue
            candidate = join(envs_dir, name)
            if isdir(candidate):
                yield candidate
    yield root_dir
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, normpath)
# True on any flavor of Windows.
on_win = bool(sys.platform == "win32")

try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    # NOTE(review): pkgs_dirs is assigned twice in this fallback branch;
    # the final assignment at the bottom wins -- confirm the first one is
    # intentional before removing either.
    pkgs_dirs = [sys.prefix]

    class Locked(object):
        # no-op stand-in for conda.lock.Locked
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass

    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")

    def url_path(path):
        # turn an absolute filesystem path into a file:// URL
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path

    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url

    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'

    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    import ctypes
    from ctypes import wintypes

    # kernel32 CreateHardLinkW(new_name, existing_name, reserved) -> BOOL
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        # CreateSymbolicLinkW does not exist on Windows XP
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        # the third argument flags whether the link target is a directory
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
    """Special function for Windows XP where the `CreateSymbolicLink`
    function is not available.

    Simply creates a `.bat` file at `dst` which calls `src` together with
    all command line arguments.

    Works of course only with callable files, e.g. `.bat` or `.exe` files.
    """
    from conda.utils import shells
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
            pass
        else:
            raise

    # bat file redirect
    with open(dst+'.bat', 'w') as f:
        f.write('@echo off\ncall "%s" %%*\n' % src)

    # TODO: probably need one here for powershell at some point

    # This one is for bash/cygwin/msys
    # set default shell to bash.exe when not provided, as that's most common
    if not shell:
        shell = "bash.exe"
    with open(dst, "w") as f:
        f.write("#!/usr/bin/env bash \n")
        if src.endswith("conda"):
            # the conda binary itself is invoked, not sourced
            f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
        else:
            # activate/deactivate must be sourced to affect the caller
            f.write('source %s "$@"' % shells[shell]['path_to'](src))
    # Make the new file executable
    # http://stackoverflow.com/a/30463972/1170370
    mode = os.stat(dst).st_mode
    mode |= (mode & 292) >> 2    # copy R bits to X
    os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')


class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None


# silence this module's logger unless the application configures logging
log.addHandler(NullHandler())
# Link type codes used throughout this module; see _link() below.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# human-readable names for the codes above
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* as a hard link, soft link, or copy of *src*."""
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
    elif linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    # shutil.rmtree onerror callback: clear the read-only bit, then retry
    # the failing operation (func is the os.remove/os.rmdir that failed).
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback: log the failure instead of raising."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def exp_backoff_fn(fn, *args, **kwargs):
    """
    Call fn(*args, **kwargs), retrying with exponential backoff on Windows
    permission errors (typically files transiently held open by virus
    scanners).  On non-Windows platforms the call is made directly with no
    retries.  Keyword arguments are now forwarded (previously dropped).
    """
    if not on_win:
        return fn(*args, **kwargs)

    import random
    max_retries = 5
    for n in range(max_retries):
        try:
            result = fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (errno.EPERM, errno.EACCES):
                if n == max_retries - 1:
                    # out of retries: surface the original error
                    raise
                # exponential backoff with jitter, in milliseconds
                time.sleep(((2 ** n) + random.random()) * 1e-3)
            else:
                # not a retryable error
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
            return
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
            # fall back to moving the file aside
            if trash and move_path_to_trash(path):
                return

    elif isdir(path):
        # On Windows, always move to trash first.
        if trash and on_win and move_path_to_trash(path, preclean=False):
            return
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    if trash and move_path_to_trash(path):
                        return
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # retry clearing read-only bits, then shell out to 'rd'
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1

                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return

                    # back off linearly between attempts
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:  # directory might not exist or not be empty
        pass
def yield_lines(path):
    """
    Iterate over the lines of the text file at *path*, stripped of
    surrounding whitespace, skipping blank lines and '#' comment lines.
    """
    # use a context manager so the file handle is closed deterministically
    # (the previous implementation leaked the open file object)
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Default placeholder path embedded in package files at build time; see
# read_has_prefix() / update_prefix() below.
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)

    Lines that do not parse as three shlex fields (including lines that
    shlex cannot tokenize at all) fall back to the default placeholder
    in 'text' mode.
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                fields = [tok.strip('"\'') for tok in
                          shlex.split(line, posix=False)]
                placeholder, mode, filename = fields
            except ValueError:
                # old-style line: just a filename
                mapping[line] = (prefix_placeholder, 'text')
            else:
                mapping[filename] = (placeholder, mode)
    except IOError:
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement is longer than the
    placeholder, leaving no room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.

    Raises PaddingError when `b` is longer than `a` (nothing to pad with).
    """
    def _substitute(match):
        count = match.group().count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return match.group().replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_substitute, data)
    # total length must be preserved so offsets in the file stay valid
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """
    If *data* (bytes) begins with a shebang line longer than the kernel's
    127-byte limit, rewrite it as '#!/usr/bin/env <name> <options>'.
    Only applies in 'text' mode; other data is returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) > 127:
        name = interpreter.decode('utf-8').split('/')[-1]
        replacement = '#!/usr/bin/env {0}{1}'.format(name,
                                                     options.decode('utf-8'))
        data = data.replace(shebang, replacement.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """
    Replace *placeholder* with *new_prefix* in *data* (bytes) according to
    *mode* ('text' or 'binary').  Exits the process on an invalid mode.
    """
    ph = placeholder.encode('utf-8')
    np = new_prefix.encode('utf-8')
    if mode == 'text':
        return data.replace(ph, np)
    if mode == 'binary':
        # Skip binary replacement in Windows. Some files do have prefix
        # information embedded, but this should not matter, as it is not
        # used for things like RPATH.
        if on_win:
            logging.debug("Skipping prefix replacement in binary on Windows")
            return data
        return binary_replace(data, ph, np)
    sys.exit("Invalid mode: %s" % mode)
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """
    Rewrite, in place, the embedded placeholder prefix in the file at
    *path* to *new_prefix*.  No-op when the contents do not change.
    """
    if on_win:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return
    # preserve the original permission bits across the rewrite
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into a (channel, package-spec) pair.

    Any trailing '[features]' selector and '.tar.bz2' suffix are stripped;
    the channel defaults to 'defaults' when no 'channel::' prefix is given.
    """
    s = str(dist)
    if s.endswith(']'):
        s = s.split('[', 1)[0]
    if s.endswith('.tar.bz2'):
        s = s[:-len('.tar.bz2')]
    channel, sep, name = s.partition('::')
    return (channel, name) if sep else ('defaults', channel)


def dist2quad(dist):
    """Return the (name, version, build, channel) quad of a dist string.

    Missing version/build fields come back as empty strings.
    """
    channel, base = dist2pair(dist)
    fields = (base.rsplit('-', 2) + ['', ''])[:3]
    return tuple(fields) + (channel,)


def dist2name(dist):
    """Return just the package name of a dist string."""
    return dist2quad(dist)[0]


def name_dist(dist):
    """Alias of dist2name, kept for backward compatibility."""
    return dist2name(dist)


def dist2filename(dist, suffix='.tar.bz2'):
    """Return the package filename (channel stripped) with *suffix* appended."""
    return dist2pair(dist)[1] + suffix


def dist2dirname(dist):
    """Return the directory name the package extracts into."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if not meta.get('url'):
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # keep the in-memory linked-data cache in sync with the file just written
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except:
        # menuinst is optional; a failed import must not break installs
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # one broken menu item should not abort the whole operation
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # package ships no script for this action: trivially successful
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # NOTE(review): env is os.environ itself, not a copy -- the PKG_* and
    # PREFIX variables below leak into the current process environment.
    # Confirm this is intentional before changing.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the URL the cached package *dist* was fetched from, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def read_icondata(source_dir):
    """
    Return the base64-encoded contents of info/icon.png under *source_dir*,
    or None when the package ships no icon.
    """
    import base64
    try:
        # use a context manager so the file handle is closed promptly
        # (previously the handle from open(...).read() was leaked)
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
    except IOError:
        return None
    return base64.b64encode(data).decode('utf-8')
def read_no_link(info_dir):
    """Return the set of filenames listed in info/no_link and info/no_softlink."""
    names = set()
    for fname in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, fname)))
        except IOError:
            # either file may legitimately be absent
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """
    Make the conda/activate/deactivate entry points available inside
    *prefix* by linking (or .bat-redirecting on Windows) to the root
    installation's scripts.
    """
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normpath(prefix) in normpath(root_dir):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from root_dir/where into prefix/where."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)

    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                os.remove(prefix_file)
            # if they're in use, they won't be killed.  Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            if (os.path.lexists(prefix_file) and
                    (e.errno == errno.EPERM or e.errno == errno.EACCES)):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
dist = dist2filename(dist, '')
src = join(pkgs_dir, dist, 'info', 'index.json')
dst = join(prefix, '.tmp-%s' % dist)
assert isfile(src), src
assert not isfile(dst), dst
try:
if not isdir(prefix):
os.makedirs(prefix)
_link(src, dst, LINK_HARD)
# Some file systems (at least BeeGFS) do not support hard-links
# between files in different directories. Depending on the
# file system configuration, a symbolic link may be created
# instead. If a symbolic link is created instead of a hard link,
# return False.
return not os.path.islink(dst)
except OSError:
return False
finally:
rm_rf(dst)
rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# canonical dist name -> {'files': [...], 'dirs': [...], 'urls': [...]}
package_cache_ = {}
# package tarball path / file:// URL -> channel prefix string (e.g. 'chan::')
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed; there is no URL to record
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # an extracted copy only counts if its essential metadata is present
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # register the package under both its path and its file:// URL
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # newest URLs first, so the first record for a package wins
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            # also pick up packages present on disk but absent from urls.txt
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    # Return the channel prefix recorded for a cached package URL/path,
    # or None if the URL is not present in the package cache.
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # second pass (p truthy): report the occupying package as
                # the conflict; first pass: only take conflict-free slots
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both keys under which the tarball was registered
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """Return the set of canonical names of every extracted package."""
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """Return the path of the extracted data for *dist*, or None if absent."""
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return next(iter(dirs), None)
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # The tarball is still cached; just forget the extracted dirs.
        rec['dirs'] = []
    else:
        # Nothing left in the cache for this package.
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]  # tarball path minus the '.tar.bz2' suffix
        temp_path = path + '.tmp'
        # Extract into a temp directory first so a partial extraction never
        # masquerades as a complete one, then rename into place.
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None):
    """
    Load (or accept) the conda-meta record for *dist* in *prefix*, normalize
    its fn/url/channel/schannel/link fields, store it in the linked_data_
    cache, and return it. Returns None when the metadata file is missing
    or its filename does not match the dist name.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Caller supplied the record; ensure the per-prefix cache exists.
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        # Derive the tarball name from the URL, or synthesize it.
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # NOTE(review): `channel[0] != '<unknown>'` compares a single
        # character against a whole string and is therefore always True;
        # this looks like it was meant to be `channel != '<unknown>'` --
        # confirm the intended semantics before changing it.
        if not url or (url.startswith('file:') and channel[0] != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = url_channel(url)
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    # Non-default channels are keyed with a 'schannel::' prefix.
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked-data cache for *prefix*; when
    *delete* is true, also remove its conda-meta JSON file from disk."""
    cached = linked_data_.get(prefix)
    if cached and dist in cached:
        del cached[dist]
    if not delete:
        return
    meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
    if isfile(meta_path):
        os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # path is a known prefix and dist the entry inside it.
                delete_linked_data(path, dist)
                return True
            else:
                # path itself is a cached prefix; drop it wholesale.
                del linked_data_[path]
                return True
        # Chop one path component off and retry with it as the dist name.
        path, dist = os.path.split(path)
        if not dist:
            # Reached the filesystem root without finding a match.
            return False
def load_meta(prefix, dist):
    """Install metadata for a linked package in *prefix*, or None if the
    package is not linked there."""
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # Strip '.json' to recover the dist name.
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """Return the set of canonical names of linked packages in *prefix*."""
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    # Callers rely on the record itself (truthy) being returned here,
    # so the name cannot be fixed without touching every call site.
    return load_meta(prefix, dist)
def delete_trash(prefix=None):
    """Best-effort removal of every pkgs-dir '.trash' directory.

    *prefix* is accepted for API compatibility but unused.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if isdir(trash_dir):
            try:
                log.debug("Trying to delete the trash dir %s" % trash_dir)
                rm_rf(trash_dir, max_retries=1, trash=False)
            except OSError as e:
                log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    path = join(prefix, f) if f else prefix
    return move_path_to_trash(path)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    if preclean:
        delete_trash()
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # Cannot create a trash dir here; try the next pkgs dir.
                continue
        trash_file = tempfile.mktemp(dir=trash_dir)
        try:
            os.rename(path, trash_file)
        except OSError as e:
            # Rename failed (e.g. cross-device move); try the next pkgs dir.
            log.debug("Could not move %s to %s (%s)" % (path, trash_file, e))
        else:
            log.debug("Moved to trash: %s" % (path,))
            # Keep the in-memory linked-data cache consistent with disk.
            delete_linked_data_any(path)
            if not preclean:
                # Caller declined an upfront sweep, so reap this file now.
                rm_rf(trash_file, max_retries=1, trash=False)
            return True
    # No pkgs dir could accept the file.
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # Files that embed the build prefix and need it rewritten after linking.
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    # NOTE(review): taking a lock inside pkgs_dir requires write access to
    # the (possibly system-wide, read-only) package cache even when linking
    # into a user-owned prefix -- confirm this is intended; it fails with
    # EACCES for non-admin users of a shared root install.
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # Prefix-embedding files, no-link files and symlinks must be
            # copied rather than hard/soft linked.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # A post-link script may leave an alternate files list behind.
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # Collect every ancestor directory (within the prefix) of the
        # removed files so empty ones can be pruned deepest-first below.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)

        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the .messages.txt left by package scripts, then delete it."""
    path = join(prefix, '.messages.txt')
    try:
        try:
            with open(path) as fi:
                sys.stdout.write(fi.read())
        except IOError:
            # No messages file -- nothing to show.
            pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)

    # Group distributions by package name.
    by_name = defaultdict(set)
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)

    removals = set()
    for group in by_name.values():
        if len(group) == 1:
            # Only one package with this name -- nothing to remove.
            continue
        if group & keep_dists:
            # Keep the protected ones; everything else in the group goes.
            removals |= group - keep_dists
        else:
            # No protected package: keep only the highest-sorting one.
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    # This CLI is only invoked from the self-extracting shell installers
    global pkgs_dirs

    from optparse import OptionParser

    p = OptionParser(description="conda link tool used by installer")

    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")

    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")

    p.add_option('-v', '--verbose',
                 action="store_true")

    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')

    logging.basicConfig()

    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # The installer ships its own package cache inside the target prefix.
    pkgs_dirs = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)

    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())

    assert idists
    # Probe once whether hard links work here, then use that type throughout.
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])

    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)

    messages(prefix)

    # Drop any stale duplicate metadata left over from earlier installs.
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
# Script entry point: used by the self-extracting shell installers.
if __name__ == '__main__':
    main()
conda/misc.py
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url, dist2filename)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs, subdir
from .fetch import fetch_index
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
from .api import get_index
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        # Locally-built packages carry a 'file_hash' entry; skip on request.
        if exclude_self_build and 'file_hash' in meta:
            continue
        files.update(meta['files'])
    return files
# Matches "(url-or-path/)name.tar.bz2(#md5)?" explicit package specs.
# Named groups: url_p (dirname, optional), fn (tarball name), md5 (optional).
# Note: the md5 group was previously written as '(:?#...)?', a typo for the
# non-capturing group '(?:#...)?' that also accepted a stray ':' before '#'.
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(?:#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None, index=None):
    """
    Install explicitly-specified packages (URLs or file paths, optionally
    suffixed with '#<md5>') into *prefix* by building and executing the
    corresponding action plan. Returns the actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
    linked = {name_dist(dist): dist for dist in install_linked(prefix)}
    fetch_args = fetch_args or {}
    index = index or {}
    verifies = []   # (fn, md5) pairs checked once the index is complete
    channels = {}   # channels whose repodata we still need to fetch
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                sys.exit('Error: file not found: %s' % join(url_p, fn))
            url_p = utils_url_path(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)

        # See if the URL refers to a package in our cache
        prefix = pkg_path = dir_path = None
        if url.startswith('file://'):
            prefix = cached_url(url)
            if prefix is not None:
                schannel = 'defaults' if prefix == '' else prefix[:-2]
                is_file = False

        # If not, determine the channel name from the URL
        if prefix is None:
            channel, schannel = url_channel(url)
            is_file = schannel.startswith('file:') and schannel.endswith('/')
            prefix = '' if schannel == 'defaults' else schannel + '::'

        fn = prefix + fn
        dist = fn[:-8]

        # Add explicit file to index so we'll see it later
        if is_file:
            index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': None}

        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache
        if pkg_path and (is_file or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                # Evict a cached package of the same name if it is in the way.
                _, conflict = find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                if not is_file:
                    if fn not in index or index[fn].get('not_fetched'):
                        channels[url_p + '/'] = (schannel, 0)
                    verifies.append((dist + '.tar.bz2', md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index.update(fetch_index(channels, **fetch_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n spec: %s\n repo: %s'
                % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix* (with forward slashes on Windows
    when *windows_forward_slashes* is true)."""
    rel = path[len(prefix) + 1:]
    if windows_forward_slashes and sys.platform == 'win32':
        rel = rel.replace('\\', '/')
    return rel
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.
    """
    res = set()
    prefix = abspath(prefix)
    # Top-level entries that belong to conda itself, not the environment.
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # Entry points under bin/ that conda installs in every environment.
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # Record symlinked dirs themselves; walk won't descend.
                    res.add(relpath(path, prefix))

    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.
    """
    conda_files = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - conda_files:
        if path.endswith('~'):
            continue
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue
        # A .pyc is not untracked if its .py source is conda-installed.
        if path.endswith('.pyc') and path[:-1] in conda_files:
            continue
        result.add(path)
    return result
def which_prefix(path):
    """Walk upward from *path* to the enclosing conda prefix (a directory
    containing 'conda-meta'); return None if no prefix encloses it."""
    prefix = abspath(path)
    while not isdir(join(prefix, 'conda-meta')):
        parent = dirname(prefix)
        if parent == prefix:
            # Reached the filesystem root without finding a prefix.
            return None
        prefix = parent
    return prefix
def which_package(path):
    """
    given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from. Usually the iteration yields
    only one package.
    """
    path = abspath(path)
    prefix = which_prefix(path)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % path)
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        for f in meta['files']:
            if abspath(join(prefix, f)) == path:
                yield dist
                break
def discard_conda(dists):
    """Return *dists* without any distribution named 'conda'."""
    return [d for d in dists if name_dist(d) != 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if sys.platform != 'win32':
        return
    if not exists(join(root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    with open(join(prefix, '.nonadmin'), 'w') as fo:
        fo.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt."""
    dir_path = abspath(expanduser('~/.conda'))
    try:
        if not isdir(dir_path):
            os.mkdir(dir_path)
        with open(join(dir_path, 'environments.txt'), 'a') as f:
            f.write('%s\n' % prefix)
    except IOError:
        # Failing to record the environment is not fatal.
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, fetch_args=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    # Fixed point: keep adding packages that (transitively) depend on
    # anything already filtered out.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True

    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}

    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        fetch_args = fetch_args or {}
        index = get_index(**fetch_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = name_dist(dist)
            fn = dist2filename(dist)
            # Match by tarball filename; prefer the highest version found.
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                del drecs[dist]
                dist = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[dist] = r.index[dist]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, subdir, notfound)
        raise RuntimeError(msg)

    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist + '.tar.bz2'
        if fkey not in index:
            # Not in the repodata: inject it and force a fresh Resolve below.
            info['not_fetched'] = True
            index[fkey] = info
            r = None
        urls[dist] = info['url']

    if r is None:
        r = Resolve(index)
    # Install order must respect dependencies.
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            # Rewrite embedded prefix paths in text files.
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, fetch_args=fetch_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install local package tarballs given by *paths* into *prefix*."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Return (binpath, env): the prefix's bin/Scripts directory and a copy
    of the process environment with that directory prepended to PATH."""
    # prepend the bin directory to the path
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    # copy existing environment variables, but not anything with PATH in it
    env = {key: val for key, val in os.environ.items() if key != 'PATH'}
    env['PATH'] = os.path.pathsep.join([binpath, os.getenv('PATH')])
    return binpath, env
def make_icon_url(info):
    """Build the icon URL for a package record; '' when channel or icon
    information is missing."""
    channel = info.get('channel')
    icon_fn = info.get('icon')
    if not (channel and icon_fn):
        return ''
    # Icons live in an 'icons' directory next to the platform subdir.
    base_url = dirname(channel)
    return '%s/icons/%s' % (base_url, icon_fn)
def list_prefixes():
    """Yield every environment prefix conda knows about, the root last."""
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for dn in sorted(os.listdir(envs_dir)):
            if dn.startswith('.'):
                continue
            prefix = join(envs_dir, dn)
            if isdir(prefix):
                yield prefix
    yield root_dir
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -41,7 +41,6 @@
import tarfile
import tempfile
import time
-import tempfile
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, normpath)
conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -69,14 +69,18 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None,
prefix = pkg_path = dir_path = None
if url.startswith('file://'):
prefix = cached_url(url)
+ if prefix is not None:
+ schannel = 'defaults' if prefix == '' else prefix[:-2]
+ is_file = False
# If not, determine the channel name from the URL
if prefix is None:
channel, schannel = url_channel(url)
+ is_file = schannel.startswith('file:') and schannel.endswith('/')
prefix = '' if schannel == 'defaults' else schannel + '::'
+
fn = prefix + fn
dist = fn[:-8]
- is_file = schannel.startswith('file:') and schannel.endswith('/')
# Add explicit file to index so we'll see it later
if is_file:
index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': None} |
[Regression] Conda create environment fails on lock if root environment is not under user control
This issue was introduced in Conda 4.1.0 (Conda 4.0.8 works fine).
```
$ conda create -n root2 python=2
Fetching package metadata .......
Solving package specifications .............
Package plan for installation in environment /home/frol/.conda/envs/root2:
The following NEW packages will be INSTALLED:
openssl: 1.0.2h-1 (soft-link)
pip: 8.1.2-py27_0 (soft-link)
python: 2.7.11-0 (soft-link)
readline: 6.2-2 (soft-link)
setuptools: 23.0.0-py27_0 (soft-link)
sqlite: 3.13.0-0 (soft-link)
tk: 8.5.18-0 (soft-link)
wheel: 0.29.0-py27_0 (soft-link)
zlib: 1.2.8-3 (soft-link)
Proceed ([y]/n)?
Linking packages ...
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Include the output of the command 'conda info' in your report.
Traceback (most recent call last):
File "/usr/local/miniconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 120, in main
args_func(args, p)
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 127, in args_func
args.func(args, p)
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 57, in execute
install(args, parser, 'create')
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 407, in install
execute_actions(actions, index, verbose=not args.quiet)
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/plan.py", line 566, in execute_actions
inst.execute_instructions(plan, index, verbose)
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/instructions.py", line 137, in execute_instructions
cmd(state, arg)
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/instructions.py", line 80, in LINK_CMD
link(state['prefix'], dist, lt, index=state['index'], shortcuts=shortcuts)
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/install.py", line 1035, in link
with Locked(prefix), Locked(pkgs_dir):
File "/usr/local/miniconda/lib/python2.7/site-packages/conda/lock.py", line 60, in __enter__
os.makedirs(self.lock_path)
File "/usr/local/miniconda/lib/python2.7/os.py", line 157, in makedirs
mkdir(name, mode)
OSError: [Errno 13] Permission denied: '/usr/local/miniconda/pkgs/.conda_lock-949'
```
`/usr/local/miniconda/` is a system-wide installation of miniconda, so obviously, users cannot create lock files there.
P.S. I have a dream that updating conda software won't break things on every release...
| conda/egg_info.py
<|code_start|>
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function
from io import open
import os
import re
import sys
from os.path import isdir, isfile, join
from .compat import itervalues
from .install import linked_data
from .misc import rel_path
def get_site_packages_dir(installed_pkgs):
    """Locate the site-packages directory (relative to the prefix) from the
    installed 'python' package's version; None when python is not installed."""
    for info in installed_pkgs.values():
        if info['name'] != 'python':
            continue
        if sys.platform == 'win32':
            stdlib_dir = 'Lib'
        else:
            stdlib_dir = 'lib/python%s' % info['version'][:3]
        return join(stdlib_dir, 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """Yield the metadata file path for every egg/dist-info entry in *sp_dir*."""
    for fn in os.listdir(sp_dir):
        if not fn.endswith(('.egg', '.egg-info', '.dist-info')):
            continue
        path = join(sp_dir, fn)
        if isfile(path):
            yield path
        elif isdir(path):
            # Directory-style metadata: probe the known metadata filenames.
            candidates = (join(path, 'PKG-INFO'),
                          join(path, 'EGG-INFO', 'PKG-INFO'),
                          join(path, 'METADATA'))
            for cand in candidates:
                if isfile(cand):
                    yield cand
# One "Key: value" metadata header line, e.g. "Name: requests".
pat = re.compile(r'(\w+):\s*(\S+)', re.I)

def parse_egg_info(path):
    """
    Parse an .egg-info file and return its canonical distribution name
    """
    headers = {}
    for raw in open(path, encoding='utf-8'):
        m = pat.match(raw.strip())
        if m:
            headers[m.group(1).lower()] = m.group(2)
    # Both Name and Version are required to form a canonical dist name.
    if 'name' in headers and 'version' in headers:
        return '%s-%s-<pip>' % (headers['name'], headers['version'])
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default, only untracked (not conda installed) .egg-info files are
    considered. Setting `all_pkgs` to True changes this.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        return set()

    # Collect every file conda itself installed, to recognize tracked eggs.
    conda_files = set()
    for info in itervalues(installed_pkgs):
        conda_files.update(info.get('files', []))

    res = set()
    for path in get_egg_info_files(join(prefix, sp_dir)):
        f = rel_path(prefix, path)
        if not all_pkgs and f in conda_files:
            continue
        try:
            dist = parse_egg_info(path)
        except UnicodeDecodeError:
            dist = None
        if dist:
            res.add(dist)
    return res
# Manual smoke test: print the pip-installed packages of this interpreter.
if __name__ == '__main__':
    from pprint import pprint
    pprint(get_egg_info(sys.prefix))
<|code_end|>
conda/exceptions.py
<|code_start|>
from __future__ import absolute_import, division, print_function
class CondaException(Exception):
    """Root of the conda exception hierarchy."""


class InvalidInstruction(CondaException):
    """Raised when the plan executor encounters an unknown instruction."""

    def __init__(self, instruction, *args, **kwargs):
        message = "No handler for instruction: %r" % instruction
        super(InvalidInstruction, self).__init__(message, *args, **kwargs)


class LockError(RuntimeError, CondaException):
    """Raised when a conda lock cannot be acquired."""
<|code_end|>
conda/lock.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
"""
Tools for working with locks
A lock is just an empty directory. We use directories because this lets us use
the race condition-proof os.makedirs.
For now, there is one global lock for all of conda, because some things happen
globally (such as downloading packages).
We don't raise an error if the lock is named with the current PID
"""
from __future__ import absolute_import, division, print_function
import logging
import os
import time
from .exceptions import LockError
# Name stem for lock directories; the owning PID is appended.
LOCKFN = '.conda_lock'


stdoutlog = logging.getLogger('stdoutlog')


class Locked(object):
    """
    Context manager to handle locks.

    A lock is an empty directory named '.conda_lock-<pid>' created inside
    the locked path; os.makedirs makes its creation race-free. __enter__
    retries with exponential backoff while another lock exists and raises
    LockError after `retries` attempts.

    NOTE(review): os.makedirs fails with OSError (EACCES) when `path` is
    not writable by the current user -- e.g. a system-wide pkgs dir --
    which surfaces as a crash rather than a lock timeout; confirm whether
    read-only paths should be lockable at all.
    """
    def __init__(self, path, retries=10):
        self.path = path
        self.end = "-" + str(os.getpid())
        self.lock_path = os.path.join(self.path, LOCKFN + self.end)
        self.retries = retries

    def __enter__(self):
        # Keep the string "LOCKERROR" in this string so that external
        # programs can look for it.
        lockstr = ("""\
LOCKERROR: It looks like conda is already doing something.
The lock %s was found. Wait for it to finish before continuing.
If you are sure that conda is not running, remove it and try again.
You can also use: $ conda clean --lock\n""")
        sleeptime = 1

        for _ in range(self.retries):
            if os.path.isdir(self.lock_path):
                # Someone else holds a lock; back off exponentially.
                stdoutlog.info(lockstr % self.lock_path)
                stdoutlog.info("Sleeping for %s seconds\n" % sleeptime)
                time.sleep(sleeptime)
                sleeptime *= 2
            else:
                os.makedirs(self.lock_path)
                return self

        stdoutlog.error("Exceeded max retries, giving up")
        raise LockError(lockstr % self.lock_path)

    def __exit__(self, exc_type, exc_value, traceback):
        try:
            os.rmdir(self.lock_path)
            # NOTE(review): this also removes the locked directory itself
            # when it ends up empty -- confirm that is intentional.
            os.rmdir(self.path)
        except OSError:
            pass
<|code_end|>
| conda/egg_info.py
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function
from io import open
import os
import re
import sys
from os.path import isdir, isfile, join
from .compat import itervalues
from .install import linked_data
from .misc import rel_path
def get_site_packages_dir(installed_pkgs):
    """Locate the site-packages directory (relative to the prefix) from the
    installed 'python' package's version; None when python is not installed."""
    for info in installed_pkgs.values():
        if info['name'] != 'python':
            continue
        if sys.platform == 'win32':
            stdlib_dir = 'Lib'
        else:
            stdlib_dir = 'lib/python%s' % info['version'][:3]
        return join(stdlib_dir, 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """Yield the metadata file path for every egg/dist-info entry in *sp_dir*."""
    for fn in os.listdir(sp_dir):
        if not fn.endswith(('.egg', '.egg-info', '.dist-info')):
            continue
        path = join(sp_dir, fn)
        if isfile(path):
            yield path
        elif isdir(path):
            # Directory-style metadata: probe the known metadata filenames.
            candidates = (join(path, 'PKG-INFO'),
                          join(path, 'EGG-INFO', 'PKG-INFO'),
                          join(path, 'METADATA'))
            for cand in candidates:
                if isfile(cand):
                    yield cand
# One "Key: value" metadata header line, e.g. "Name: requests".
pat = re.compile(r'(\w+):\s*(\S+)', re.I)

def parse_egg_info(path):
    """
    Parse an .egg-info file and return its canonical distribution name
    """
    headers = {}
    for raw in open(path, encoding='utf-8'):
        m = pat.match(raw.strip())
        if m:
            headers[m.group(1).lower()] = m.group(2)
    # Both Name and Version are required to form a canonical dist name.
    if 'name' in headers and 'version' in headers:
        return '%s-%s-<pip>' % (headers['name'], headers['version'])
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    discovered by inspecting .egg-info files inside site-packages.

    By default only untracked (not conda installed) .egg-info files are
    considered; pass ``all_pkgs=True`` to include every one.
    """
    installed = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed)
    if sp_dir is None:
        return set()

    # Every file conda itself tracks, so we can recognize untracked eggs.
    tracked_files = set()
    for rec in itervalues(installed):
        tracked_files.update(rec.get('files', []))

    dists = set()
    for egg_path in get_egg_info_files(join(prefix, sp_dir)):
        relative = rel_path(prefix, egg_path)
        if not all_pkgs and relative in tracked_files:
            continue
        try:
            dist = parse_egg_info(egg_path)
        except UnicodeDecodeError:
            dist = None  # unreadable metadata: skip silently
        if dist:
            dists.add(dist)
    return dists
if __name__ == '__main__':
    # Ad-hoc manual check: print the egg-info-derived package names of the
    # running interpreter's prefix.
    from pprint import pprint
    pprint(get_egg_info(sys.prefix))
conda/exceptions.py
from __future__ import absolute_import, division, print_function
class CondaException(Exception):
    """Base class for all conda-specific errors."""


class InvalidInstruction(CondaException):
    """Raised when the plan executor meets an instruction it cannot handle."""

    def __init__(self, instruction, *args, **kwargs):
        message = "No handler for instruction: %r" % instruction
        super(InvalidInstruction, self).__init__(message, *args, **kwargs)
conda/lock.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
"""
Tools for working with locks
A lock is just an empty directory. We use directories because this lets us use
the race condition-proof os.makedirs.
For now, there is one global lock for all of conda, because some things happen
globally (such as downloading packages).
We don't raise an error if the lock is named with the current PID
"""
from __future__ import absolute_import, division, print_function
import os
import logging
from os.path import join
import glob
from time import sleep
LOCKFN = '.conda_lock'
stdoutlog = logging.getLogger('stdoutlog')
class Locked(object):
    """
    Context manager to handle locks.

    The lock is a directory named ``.conda_lock-<pid>`` inside ``path``;
    directories are used because ``os.makedirs`` is race-condition proof.
    A lock carrying our own PID is not treated as contention.
    """
    def __init__(self, path):
        self.path = path
        # Suffix with our PID so we can recognize our own lock directory.
        self.end = "-" + str(os.getpid())
        self.lock_path = join(self.path, LOCKFN + self.end)
        # Glob pattern matching any process's lock under this directory.
        self.pattern = join(self.path, LOCKFN + '-*')
        self.remove = True

    def __enter__(self):
        retries = 10
        # Keep the string "LOCKERROR" in this string so that external
        # programs can look for it.
        lockstr = ("""\
LOCKERROR: It looks like conda is already doing something.
The lock %s was found. Wait for it to finish before continuing.
If you are sure that conda is not running, remove it and try again.
You can also use: $ conda clean --lock\n""")
        sleeptime = 1
        files = None
        while retries:
            files = glob.glob(self.pattern)
            if files and not files[0].endswith(self.end):
                # Somebody else holds the lock: back off exponentially.
                stdoutlog.info(lockstr % str(files))
                stdoutlog.info("Sleeping for %s seconds\n" % sleeptime)
                sleep(sleeptime)
                sleeptime *= 2
                retries -= 1
            else:
                break
        else:
            # while-loop exhausted without break: lock never freed up.
            stdoutlog.error("Exceeded max retries, giving up")
            raise RuntimeError(lockstr % str(files))

        if not files:
            try:
                os.makedirs(self.lock_path)
            except OSError:
                pass
        else:  # PID lock already here --- someone else will remove it.
            self.remove = False

    def __exit__(self, exc_type, exc_value, traceback):
        # Only the process that created the lock removes it.
        if self.remove:
            for path in self.lock_path, self.path:
                try:
                    os.rmdir(path)
                except OSError:
                    pass
| conda/egg_info.py
--- a/conda/egg_info.py
+++ b/conda/egg_info.py
@@ -15,6 +15,7 @@
from .misc import rel_path
+
def get_site_packages_dir(installed_pkgs):
for info in itervalues(installed_pkgs):
if info['name'] == 'python':
conda/exceptions.py
--- a/conda/exceptions.py
+++ b/conda/exceptions.py
@@ -8,6 +8,3 @@ class InvalidInstruction(CondaException):
def __init__(self, instruction, *args, **kwargs):
msg = "No handler for instruction: %r" % instruction
super(InvalidInstruction, self).__init__(msg, *args, **kwargs)
-
-class LockError(RuntimeError, CondaException):
- pass
conda/lock.py
--- a/conda/lock.py
+++ b/conda/lock.py
@@ -17,11 +17,11 @@
"""
from __future__ import absolute_import, division, print_function
-import logging
import os
-import time
-
-from .exceptions import LockError
+import logging
+from os.path import join
+import glob
+from time import sleep
LOCKFN = '.conda_lock'
@@ -33,13 +33,15 @@ class Locked(object):
"""
Context manager to handle locks.
"""
- def __init__(self, path, retries=10):
+ def __init__(self, path):
self.path = path
self.end = "-" + str(os.getpid())
- self.lock_path = os.path.join(self.path, LOCKFN + self.end)
- self.retries = retries
+ self.lock_path = join(self.path, LOCKFN + self.end)
+ self.pattern = join(self.path, LOCKFN + '-*')
+ self.remove = True
def __enter__(self):
+ retries = 10
# Keep the string "LOCKERROR" in this string so that external
# programs can look for it.
lockstr = ("""\
@@ -48,24 +50,33 @@ def __enter__(self):
If you are sure that conda is not running, remove it and try again.
You can also use: $ conda clean --lock\n""")
sleeptime = 1
-
- for _ in range(self.retries):
- if os.path.isdir(self.lock_path):
- stdoutlog.info(lockstr % self.lock_path)
+ files = None
+ while retries:
+ files = glob.glob(self.pattern)
+ if files and not files[0].endswith(self.end):
+ stdoutlog.info(lockstr % str(files))
stdoutlog.info("Sleeping for %s seconds\n" % sleeptime)
-
- time.sleep(sleeptime)
+ sleep(sleeptime)
sleeptime *= 2
+ retries -= 1
else:
- os.makedirs(self.lock_path)
- return self
+ break
+ else:
+ stdoutlog.error("Exceeded max retries, giving up")
+ raise RuntimeError(lockstr % str(files))
- stdoutlog.error("Exceeded max retries, giving up")
- raise LockError(lockstr % self.lock_path)
+ if not files:
+ try:
+ os.makedirs(self.lock_path)
+ except OSError:
+ pass
+ else: # PID lock already here --- someone else will remove it.
+ self.remove = False
def __exit__(self, exc_type, exc_value, traceback):
- try:
- os.rmdir(self.lock_path)
- os.rmdir(self.path)
- except OSError:
- pass
+ if self.remove:
+ for path in self.lock_path, self.path:
+ try:
+ os.rmdir(path)
+ except OSError:
+ pass |
conda config needs --prepend; change behavior of --add to --append
referencing https://github.com/conda/conda/issues/2841
- conda config needs `--prepend`
- change behavior of `--add` to `--append`
- un-reverse order of `conda config --get channels`
| conda/cli/main_config.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump
from ..exceptions import (CondaTypeError, CondaValueError, CondaError,
CondaKeyError, CouldntParseError)
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class SingleValueKey(Completer):
    """Tab completion for keys that take a single boolean/string value."""
    def _get_items(self):
        bool_words = ['yes', 'no', 'on', 'off', 'true', 'false']
        return rc_bool_keys + rc_string_keys + bool_words
class ListKey(Completer):
    """Tab completion over the list-valued configuration keys."""
    def _get_items(self):
        return rc_list_keys
class BoolOrListKey(Completer):
    """Completion over both list-valued and boolean-valued keys."""
    def __contains__(self, other):
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Register the ``conda config`` sub-command and all of its options.

    ``--add``/``--prepend`` insert at the front of a list key and
    ``--append`` inserts at the back; ``--set``, ``--remove`` and
    ``--remove-key`` edit scalar and list keys in place.
    """
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # TODO: use argparse.FileType
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
                                user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar=('KEY'),
        choices=BoolOrListKey()
    )
    # "--prepend" is an explicit alias for "--add"; argparse stores both
    # under args.add (the dest comes from the first long option string).
    action.add_argument(
        "--add", "--prepend",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.
To add to the end of the list, use --append.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--append",
        nargs=2,
        action="append",
        help="""Add one configuration value to the end of a list key.
To add to the beginning of the list, use --add.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for ``conda config``: run it, converting parse and
    not-implemented failures into a CondaError."""
    try:
        execute_config(args, parser)
    except (CouldntParseError, NotImplementedError) as err:
        raise CondaError(err, args.json)
def execute_config(args, parser):
    """Apply the parsed ``conda config`` arguments to the chosen .condarc.

    Reads the rc file, performs the requested --get/--add/--append/--set/
    --remove/--remove-key operations on the in-memory dict, then writes it
    back. Emits results on stdout (or collects JSON when --json is given).
    """
    json_warnings = []
    json_get = {}

    # Pick which rc file to operate on: system, explicit --file, or user.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        if args.get == []:
            # Bare --get: show every configured key.
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if args.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(reversed(items)):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              " # lowest priority" if q == 0 else " # highest priority")
                    else:
                        print("--add", key, repr(item))

    # Add, append: same loop body, differing only in insert position.
    for arg, prepend in zip((args.add, args.append), (True, False)):
        for key, item in arg:
            if key == 'channels' and key not in rc_config:
                # 'channels' implicitly contains 'defaults' before first edit.
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                raise CondaValueError("key must be one of %s, not %r" %
                                      (', '.join(rc_list_keys), key),
                                      args.json)
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParseError("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "front" if prepend else "back")
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not args.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                raise CondaTypeError("Key: %s; %s is not a YAML boolean." %
                                     (key, item), args.json)
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            raise CondaValueError("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key),
                                  args.json)

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                raise CondaKeyError("key %r is not in the config file" %
                                    key, args.json)
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            raise CondaKeyError("%r is not in the %r key of the config file" %
                                (item, key), args.json)
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            raise CondaKeyError("key %r is not in the config file" %
                                key, args.json)
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if args.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
<|code_end|>
conda/exceptions.py
<|code_start|>
from __future__ import absolute_import, division, print_function
import sys
from traceback import format_exc
from .compat import iteritems, iterkeys
class CondaError(Exception):
    """Root of the conda exception hierarchy.

    str()/repr() join every non-boolean positional argument, so flags
    such as a trailing json bool don't leak into the displayed message.
    """
    def __init__(self, *args, **kwargs):
        super(CondaError, self).__init__(*args, **kwargs)

    def _message(self):
        # Boolean args (e.g. args.json passed through) are skipped.
        return ' '.join(str(arg) for arg in self.args
                        if not isinstance(arg, bool))

    def __repr__(self):
        return self._message()

    def __str__(self):
        return self._message()
# --- Simple error subclasses -------------------------------------------------
# Each class below only prefixes a fixed label onto the supplied message and
# defers everything else to CondaError.  Where a builtin exception (OSError,
# KeyError, SystemExit, ...) is mixed in, existing `except` clauses that
# catch the builtin keep working.

class InvalidInstruction(CondaError):
    def __init__(self, instruction, *args, **kwargs):
        msg = "No handler for instruction: %r\n" % instruction
        super(InvalidInstruction, self).__init__(msg, *args, **kwargs)


class LockError(CondaError, RuntimeError):
    def __init__(self, message, *args, **kwargs):
        msg = "Lock error: %s" % message
        super(LockError, self).__init__(msg, *args, **kwargs)


class ArgumentError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Argument Error: %s\n' % message
        super(ArgumentError, self).__init__(msg, *args, **kwargs)


class ArgumentNotFoundError(ArgumentError):
    def __init__(self, argument, *args, **kwargs):
        msg = 'Argument not found: %s\n' % argument
        super(ArgumentNotFoundError, self).__init__(msg, *args, **kwargs)


class TooManyArgumentsError(ArgumentError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Too many arguments: %s\n' % message
        super(TooManyArgumentsError, self).__init__(msg, *args, **kwargs)


class TooFewArgumentsError(ArgumentError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Too few arguments: %s\n' % message
        super(TooFewArgumentsError, self).__init__(msg, *args, **kwargs)


class CommandError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Command Error: %s\n' % message
        super(CommandError, self).__init__(msg, *args, **kwargs)


class CommandNotFoundError(CommandError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Command not found: %s\n' % message
        super(CommandNotFoundError, self).__init__(msg, *args, **kwargs)


class CondaFileNotFoundError(CondaError, OSError):
    def __init__(self, message, *args, **kwargs):
        msg = "File not found: %s\n" % message
        super(CondaFileNotFoundError, self).__init__(msg, *args, **kwargs)


class DirectoryNotFoundError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Directory not found: %s\n' % message
        super(DirectoryNotFoundError, self).__init__(msg, *args, **kwargs)


class CondaEnvironmentError(CondaError, EnvironmentError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Environment not found: %s\n' % message
        super(CondaEnvironmentError, self).__init__(msg, *args, **kwargs)


class DryRunExit(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Dry run: exiting\n'
        super(DryRunExit, self).__init__(msg, *args, **kwargs)


class CondaSystemExit(CondaError, SystemExit):
    # Passes args straight through so the SystemExit exit-code behavior
    # is preserved.
    def __init__(self, *args, **kwargs):
        super(CondaSystemExit, self).__init__(*args, **kwargs)


class SubprocessExit(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Subprocess exiting\n'
        super(SubprocessExit, self).__init__(msg, *args, **kwargs)


class PaddingError(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Padding error:\n'
        super(PaddingError, self).__init__(msg, *args, **kwargs)


class LinkError(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Link error\n'
        super(LinkError, self).__init__(msg, *args, **kwargs)


class CondaOSError(CondaError, OSError):
    def __init__(self, message, *args, **kwargs):
        msg = 'OS error: %s\n' % message
        super(CondaOSError, self).__init__(msg, *args, **kwargs)


class AlreadyInitializedError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = message + '\n'
        super(AlreadyInitializedError, self).__init__(msg, *args, **kwargs)


class ProxyError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Proxy error: %s\n' % message
        super(ProxyError, self).__init__(msg, *args, **kwargs)


class CondaIOError(CondaError, IOError):
    def __init__(self, message, *args, **kwargs):
        msg = 'IO error: %s\n' % message
        super(CondaIOError, self).__init__(msg, *args, **kwargs)


class CondaFileIOError(CondaIOError):
    def __init__(self, message, *args, **kwargs):
        msg = "Couldn't read or write to file. %s\n" % message
        super(CondaFileIOError, self).__init__(msg, *args, **kwargs)


class CondaKeyError(CondaError, KeyError):
    def __init__(self, message, *args, **kwargs):
        # Kept on the instance because KeyError repr-quotes its argument.
        self.msg = 'Key error: %s\n' % message
        super(CondaKeyError, self).__init__(self.msg, *args, **kwargs)


class ChannelError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Channel Error: %s\n' % message
        super(ChannelError, self).__init__(msg, *args, **kwargs)


class ChannelNotAllowed(ChannelError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Channel not allowed: %s\n' % message
        super(ChannelNotAllowed, self).__init__(msg, *args, **kwargs)


class CondaImportError(CondaError, ImportError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Import error: %s\n' % message
        super(CondaImportError, self).__init__(msg, *args, **kwargs)


class ParseError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Parse error: %s\n' % message
        super(ParseError, self).__init__(msg, *args, **kwargs)


class CouldntParseError(ParseError):
    def __init__(self, reason, *args, **kwargs):
        # args[0] is set directly so __repr__ below can reuse it verbatim.
        self.args = ["""Error: Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s\n""" % reason]
        super(CouldntParseError, self).__init__(self.args[0], *args, **kwargs)

    def __repr__(self):
        return self.args[0]


class MD5MismatchError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'MD5MismatchError: %s\n' % message
        super(MD5MismatchError, self).__init__(msg, *args, **kwargs)


class PackageNotFoundError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Package not found: %s\n' % message
        super(PackageNotFoundError, self).__init__(msg, *args, **kwargs)
class NoPackagesFoundError(CondaError, RuntimeError):
    '''An exception to report that requested packages are missing.

    Args:
        bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
        chains, from top level to bottom.

    Returns:
        Raises an exception with a formatted message detailing the
        missing packages and/or dependencies.
    '''
    def __init__(self, bad_deps, *args, **kwargs):
        from .resolve import dashlist
        from .config import subdir

        # The last spec of each chain is the one that could not be found.
        deps = set(q[-1].spec for q in bad_deps)
        # Word the message according to whether only chains (dependencies),
        # only single specs (packages), or a mix is missing.
        if all(len(q) > 1 for q in bad_deps):
            what = "Dependencies" if len(bad_deps) > 1 else "Dependency"
        elif all(len(q) == 1 for q in bad_deps):
            what = "Packages" if len(bad_deps) > 1 else "Package"
        else:
            what = "Packages/dependencies"
        bad_deps = dashlist(' -> '.join(map(str, q)) for q in bad_deps)
        msg = '%s missing in current %s channels: %s\n' % (what, subdir, bad_deps)
        super(NoPackagesFoundError, self).__init__(msg, *args, **kwargs)
        # Exposed so callers can inspect which specs were missing.
        self.pkgs = deps
class UnsatisfiableError(CondaError, RuntimeError):
    '''An exception to report unsatisfiable dependencies.

    Args:
        bad_deps: a list of tuples of objects (likely MatchSpecs).
        chains: (optional) if True, the tuples are interpreted as chains
            of dependencies, from top level to bottom. If False, the tuples
            are interpreted as simple lists of conflicting specs.

    Returns:
        Raises an exception with a formatted message detailing the
        unsatisfiable specifications.
    '''
    def __init__(self, bad_deps, chains=True, *args, **kwargs):
        from .resolve import dashlist, MatchSpec

        bad_deps = [list(map(lambda x: x.spec, dep)) for dep in bad_deps]
        if chains:
            # Merge chains sharing a common package-name prefix, collecting
            # the version constraints seen for each position into a set.
            chains = {}
            for dep in sorted(bad_deps, key=len, reverse=True):
                # Split every spec after the first into (name, constraint).
                dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
                key = (dep[0],) + tuple(v[0] for v in dep1)
                vals = ('',) + tuple(v[2] for v in dep1)
                found = False
                for key2, csets in iteritems(chains):
                    if key2[:len(key)] == key:
                        for cset, val in zip(csets, vals):
                            cset.add(val)
                        found = True
                if not found:
                    chains[key] = [{val} for val in vals]
            bad_deps = []
            for key, csets in iteritems(chains):
                deps = []
                for name, cset in zip(key, csets):
                    # Collapse the constraint set: an empty-string member
                    # means "unconstrained", which widens to '*' when other
                    # constraints coexist.
                    if '' not in cset:
                        pass
                    elif len(cset) == 1:
                        cset.clear()
                    else:
                        cset.remove('')
                        cset.add('*')
                    if name[0] == '@':
                        name = 'feature:' + name[1:]
                    deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
                chains[key] = ' -> '.join(deps)
            bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
            msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
        else:
            bad_deps = [sorted(dep) for dep in bad_deps]
            bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
            msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
        msg = msg % dashlist(bad_deps) + '\n'
        super(UnsatisfiableError, self).__init__(msg, *args, **kwargs)
# --- More simple labeled error subclasses (see comment above) ---------------

class InstallError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Install error: %s\n' % message
        super(InstallError, self).__init__(msg, *args, **kwargs)


class RemoveError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'RemoveError: %s\n' % message
        super(RemoveError, self).__init__(msg, *args, **kwargs)


class CondaIndexError(CondaError, IndexError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Index error: %s\n' % message
        super(CondaIndexError, self).__init__(msg, *args, **kwargs)


class CondaRuntimeError(CondaError, RuntimeError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Runtime error: %s\n' % message
        super(CondaRuntimeError, self).__init__(msg, *args, **kwargs)


class CondaValueError(CondaError, ValueError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Value error: %s\n' % message
        super(CondaValueError, self).__init__(msg, *args, **kwargs)


class CondaTypeError(CondaError, TypeError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Type error: %s\n' % message
        super(CondaTypeError, self).__init__(msg, *args, **kwargs)


class CondaAssertionError(CondaError, AssertionError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Assertion error: %s\n' % message
        super(CondaAssertionError, self).__init__(msg, *args, **kwargs)


class CondaHistoryError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'History error: %s\n' % message
        super(CondaHistoryError, self).__init__(msg, *args, **kwargs)


class CondaSignatureError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Signature error: %s\n' % message
        super(CondaSignatureError, self).__init__(msg, *args, **kwargs)
def print_exception(exception):
    """Render *exception* for the user: JSON on stdout when configured,
    otherwise the plain message on stderr."""
    from conda.config import output_json
    from conda.cli.common import stdout_json
    from sys import stderr

    text = repr(exception)

    if output_json:
        stdout_json(dict(error=text))
    else:
        stderr.write(text)
def get_info():
    """Run the ``conda info`` sub-command with stdout/stderr captured.

    Returns:
        A ``(stdout, stderr)`` tuple of the text the command produced.
    """
    # Fix: the bare `from StringIO import StringIO` only exists on
    # Python 2; fall back to io.StringIO so this works on Python 3 too.
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
    from contextlib import contextmanager
    from shlex import split
    from conda.cli import conda_argparse
    from conda.cli.main_info import configure_parser

    class CapturedText(object):
        pass

    @contextmanager
    def captured():
        # Temporarily swap sys.stdout/stderr for in-memory buffers, and
        # restore them after recording what was written.
        stdout, stderr = sys.stdout, sys.stderr
        sys.stdout = outfile = StringIO()
        sys.stderr = errfile = StringIO()
        c = CapturedText()
        yield c
        c.stdout, c.stderr = outfile.getvalue(), errfile.getvalue()
        sys.stdout, sys.stderr = stdout, stderr

    p = conda_argparse.ArgumentParser()
    sub_parsers = p.add_subparsers(metavar='command', dest='cmd')
    configure_parser(sub_parsers)

    args = p.parse_args(split("info"))
    with captured() as c:
        args.func(args, p)

    return c.stdout, c.stderr
def print_unexpected_error_message(e):
    """Print the 'unexpected error' report: a bug-report banner, the output
    of ``conda info``, the invoking command line, and the traceback
    (emitted as JSON when --json output is configured)."""
    # Capture the traceback first, before any further call can clobber it.
    traceback = format_exc()

    from conda.config import output_json

    # YAML parse errors (ScannerError/ParserError) are user errors, not
    # conda bugs, so the bug-report banner is suppressed for them.
    if not output_json and e.__class__.__name__ not in ('ScannerError', 'ParserError'):
        message = """\
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
"""
    else:
        message = ''
    print(message)

    info_stdout, info_stderr = get_info()
    print(info_stdout if info_stdout else info_stderr)
    print("`$ {0}`".format(' '.join(sys.argv)))
    print('\n')
    if output_json:
        from conda.cli.common import stdout_json
        stdout_json(dict(error=traceback))
    else:
        print('\n'.join(' ' + line for line in traceback.splitlines()))
def conda_exception_handler(func, *args, **kwargs):
    """Invoke *func*, mapping raised conda/unexpected errors to exit code 1.

    An integer result from *func* is passed through as the return value;
    any other result yields None.
    """
    try:
        result = func(*args, **kwargs)
    except CondaRuntimeError as err:
        # Runtime errors get the full "unexpected error" report.
        print_unexpected_error_message(err)
        return 1
    except CondaError as err:
        print_exception(err)
        return 1
    except Exception as err:
        print_unexpected_error_message(err)
        return 1
    else:
        if isinstance(result, int):
            return result
<|code_end|>
conda/fetch.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel, offline_keep)
from .connection import CondaSession, unparse_url, RETRIES
from .install import (add_cached_package, find_new_location, package_cache, dist2pair,
rm_rf, exp_backoff_fn)
from .lock import Locked as Locked
from .utils import memoized
from .exceptions import ProxyError, ChannelNotAllowed, CondaRuntimeError, CondaSignatureError
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Ensure the repodata cache directory exists and return its path."""
    cache_dir = join(pkgs_dirs[0], 'cache')
    try:
        os.makedirs(cache_dir)
    except OSError:
        # Directory already exists (or is uncreatable); callers cope with
        # any subsequent I/O failures themselves.
        pass
    return cache_dir
def cache_fn_url(url):
    """Map *url* to its local cache filename: the first 8 hex characters
    of the URL's md5 digest plus a '.json' suffix."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy header *http_key* from response *resp* into ``d[dict_key]``,
    doing nothing when the header is absent or empty."""
    header_value = resp.headers.get(http_key)
    if header_value:
        d[dict_key] = header_value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator factory that logs *msg* (with the call's args) only after
    the wrapped function returns, so the progress dot prints once the
    work is actually done."""
    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return wrapper
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (or revalidate) a channel's repodata.json, using the local
    JSON cache and HTTP Etag/Last-Modified conditional requests.

    Returns the repodata dict, None for a missing/forbidden noarch
    directory, or the (possibly empty) cache on connection problems.
    Raises CondaRuntimeError for hard HTTP or parse failures.
    """
    if not offline_keep(url):
        return {'packages': {}}
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # Missing or corrupt cache file: start from an empty cache.
        cache = {'packages': {}}

    if use_cache:
        return cache

    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    headers = {}
    # Conditional-request headers so an unchanged index yields a 304.
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    if 'repo.continuum.io' in url or url.startswith("file://"):
        filename = 'repodata.json.bz2'
    else:
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'

    try:
        resp = session.get(url + filename, headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            if filename.endswith('.bz2'):
                json_str = bz2.decompress(resp.content).decode('utf-8')
            else:
                json_str = resp.content.decode('utf-8')
            cache = json.loads(json_str)
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
    except ValueError as e:
        raise CondaRuntimeError("Invalid index file: {0}{1}: {2}"
                                .format(remove_binstar_tokens(url), filename, e))
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                    .split(DEFAULT_CHANNEL_ALIAS)[1] \
                    .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)
        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None
        elif (e.response.status_code == 401 and
                rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # Retry without the (apparently invalid) token.
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)
        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))

        log.debug(msg)
        raise CondaRuntimeError(msg)
    except requests.exceptions.SSLError as e:
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)
    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise CondaRuntimeError(msg)

    # Record (token-stripped) origin and write the refreshed cache back;
    # a write failure here is non-fatal.
    cache['_url'] = remove_binstar_tokens(url)
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
def handle_proxy_407(url, session):
    """Ask the user for proxy credentials and store them on *session*.

    Called after a 407 (Proxy Authentication Required) response.  The
    credentials are embedded directly into the proxy URL rather than using
    HTTPProxyAuth, because the latter does not work with https proxies
    (see https://github.com/kennethreitz/requests/issues/2061).
    """
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        raise ProxyError("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    user, pwd = get_proxy_username_and_pass(scheme)
    proxy_url = session.proxies[scheme]
    session.proxies[scheme] = add_username_and_pass_to_url(proxy_url, user, pwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with ``username:passwd`` inserted as its auth component.

    The password is percent-encoded so special characters survive the round
    trip; the username is used verbatim.
    """
    parts = list(requests.packages.urllib3.util.url.parse_url(url))
    # slot 1 of the parsed-URL tuple is the auth component
    parts[1] = '%s:%s' % (username, urllib_quote(passwd, ''))
    return unparse_url(parts)
@memoized
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials for *scheme*.

    Memoized so the user is asked at most once per scheme per process.
    """
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """Add locally cached packages that are absent from the channel index.

    index: fkey -> metadata dict, mutated in place.
    priorities: channel-url -> (schannel, priority) mapping, as produced by
        prioritize_channels(); used to assign a priority to each cached
        package's channel.  Cached packages from unknown channels get a
        priority one higher (i.e. lower precedence) than any known channel.
    """
    # collapse to schannel -> priority
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        # skip packages already in the index, or with no extracted directory
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            # unreadable/missing metadata: silently skip this cache entry
            continue
        # best-effort reconstruction of the package's origin URL
        if info['urls']:
            url = info['urls'][0]
        elif meta.get('url'):
            url = meta['url']
        elif meta.get('channel'):
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        # the URL must agree with the file name and channel of the dist key,
        # otherwise the cache entry is considered inconsistent and skipped
        if url.rsplit('/', 1)[-1] != fname:
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        if 'link' in meta:
            del meta['link']
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Add 'pip' as a dependency of every Python 2.x/3.x package in *index*.

    Implements the ``add_pip_as_python_dependency`` config option, so every
    environment with Python also gets pip.  The membership guard makes the
    operation idempotent: calling this twice, or on a python record that
    already depends on pip, no longer appends a duplicate 'pip' entry.

    index: key -> package metadata dict; mutated in place.
    """
    for info in index.values():
        if info['name'] != 'python':
            continue
        if not info['version'].startswith(('2.', '3.')):
            continue
        depends = info.setdefault('depends', [])
        if 'pip' not in depends:  # avoid duplicate dependency entries
            depends.append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """Fetch repodata from every channel and merge it into one index dict.

    channel_urls: iterable of channel URLs, or a dict url -> (schannel,
        priority) as returned by prioritize_channels().
    use_cache: serve repodata from the local cache without hitting the net.
    unknown: also add locally cached packages missing from the channels.
    index: optional existing dict to merge into (mutated and returned).

    Raises ChannelNotAllowed when a URL is outside the configured
    ``allowed_channels`` whitelist.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            raise ChannelNotAllowed("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
- %s
""" % (url, '\n - '.join(allowed_channels)))
    urls = tuple(filter(offline_keep, channel_urls))
    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in urls]
    else:
        try:
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)
    for channel, repodata in repodatas:
        if repodata is None:
            # channel could not be fetched (e.g. missing noarch dir)
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        # annotate every record with its origin so the solver can rank it
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            # records from the default channel keep a bare filename key
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info
    stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    Download the package described by the index record `info` into `dst_dir`
    (a package cache directory is chosen automatically when None), verifying
    its md5.  When the record carries a 'sig' entry, the detached signature
    is downloaded as well and verified; an invalid signature raises
    CondaSignatureError.
    '''
    session = session or CondaSession()
    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        # fn[:-8] strips the '.tar.bz2' extension
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)
    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from .signature import verify
        fn2 = fn + '.sig'
        # 'sig' == '.' means the signature lives next to the package itself
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except CondaSignatureError:
            raise
        raise CondaSignatureError("Error: Signature for '%s' is invalid." %
                                  (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None):
    """Stream *url* to *dst_path*, with md5 verification and retry logic.

    The file is first written to ``dst_path + '.part'`` and renamed into
    place only after a successful (and, when *md5* is given, verified)
    download.  407 proxy challenges trigger an interactive credential prompt
    and a retry; connection resets and md5 mismatches are retried up to
    *retries* times (default: RETRIES).  When *urlstxt* is true, the package
    is registered in the cache's urls.txt.  Raises CondaRuntimeError on
    unrecoverable failures.
    """
    assert "::" not in str(url), url
    assert "::" not in str(dst_path), str(dst_path)
    if not offline_keep(url):
        raise RuntimeError("Cannot download in offline mode: %s" % (url,))
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()
    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)
    if retries is None:
        retries = RETRIES
    with Locked(dst_path):
        rm_rf(dst_path)
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise CondaRuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise CondaRuntimeError(msg)
        except IOError as e:
            raise CondaRuntimeError("Could not open '%s': %s" % (url, e))
        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            # progress-bar handlers listen on the 'fetch.*' loggers
            getLogger('fetch.start').info((fn[:14], size))
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                index = 0
                for chunk in resp.iter_content(2**14):
                    index += len(chunk)
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise CondaRuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    if size and 0 <= index <= size:
                        getLogger('fetch.update').info(index)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise CondaRuntimeError("Could not open %r for writing (%s)." % (pp, e))
        if size:
            getLogger('fetch.stop').info(None)
        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise CondaRuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                                    % (url, h.hexdigest(), md5))
        try:
            # rename with exponential backoff: Windows can hold the file
            # briefly (antivirus, indexing) after the handle is closed
            exp_backoff_fn(os.rename, pp, dst_path)
        except OSError as e:
            raise CondaRuntimeError("Could not rename %r to %r: %r" %
                                    (pp, dst_path, e))
        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """Context manager that downloads *url* into a fresh temporary directory
    and yields the local path; the directory is removed on exit.  A plain
    local path (no '://') is yielded unchanged and nothing is cleaned up.
    """

    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # already a local file; nothing to download or clean up
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from .console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        dst = join(self.tmp_dir, basename(self.url))
        download(self.url, dst)
        return dst

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/cli/main_config.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from .common import (Completer, add_parser_json, stdout_json_success)
from ..compat import string_types
from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path,
user_rc_path, rc_other)
from ..utils import yaml_load, yaml_dump
from ..exceptions import (CondaTypeError, CondaValueError, CondaError,
CondaKeyError, CouldntParseError)
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
# Tab-completion choices for "conda config --set": scalar config keys plus
# the YAML spellings of booleans.
class SingleValueKey(Completer):
    def _get_items(self):
        return rc_bool_keys + \
               rc_string_keys + \
               ['yes', 'no', 'on', 'off', 'true', 'false']
# Completion choices for list-valued keys (--add/--append/--prepend).
class ListKey(Completer):
    def _get_items(self):
        return rc_list_keys
# Completion choices for "conda config --get": list and boolean keys.
class BoolOrListKey(Completer):
    def __contains__(self, other):
        # argparse probes membership via ``in``; delegate to the cached items
        return other in self.get_items()
    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Attach the ``conda config`` sub-command parser to *sub_parsers*."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)
    # TODO: use argparse.FileType
    # --system / --file select which .condarc to edit; mutually exclusive
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
        config file ({user}).""".format(system=sys_rc_path,
                                        user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )
    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar=('KEY'),
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--append", "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to the end of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--prepend",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
    all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
    remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for ``conda config``: run it, converting expected parse
    failures into a CondaError (carrying the json flag for output formatting).
    """
    try:
        execute_config(args, parser)
    except (CouldntParseError, NotImplementedError) as e:
        raise CondaError(e, args.json)
def execute_config(args, parser):
    """Apply the requested --get/--add/--prepend/--set/--remove/--remove-key
    operations to the selected .condarc file, then write it back.

    In --json mode, results and warnings are collected and emitted as a
    single JSON document instead of being printed as they occur.
    """
    json_warnings = []
    json_get = {}
    # pick the target rc file: --system wins, then --file; note that --file
    # has a default value, so the final else branch is effectively unreachable
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path
    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}
    # Get
    if args.get is not None:
        if args.get == []:
            # bare --get means "show everything"
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not args.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue
            if args.json:
                json_get[key] = rc_config[key]
                continue
            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(items):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              " # lowest priority" if q == 0 else " # highest priority")
                    else:
                        print("--add", key, repr(item))
    # prepend, append, add
    for arg, prepend in zip((args.prepend, args.append), (True, False)):
        for key, item in arg:
            # 'channels' implicitly contains 'defaults' when unconfigured
            if key == 'channels' and key not in rc_config:
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                raise CondaValueError("key must be one of %s, not %r" %
                                      (', '.join(rc_list_keys), key),
                                      args.json)
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParseError("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates:
                # drop the existing occurrence and re-insert at the new spot
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "top" if prepend else "bottom")
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not args.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)
    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value (the value is parsed as YAML, so e.g. 'yes'
        # becomes the boolean True)
        yamlitem = yaml_load(item)
        if key in set_bools:
            if not isinstance(yamlitem, bool):
                raise CondaTypeError("Key: %s; %s is not a YAML boolean." %
                                     (key, item), args.json)
            rc_config[key] = yamlitem
        elif key in set_strings:
            rc_config[key] = yamlitem
        else:
            raise CondaValueError("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key),
                                  args.json)
    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                raise CondaKeyError("key %r is not in the config file" %
                                    key, args.json)
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            raise CondaKeyError("%r is not in the %r key of the config file" %
                                (item, key), args.json)
        rc_config[key] = [i for i in rc_config[key] if i != item]
    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            raise CondaKeyError("key %r is not in the config file" %
                                key, args.json)
        del rc_config[key]
    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))
    if args.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
conda/exceptions.py
from __future__ import absolute_import, division, print_function
import sys
from traceback import format_exc
from .compat import iteritems, iterkeys
class CondaError(Exception):
    """Root of the conda exception hierarchy.

    Boolean positional args (callers sometimes pass an ``args.json`` flag
    alongside the message) are filtered out of the displayed text.
    """
    def __init__(self, *args, **kwargs):
        super(CondaError, self).__init__(*args, **kwargs)

    def _message(self):
        # skip bool args so a trailing json flag never leaks into the output
        return ' '.join(str(a) for a in self.args if not isinstance(a, bool))

    def __repr__(self):
        return self._message()

    def __str__(self):
        return self._message()
class InvalidInstruction(CondaError):
    """Raised when the plan executor meets an instruction it has no handler for."""
    def __init__(self, instruction, *args, **kwargs):
        super(InvalidInstruction, self).__init__(
            "No handler for instruction: %r\n" % instruction, *args, **kwargs)
# ---------------------------------------------------------------------------
# Concrete error types.  Each subclass merely prefixes a human-readable tag
# to the message and defers to CondaError (whose __str__ joins the non-bool
# args).  Several also inherit the matching builtin exception so existing
# handlers for e.g. OSError or RuntimeError keep working.
# ---------------------------------------------------------------------------
class LockError(CondaError, RuntimeError):
    def __init__(self, message, *args, **kwargs):
        msg = "Lock error: %s" % message
        super(LockError, self).__init__(msg, *args, **kwargs)
class ArgumentError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Argument Error: %s\n' % message
        super(ArgumentError, self).__init__(msg, *args, **kwargs)
class ArgumentNotFoundError(ArgumentError):
    def __init__(self, argument, *args, **kwargs):
        msg = 'Argument not found: %s\n' % argument
        super(ArgumentNotFoundError, self).__init__(msg, *args, **kwargs)
class TooManyArgumentsError(ArgumentError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Too many arguments: %s\n' % message
        super(TooManyArgumentsError, self).__init__(msg, *args, **kwargs)
class TooFewArgumentsError(ArgumentError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Too few arguments: %s\n' % message
        super(TooFewArgumentsError, self).__init__(msg, *args, **kwargs)
class CommandError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Command Error: %s\n' % message
        super(CommandError, self).__init__(msg, *args, **kwargs)
class CommandNotFoundError(CommandError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Command not found: %s\n' % message
        super(CommandNotFoundError, self).__init__(msg, *args, **kwargs)
class CondaFileNotFoundError(CondaError, OSError):
    def __init__(self, message, *args, **kwargs):
        msg = "File not found: %s\n" % message
        super(CondaFileNotFoundError, self).__init__(msg, *args, **kwargs)
class DirectoryNotFoundError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Directory not found: %s\n' % message
        super(DirectoryNotFoundError, self).__init__(msg, *args, **kwargs)
class CondaEnvironmentError(CondaError, EnvironmentError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Environment not found: %s\n' % message
        super(CondaEnvironmentError, self).__init__(msg, *args, **kwargs)
# Raised to exit cleanly after a --dry-run; not a real failure.
class DryRunExit(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Dry run: exiting\n'
        super(DryRunExit, self).__init__(msg, *args, **kwargs)
class CondaSystemExit(CondaError, SystemExit):
    def __init__(self, *args, **kwargs):
        super(CondaSystemExit, self).__init__(*args, **kwargs)
class SubprocessExit(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Subprocess exiting\n'
        super(SubprocessExit, self).__init__(msg, *args, **kwargs)
class PaddingError(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Padding error:\n'
        super(PaddingError, self).__init__(msg, *args, **kwargs)
class LinkError(CondaError):
    def __init__(self, *args, **kwargs):
        msg = 'Link error\n'
        super(LinkError, self).__init__(msg, *args, **kwargs)
class CondaOSError(CondaError, OSError):
    def __init__(self, message, *args, **kwargs):
        msg = 'OS error: %s\n' % message
        super(CondaOSError, self).__init__(msg, *args, **kwargs)
class AlreadyInitializedError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = message + '\n'
        super(AlreadyInitializedError, self).__init__(msg, *args, **kwargs)
class ProxyError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Proxy error: %s\n' % message
        super(ProxyError, self).__init__(msg, *args, **kwargs)
class CondaIOError(CondaError, IOError):
    def __init__(self, message, *args, **kwargs):
        msg = 'IO error: %s\n' % message
        super(CondaIOError, self).__init__(msg, *args, **kwargs)
class CondaFileIOError(CondaIOError):
    def __init__(self, message, *args, **kwargs):
        msg = "Couldn't read or write to file. %s\n" % message
        super(CondaFileIOError, self).__init__(msg, *args, **kwargs)
class CondaKeyError(CondaError, KeyError):
    def __init__(self, message, *args, **kwargs):
        # also kept on the instance, unlike the other subclasses
        self.msg = 'Key error: %s\n' % message
        super(CondaKeyError, self).__init__(self.msg, *args, **kwargs)
class ChannelError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Channel Error: %s\n' % message
        super(ChannelError, self).__init__(msg, *args, **kwargs)
class ChannelNotAllowed(ChannelError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Channel not allowed: %s\n' % message
        super(ChannelNotAllowed, self).__init__(msg, *args, **kwargs)
class CondaImportError(CondaError, ImportError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Import error: %s\n' % message
        super(CondaImportError, self).__init__(msg, *args, **kwargs)
class ParseError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Parse error: %s\n' % message
        super(ParseError, self).__init__(msg, *args, **kwargs)
class CouldntParseError(ParseError):
    # Raised when the .condarc YAML cannot be parsed; the full message is
    # also stashed in self.args[0] and echoed by __repr__.
    def __init__(self, reason, *args, **kwargs):
        self.args = ["""Error: Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s\n""" % reason]
        super(CouldntParseError, self).__init__(self.args[0], *args, **kwargs)
    def __repr__(self):
        return self.args[0]
class MD5MismatchError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'MD5MismatchError: %s\n' % message
        super(MD5MismatchError, self).__init__(msg, *args, **kwargs)
class PackageNotFoundError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Package not found: %s\n' % message
        super(PackageNotFoundError, self).__init__(msg, *args, **kwargs)
class CondaHTTPError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'HTTP Error: %s\n' % message
        super(CondaHTTPError, self).__init__(msg, *args, **kwargs)
class NoPackagesFoundError(CondaError, RuntimeError):
    '''An exception to report that requested packages are missing.
    Args:
        bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
        chains, from top level to bottom.
    Returns:
        Raises an exception with a formatted message detailing the
        missing packages and/or dependencies.
    '''
    def __init__(self, bad_deps, *args, **kwargs):
        # deferred imports: top-level imports here would create a cycle with
        # conda.resolve / conda.config
        from .resolve import dashlist
        from .config import subdir
        # the last spec in each chain is the one that is actually missing
        deps = set(q[-1].spec for q in bad_deps)
        # chains of length 1 are directly-requested packages; longer chains
        # mean a dependency further down was missing
        if all(len(q) > 1 for q in bad_deps):
            what = "Dependencies" if len(bad_deps) > 1 else "Dependency"
        elif all(len(q) == 1 for q in bad_deps):
            what = "Packages" if len(bad_deps) > 1 else "Package"
        else:
            what = "Packages/dependencies"
        bad_deps = dashlist(' -> '.join(map(str, q)) for q in bad_deps)
        msg = '%s missing in current %s channels: %s\n' % (what, subdir, bad_deps)
        super(NoPackagesFoundError, self).__init__(msg, *args, **kwargs)
        # exposed so callers can inspect which specs were missing
        self.pkgs = deps
class UnsatisfiableError(CondaError, RuntimeError):
    '''An exception to report unsatisfiable dependencies.
    Args:
        bad_deps: a list of tuples of objects (likely MatchSpecs).
        chains: (optional) if True, the tuples are interpreted as chains
            of dependencies, from top level to bottom. If False, the tuples
            are interpreted as simple lists of conflicting specs.
    Returns:
        Raises an exception with a formatted message detailing the
        unsatisfiable specifications.
    '''
    def __init__(self, bad_deps, chains=True, *args, **kwargs):
        from .resolve import dashlist, MatchSpec
        bad_deps = [list(map(lambda x: x.spec, dep)) for dep in bad_deps]
        if chains:
            # merge chains that share a common prefix of package names,
            # collecting the differing version constraints per position
            chains = {}
            for dep in sorted(bad_deps, key=len, reverse=True):
                # split each "name constraint" spec into (name, constraint)
                dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
                key = (dep[0],) + tuple(v[0] for v in dep1)
                vals = ('',) + tuple(v[2] for v in dep1)
                found = False
                for key2, csets in iteritems(chains):
                    if key2[:len(key)] == key:
                        for cset, val in zip(csets, vals):
                            cset.add(val)
                        found = True
                if not found:
                    chains[key] = [{val} for val in vals]
            bad_deps = []
            for key, csets in iteritems(chains):
                deps = []
                for name, cset in zip(key, csets):
                    # '' in cset means at least one chain had no constraint
                    # for this package; with other constraints present it is
                    # rendered as the wildcard '*'
                    if '' not in cset:
                        pass
                    elif len(cset) == 1:
                        cset.clear()
                    else:
                        cset.remove('')
                        cset.add('*')
                    if name[0] == '@':
                        # '@foo' names denote track_features pseudo-packages
                        name = 'feature:' + name[1:]
                    deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
                chains[key] = ' -> '.join(deps)
            bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
            msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
        else:
            bad_deps = [sorted(dep) for dep in bad_deps]
            bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
            msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
        msg = msg % dashlist(bad_deps) + '\n'
        super(UnsatisfiableError, self).__init__(msg, *args, **kwargs)
# More boilerplate error types; same message-prefix pattern as above.
class InstallError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Install error: %s\n' % message
        super(InstallError, self).__init__(msg, *args, **kwargs)
class RemoveError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'RemoveError: %s\n' % message
        super(RemoveError, self).__init__(msg, *args, **kwargs)
class CondaIndexError(CondaError, IndexError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Index error: %s\n' % message
        super(CondaIndexError, self).__init__(msg, *args, **kwargs)
class CondaRuntimeError(CondaError, RuntimeError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Runtime error: %s\n' % message
        super(CondaRuntimeError, self).__init__(msg, *args, **kwargs)
class CondaValueError(CondaError, ValueError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Value error: %s\n' % message
        super(CondaValueError, self).__init__(msg, *args, **kwargs)
class CondaTypeError(CondaError, TypeError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Type error: %s\n' % message
        super(CondaTypeError, self).__init__(msg, *args, **kwargs)
class CondaAssertionError(CondaError, AssertionError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Assertion error: %s\n' % message
        super(CondaAssertionError, self).__init__(msg, *args, **kwargs)
class CondaHistoryError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'History error: %s\n' % message
        super(CondaHistoryError, self).__init__(msg, *args, **kwargs)
class CondaSignatureError(CondaError):
    def __init__(self, message, *args, **kwargs):
        msg = 'Signature error: %s\n' % message
        super(CondaSignatureError, self).__init__(msg, *args, **kwargs)
def print_exception(exception):
    """Render a handled conda error to the user: as a JSON document on
    stdout when json output is configured, otherwise as plain text on
    stderr.  Imports are deferred to avoid circular imports.
    """
    from conda.config import output_json
    from conda.cli.common import stdout_json
    from sys import stderr
    message = repr(exception)
    if output_json:
        stdout_json(dict(error=message))
    else:
        stderr.write(message)
def get_info():
    """Run ``conda info`` in-process and return its (stdout, stderr) text.

    Used by the unexpected-error report to include environment details.
    """
    from conda.compat import StringIO
    from contextlib import contextmanager
    from conda.cli import conda_argparse
    from conda.cli.main_info import configure_parser
    class CapturedText(object):
        pass
    @contextmanager
    def captured():
        # temporarily swap sys.stdout/stderr for in-memory buffers;
        # NOTE(review): if the wrapped call raises, the streams are not
        # restored (no try/finally here) -- callers rely on it not raising
        stdout, stderr = sys.stdout, sys.stderr
        sys.stdout = outfile = StringIO()
        sys.stderr = errfile = StringIO()
        c = CapturedText()
        yield c
        c.stdout, c.stderr = outfile.getvalue(), errfile.getvalue()
        sys.stdout, sys.stderr = stdout, stderr
    # build a minimal parser with just the 'info' sub-command and invoke it
    p = conda_argparse.ArgumentParser()
    sub_parsers = p.add_subparsers(metavar='command', dest='cmd')
    configure_parser(sub_parsers)
    from shlex import split
    args = p.parse_args(split("info"))
    with captured() as c:
        args.func(args, p)
    return c.stdout, c.stderr
def print_unexpected_error_message(e):
    """Print a bug-report blurb for an unexpected exception, including the
    output of ``conda info``, the command line, and the full traceback.
    """
    # must be called from inside the except block so format_exc() sees it
    traceback = format_exc()
    from conda.config import output_json
    # YAML parse errors (ScannerError/ParserError) are user errors, not
    # bugs, so the issue-tracker blurb is suppressed for them
    if not output_json and e.__class__.__name__ not in ('ScannerError', 'ParserError'):
        message = """\
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
"""
    else:
        message = ''
    print(message)
    info_stdout, info_stderr = get_info()
    print(info_stdout if info_stdout else info_stderr)
    print("`$ {0}`".format(' '.join(sys.argv)))
    print('\n')
    if output_json:
        from conda.cli.common import stdout_json
        stdout_json(dict(error=traceback))
    else:
        print('\n'.join(' ' + line for line in traceback.splitlines()))
def conda_exception_handler(func, *args, **kwargs):
    """Run *func(*args, **kwargs)*, mapping conda errors to exit status 1.

    An integer result is returned as-is (it is the exit status); any other
    result yields None.  Known CondaErrors are printed tersely; everything
    else produces the full unexpected-error report.
    """
    try:
        result = func(*args, **kwargs)
    except CondaRuntimeError as e:
        # more specific than CondaError, so it must be caught first
        print_unexpected_error_message(e)
        return 1
    except CondaError as e:
        print_exception(e)
        return 1
    except Exception as e:
        print_unexpected_error_message(e)
        return 1
    if isinstance(result, int):
        return result
conda/fetch.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import tempfile
import warnings
from functools import wraps
from logging import getLogger
from os.path import basename, dirname, join
from .compat import itervalues, input, urllib_quote, iterkeys, iteritems
from .config import (pkgs_dirs, DEFAULT_CHANNEL_ALIAS, remove_binstar_tokens,
hide_binstar_tokens, allowed_channels, add_pip_as_python_dependency,
ssl_verify, rc, prioritize_channels, url_channel, offline_keep)
from .connection import CondaSession, unparse_url, RETRIES
from .install import (add_cached_package, find_new_location, package_cache, dist2pair,
rm_rf, exp_backoff_fn)
from .lock import Locked as Locked
from .utils import memoized
from .exceptions import ProxyError, ChannelNotAllowed, CondaRuntimeError, CondaSignatureError, \
CondaError, CondaHTTPError
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
    """Return the repodata cache directory, creating it when absent."""
    path = join(pkgs_dirs[0], 'cache')
    try:
        os.makedirs(path)
    except OSError:
        # already present (or not creatable); callers tolerate either case
        pass
    return path
def cache_fn_url(url):
    """Return the local cache file name for a channel *url*.

    The name is the first 8 hex digits of the URL's MD5 plus '.json' --
    short enough to stay readable while still effectively unique.
    """
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy one HTTP response header into *d* under *dict_key*.

    Missing or empty headers are skipped, so *d* only ever holds real values.
    """
    header_value = resp.headers.get(http_key)
    if not header_value:
        return
    d[dict_key] = header_value
class dotlog_on_return(object):
    """Decorator factory that logs *msg* (plus the call's args) at debug
    level *after* the wrapped function returns -- so the progress dot is
    emitted once the repodata has actually been fetched.
    """

    def __init__(self, msg):
        self.msg = msg

    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return wrapper
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """
    Fetch (and locally cache) the repodata for a single channel *url*.

    Returns the parsed repodata dict (possibly the cached copy on a
    304 Not Modified or on SSL/connection failures), or None for
    missing/forbidden noarch directories.  Raises CondaHTTPError or
    CondaRuntimeError on other HTTP or parsing failures.
    """
    if not offline_keep(url):
        # offline mode and the URL is not local: pretend the channel is empty
        return {'packages': {}}
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # no usable cached copy; start from an empty skeleton
        cache = {'packages': {}}

    if use_cache:
        return cache

    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    headers = {}
    # Conditional-request headers so an unchanged server can answer 304
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    if 'repo.continuum.io' in url or url.startswith("file://"):
        filename = 'repodata.json.bz2'
    else:
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'

    try:
        resp = session.get(url + filename, headers=headers, proxies=session.proxies)
        resp.raise_for_status()
        if resp.status_code != 304:
            # fresh payload: decompress if needed and remember the validators
            if filename.endswith('.bz2'):
                json_str = bz2.decompress(resp.content).decode('utf-8')
            else:
                json_str = resp.content.decode('utf-8')
            cache = json.loads(json_str)
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise CondaRuntimeError("Invalid index file: {0}{1}: {2}"
                                .format(remove_binstar_tokens(url), filename, e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 407:  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)

        if e.response.status_code == 404:
            if url.startswith(DEFAULT_CHANNEL_ALIAS):
                user = remove_binstar_tokens(url) \
                    .split(DEFAULT_CHANNEL_ALIAS)[1] \
                    .split("/")[0]
                msg = 'Could not find anaconda.org user %s' % user
            else:
                if url.endswith('/noarch/'):  # noarch directory might not exist
                    return None
                msg = 'Could not find URL: %s' % remove_binstar_tokens(url)

        elif e.response.status_code == 403 and url.endswith('/noarch/'):
            return None

        elif (e.response.status_code == 401 and
                rc.get('channel_alias', DEFAULT_CHANNEL_ALIAS) in url):
            # Note, this will not trigger if the binstar configured url does
            # not match the conda configured one.
            msg = ("Warning: you may need to login to anaconda.org again with "
                   "'anaconda login' to access private packages(%s, %s)" %
                   (hide_binstar_tokens(url), e))
            stderrlog.info(msg)
            # retry with the token stripped from the URL
            return fetch_repodata(remove_binstar_tokens(url),
                                  cache_dir=cache_dir,
                                  use_cache=use_cache, session=session)

        else:
            msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))

        log.debug(msg)
        raise CondaHTTPError(msg)

    except requests.exceptions.SSLError as e:
        # SSL problems are logged but not fatal; the cached data is returned
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        # requests isn't so nice here. For whatever reason, https gives this
        # error and http gives the above error. Also, there is no status_code
        # attribute here. We have to just check if it looks like 407. See
        # https://github.com/kennethreitz/requests/issues/2061.
        if "407" in str(e):  # Proxy Authentication Required
            handle_proxy_407(url, session)
            # Try again
            return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
        msg = "Connection error: %s: %s\n" % (e, remove_binstar_tokens(url))
        stderrlog.info('Could not connect to %s\n' % remove_binstar_tokens(url))
        log.debug(msg)
        if fail_unknown_host:
            raise CondaRuntimeError(msg)

    cache['_url'] = remove_binstar_tokens(url)
    try:
        # best-effort write-back of the (possibly updated) cache
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True)
    except IOError:
        pass

    return cache or None
def handle_proxy_407(url, session):
    """
    Prompts the user for the proxy username and password and modifies the
    proxy in the session object to include it.
    """
    # We could also use HTTPProxyAuth, but this does not work with https
    # proxies (see https://github.com/kennethreitz/requests/issues/2061).
    scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
    if scheme not in session.proxies:
        raise ProxyError("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
    # credentials are memoized per scheme, so the user is asked only once
    username, passwd = get_proxy_username_and_pass(scheme)
    # mutate the session in place so subsequent requests carry the auth
    session.proxies[scheme] = add_username_and_pass_to_url(
        session.proxies[scheme], username, passwd)
def add_username_and_pass_to_url(url, username, passwd):
    """Return *url* with '<username>:<passwd>@' injected as its auth component."""
    urlparts = list(requests.packages.urllib3.util.url.parse_url(url))
    # percent-encode the password so special characters survive the URL
    passwd = urllib_quote(passwd, '')
    # index 1 of urllib3's parsed Url tuple is the 'auth' field
    urlparts[1] = username + ':' + passwd
    return unparse_url(urlparts)
@memoized
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials; memoized per *scheme*."""
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass.getpass("Password:")
    return username, passwd
def add_unknown(index, priorities):
    """
    Add locally cached/extracted packages that are missing from *index*,
    reconstructing their metadata from each package's info/index.json.
    *priorities* maps channel URLs to (schannel, priority) pairs.
    """
    # map schannel -> priority; unknown channels sort below every known one
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        schannel, dname = dist2pair(dist)
        fname = dname + '.tar.bz2'
        fkey = dist + '.tar.bz2'
        # skip packages already indexed or without an extracted directory
        if fkey in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = json.load(fi)
        except IOError:
            continue
        # choose the best URL we know for this package
        if info['urls']:
            url = info['urls'][0]
        elif meta.get('url'):
            url = meta['url']
        elif meta.get('channel'):
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        if url.rsplit('/', 1)[-1] != fname:
            # URL does not actually point at this file name; skip it
            continue
        channel, schannel2 = url_channel(url)
        if schannel2 != schannel:
            continue
        priority = priorities.get(schannel, maxp)
        if 'link' in meta:
            del meta['link']
        meta.update({'fn': fname, 'url': url, 'channel': channel,
                     'schannel': schannel, 'priority': priority})
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % fkey)
        index[fkey] = meta
def add_pip_dependency(index):
    """Append 'pip' to the depends list of every python 2.x/3.x record in *index*."""
    for record in itervalues(index):
        is_python = record['name'] == 'python'
        if is_python and record['version'].startswith(('2.', '3.')):
            record.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """
    Build the package index by fetching repodata for every channel in
    *channel_urls*, concurrently when concurrent.futures is available.
    Each record is annotated with its origin channel and priority.
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    stdoutlog.info("Fetching package metadata ...")
    if not isinstance(channel_urls, dict):
        channel_urls = prioritize_channels(channel_urls)
    # enforce the allowed-channels whitelist before touching the network
    for url in iterkeys(channel_urls):
        if allowed_channels and url not in allowed_channels:
            raise ChannelNotAllowed("""
Error: URL '%s' not in allowed channels.
Allowed channels are:
  - %s
""" % (url, '\n  - '.join(allowed_channels)))

    urls = tuple(filter(offline_keep, channel_urls))
    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError):
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in urls]
    else:
        try:
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        finally:
            executor.shutdown(wait=True)

    for channel, repodata in repodatas:
        if repodata is None:
            continue
        new_index = repodata['packages']
        url_s, priority = channel_urls[channel]
        channel = channel.rstrip('/')
        # annotate every record with its origin channel and priority;
        # non-default channels get a 'schannel::fn' index key
        for fn, info in iteritems(new_index):
            info['fn'] = fn
            info['schannel'] = url_s
            info['channel'] = channel
            info['priority'] = priority
            info['url'] = channel + '/' + fn
            key = url_s + '::' + fn if url_s != 'defaults' else fn
            index[key] = info

    stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    session = session or CondaSession()

    fn = info['fn']
    url = info.get('url')
    if url is None:
        url = info['channel'] + '/' + fn
    log.debug("url=%r" % url)
    if dst_dir is None:
        # let the install machinery pick a package-cache location
        dst_dir = find_new_location(fn[:-8])[0]
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        # a detached '.sig' file is expected next to the package; fetch and verify
        from .signature import verify
        fn2 = fn + '.sig'
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except CondaSignatureError:
            raise

        raise CondaSignatureError("Error: Signature for '%s' is invalid." %
                                  (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None):
    """
    Download *url* to *dst_path*, writing a '.part' file first and renaming
    on success.  Optionally verifies the md5 checksum and retries on
    transient failures (proxy 407, connection reset, md5 mismatch).
    """
    assert "::" not in str(url), url
    assert "::" not in str(dst_path), str(dst_path)
    if not offline_keep(url):
        raise RuntimeError("Cannot download in offline mode: %s" % (url,))

    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES

    with Locked(dst_path):
        rm_rf(dst_path)
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise CondaRuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise CondaRuntimeError(msg)

        except IOError as e:
            raise CondaRuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                index = 0
                for chunk in resp.iter_content(2**14):
                    index += len(chunk)
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise CondaRuntimeError("Failed to write to %r." % pp)

                    if md5:
                        h.update(chunk)

                    # drive the progress bar while the total size is known
                    if size and 0 <= index <= size:
                        getLogger('fetch.update').info(index)

        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise CondaRuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise CondaRuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                                    % (url, h.hexdigest(), md5))

        try:
            # atomic move of the completed '.part' file into place
            exp_backoff_fn(os.rename, pp, dst_path)
        except OSError as e:
            raise CondaRuntimeError("Could not rename %r to %r: %r" %
                                    (pp, dst_path, e))
        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """
    Context manager that downloads a URL into a temporary directory and
    removes the directory on exit.  A plain local path (no '://') is
    returned unchanged and nothing is created or deleted.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose

    def __enter__(self):
        if '://' not in self.url:
            # a plain path was given; hand it back untouched
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from .console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        target = join(self.tmp_dir, basename(self.url))
        download(self.url, target)
        return target

    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/cli/main_config.py
--- a/conda/cli/main_config.py
+++ b/conda/cli/main_config.py
@@ -151,21 +151,19 @@ def configure_parser(sub_parsers):
choices=BoolOrListKey()
)
action.add_argument(
- "--add",
+ "--append", "--add",
nargs=2,
action="append",
- help="""Add one configuration value to the beginning of a list key.
- To add to the end of the list, use --append.""",
+ help="""Add one configuration value to the end of a list key.""",
default=[],
choices=ListKey(),
metavar=('KEY', 'VALUE'),
)
action.add_argument(
- "--append",
+ "--prepend",
nargs=2,
action="append",
- help="""Add one configuration value to a list key. The default
- behavior is to prepend.""",
+ help="""Add one configuration value to the beginning of a list key.""",
default=[],
choices=ListKey(),
metavar=('KEY', 'VALUE'),
@@ -260,7 +258,7 @@ def execute_config(args, parser):
# recreate the same file
items = rc_config.get(key, [])
numitems = len(items)
- for q, item in enumerate(reversed(items)):
+ for q, item in enumerate(items):
# Use repr so that it can be pasted back in to conda config --add
if key == "channels" and q in (0, numitems-1):
print("--add", key, repr(item),
@@ -268,8 +266,8 @@ def execute_config(args, parser):
else:
print("--add", key, repr(item))
- # Add, append
- for arg, prepend in zip((args.add, args.append), (True, False)):
+ # prepend, append, add
+ for arg, prepend in zip((args.prepend, args.append), (True, False)):
for key, item in arg:
if key == 'channels' and key not in rc_config:
rc_config[key] = ['defaults']
@@ -287,7 +285,7 @@ def execute_config(args, parser):
if item in arglist:
# Right now, all list keys should not contain duplicates
message = "Warning: '%s' already in '%s' list, moving to the %s" % (
- item, key, "front" if prepend else "back")
+ item, key, "top" if prepend else "bottom")
arglist = rc_config[key] = [p for p in arglist if p != item]
if not args.json:
print(message, file=sys.stderr)
conda/exceptions.py
--- a/conda/exceptions.py
+++ b/conda/exceptions.py
@@ -197,6 +197,12 @@ def __init__(self, message, *args, **kwargs):
super(PackageNotFoundError, self).__init__(msg, *args, **kwargs)
+class CondaHTTPError(CondaError):
+ def __init__(self, message, *args, **kwargs):
+ msg = 'HTTP Error: %s\n' % message
+ super(CondaHTTPError, self).__init__(msg, *args, **kwargs)
+
+
class NoPackagesFoundError(CondaError, RuntimeError):
'''An exception to report that requested packages are missing.
@@ -352,7 +358,7 @@ def print_exception(exception):
def get_info():
- from StringIO import StringIO
+ from conda.compat import StringIO
from contextlib import contextmanager
from conda.cli import conda_argparse
from conda.cli.main_info import configure_parser
conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -27,8 +27,8 @@
rm_rf, exp_backoff_fn)
from .lock import Locked as Locked
from .utils import memoized
-from .exceptions import ProxyError, ChannelNotAllowed, CondaRuntimeError, CondaSignatureError
-
+from .exceptions import ProxyError, ChannelNotAllowed, CondaRuntimeError, CondaSignatureError, \
+ CondaError, CondaHTTPError
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
@@ -159,7 +159,7 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url))
log.debug(msg)
- raise CondaRuntimeError(msg)
+ raise CondaHTTPError(msg)
except requests.exceptions.SSLError as e:
msg = "SSL Error: %s\n" % e |
conda create causes: AttributeError: 'dict' object has no attribute 'add'
Trying to clone the root environment with:
```
conda create --clone root --copy -n myenv2
```
Gives:
```
Traceback (most recent call last):
File "/opt/miniconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 120, in main
exit_code = args_func(args, p)
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 130, in args_func
exit_code = args.func(args, p)
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 58, in execute
install(args, parser, 'create')
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 223, in install
clone(args.clone, prefix, json=args.json, quiet=args.quiet, index_args=index_args)
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 88, in clone
index_args=index_args)
File "/opt/miniconda/lib/python2.7/site-packages/conda/misc.py", line 360, in clone_env
force_extract=False, index_args=index_args)
File "/opt/miniconda/lib/python2.7/site-packages/conda/misc.py", line 109, in explicit
channels.add(channel)
AttributeError: 'dict' object has no attribute 'add'
```
conda info:
```
platform : linux-64
conda version : 4.1.8
conda-env version : 2.5.1
conda-build version : 1.18.1
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /opt/miniconda (read only)
default environment : /opt/miniconda
envs directories : /home/wiecki/.conda/envs
/home/wiecki/envs
/opt/miniconda/envs
package cache : /home/wiecki/.conda/envs/.pkgs
/home/wiecki/envs/.pkgs
/opt/miniconda/pkgs
channel URLs : https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : None
offline mode : False
is foreign system : False
```
conda create causes: AttributeError: 'dict' object has no attribute 'add'
Trying to clone the root environment with:
```
conda create --clone root --copy -n myenv2
```
Gives:
```
Traceback (most recent call last):
File "/opt/miniconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 120, in main
exit_code = args_func(args, p)
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 130, in args_func
exit_code = args.func(args, p)
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 58, in execute
install(args, parser, 'create')
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 223, in install
clone(args.clone, prefix, json=args.json, quiet=args.quiet, index_args=index_args)
File "/opt/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 88, in clone
index_args=index_args)
File "/opt/miniconda/lib/python2.7/site-packages/conda/misc.py", line 360, in clone_env
force_extract=False, index_args=index_args)
File "/opt/miniconda/lib/python2.7/site-packages/conda/misc.py", line 109, in explicit
channels.add(channel)
AttributeError: 'dict' object has no attribute 'add'
```
conda info:
```
platform : linux-64
conda version : 4.1.8
conda-env version : 2.5.1
conda-build version : 1.18.1
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /opt/miniconda (read only)
default environment : /opt/miniconda
envs directories : /home/wiecki/.conda/envs
/home/wiecki/envs
/opt/miniconda/envs
package cache : /home/wiecki/.conda/envs/.pkgs
/home/wiecki/envs/.pkgs
/opt/miniconda/pkgs
channel URLs : https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : None
offline mode : False
is foreign system : False
```
| conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which eventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url, dist2filename)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs, subdir
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
from .api import get_index
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        # locally built packages carry 'file_hash'; optionally skip them
        skip = exclude_self_build and 'file_hash' in meta
        if not skip:
            files.update(set(meta['files']))
    return files
# Matches an explicit spec: an optional URL/path part, a .tar.bz2 file name,
# and an optional trailing '#<md5>' fragment (32 hex digits).
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
    """
    Install an explicit list of package tarballs into *prefix*.

    Each spec is '(url|path)(#md5)?' (or the literal '@EXPLICIT' marker,
    which is skipped).  Builds a plan of fetch/extract/unlink/link actions,
    pulls repodata only for channels that are actually needed, verifies
    md5 sums, and executes the plan.  Returns the actions dict.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
    linked = {name_dist(dist): dist for dist in install_linked(prefix)}
    index_args = index_args or {}
    index = index or {}
    verifies = []
    # Channels whose repodata we still need to fetch.  This must be a set
    # (not {}, which is an empty *dict*): channel URLs repeat across specs
    # and channels.add(...) below would raise AttributeError on a dict.
    channels = set()
    for spec in specs:
        if spec == '@EXPLICIT':
            continue

        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            sys.exit('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                sys.exit('Error: file not found: %s' % join(url_p, fn))
            url_p = utils_url_path(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)

        # is_local: if the tarball is stored locally (file://)
        # is_cache: if the tarball is sitting in our cache
        is_local = url.startswith('file://')
        # Use a dedicated name for the schannel prefix ('' or 'chan::');
        # the original code rebound the *prefix* parameter here, shadowing
        # the install prefix.
        pkey = cached_url(url) if is_local else None
        is_cache = pkey is not None
        if is_cache:
            # Channel information from the cache
            schannel = 'defaults' if pkey == '' else pkey[:-2]
        else:
            # Channel information from the URL
            channel, schannel = url_channel(url)
            pkey = '' if schannel == 'defaults' else schannel + '::'

        fn = pkey + fn
        dist = fn[:-8]
        # Add explicit file to index so we'll be sure to see it later
        if is_local:
            index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': md5}
            verifies.append((fn, md5))

        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)

        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache, unless the path *is* to the cache
        if pkg_path and not is_cache and (is_local or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None

        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None

        if not dir_path:
            if not pkg_path:
                _, conflict = find_new_location(dist)
                if conflict:
                    actions[RM_FETCHED].append(conflict)
                if not is_local:
                    # remember this channel so its repodata is fetched below
                    if fn not in index or index[fn].get('not_fetched'):
                        channels.add(channel)
                    verifies.append((dist + '.tar.bz2', md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)

        # unlink any installed package with that name
        name = name_dist(dist)
        if name in linked:
            actions[UNLINK].append(linked[name])
        actions[LINK].append(dist)

    # Pull the repodata for channels we are using
    if channels:
        index_args = index_args.copy()
        index_args['prepend'] = False
        index_args['channel_urls'] = channels
        index.update(get_index(**index_args))

    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(fn)
        if info is None:
            sys.exit("Error: no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            sys.exit(
                'MD5 mismatch for: %s\n   spec: %s\n   repo: %s'
                % (fn, md5, info['md5']))

    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix* (assumes *path* lives under it)."""
    relative = path[len(prefix) + 1:]
    on_windows = sys.platform == 'win32'
    if on_windows and windows_forward_slashes:
        relative = relative.replace('\\', '/')
    return relative
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.
    """
    res = set()
    prefix = abspath(prefix)
    # files/directories that conda itself manages and should not be reported
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # conda's own entry points in bin/ are also skipped
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # record symlinked directories themselves (walk won't descend)
                    res.add(relpath(path, prefix))

    if sys.platform == 'win32' and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """
    Return (the set) of all untracked files for a given prefix.
    """
    conda_files = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - conda_files:
        # skip editor backups, macOS metadata, and bytecode of tracked sources
        if path.endswith('~'):
            continue
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue
        if path.endswith('.pyc') and path[:-1] in conda_files:
            continue
        result.add(path)
    return result
def which_prefix(path):
    """
    Given the path (to a (presumably) conda installed file) return the
    environment prefix in which the file is located, or None if no
    enclosing directory contains a 'conda-meta' subdirectory.
    """
    candidate = abspath(path)
    while True:
        if isdir(join(candidate, 'conda-meta')):
            return candidate
        parent = dirname(candidate)
        if parent == candidate:
            # reached the filesystem root without finding conda-meta
            return None
        candidate = parent
def which_package(path):
    """
    Given the path (of a (presumably) conda installed file) iterate over
    the conda packages the file came from.  Usually the iteration yields
    only one package.
    """
    target = abspath(path)
    prefix = which_prefix(target)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % target)
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        owned = (abspath(join(prefix, f)) for f in meta['files'])
        if any(candidate == target for candidate in owned):
            yield dist
def discard_conda(dists):
    """Return *dists* with any 'conda' package filtered out."""
    return [dist for dist in dists if name_dist(dist) != 'conda']
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    on_windows = sys.platform == 'win32'
    if on_windows and exists(join(root_dir, '.nonadmin')):
        if not isdir(prefix):
            os.makedirs(prefix)
        marker = join(prefix, '.nonadmin')
        with open(marker, 'w') as fo:
            fo.write('')
def append_env(prefix):
    """Record *prefix* in ~/.conda/environments.txt (best effort)."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        registry = join(conda_dir, 'environments.txt')
        with open(registry, 'a') as f:
            f.write('%s\n' % prefix)
    except IOError:
        # failure to record the environment is not fatal
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)

    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}  # NOTE(review): shadows the builtin 'filter'
    found = True
    while found:
        # fixed-point iteration: keep adding packages until nothing new
        # transitively depends on conda
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}

    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        index_args = index_args or {}
        index = get_index(**index_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = name_dist(dist)
            fn = dist2filename(dist)
            # any channel record with a matching file name will do; prefer newest
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                del drecs[dist]
                dist = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[dist] = r.index[dist]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, subdir, notfound)
        raise RuntimeError(msg)

    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist + '.tar.bz2'
        if fkey not in index:
            # not in the fetched index: mark so `explicit` refetches repodata
            info['not_fetched'] = True
            index[fkey] = info
            r = None
        urls[dist] = info['url']

    if r is None:
        r = Resolve(index)
    # install order must respect dependencies
    dists = r.dependency_sort(urls.keys())
    urls = [urls[d] for d in dists]

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            # text files get the old prefix rewritten to the new one
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, index_args=index_args)
    return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the given local package tarballs (*paths*) into *prefix*."""
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Return (binpath, env): env mirrors os.environ with *prefix*'s script
    directory prepended to PATH."""
    if sys.platform == 'win32':
        template = r'%s\Scripts'
    else:
        template = '%s/bin'
    binpath = template % abspath(prefix)
    # prepend the bin directory to the path
    new_path = os.path.pathsep.join([binpath, os.getenv('PATH')])
    env = {'PATH': new_path}
    # copy existing environment variables, but not anything with PATH in it
    for key, value in iteritems(os.environ):
        if key != 'PATH':
            env[key] = value
    return binpath, env
def make_icon_url(info):
    """Build the icon URL for a package record, or '' when channel/icon is missing."""
    channel = info.get('channel')
    icon_fn = info.get('icon')
    if not (channel and icon_fn):
        return ''
    base_url = dirname(channel)
    return '%s/icons/%s' % (base_url, icon_fn)
def list_prefixes():
    """Yield every environment prefix conda knows about, the root env last."""
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for name in sorted(os.listdir(envs_dir)):
            # hidden entries (e.g. '.pkgs') are not environments
            if name.startswith('.'):
                continue
            candidate = join(envs_dir, name)
            if isdir(candidate):
                yield candidate
    yield root_dir
<|code_end|>
| conda/misc.py
# this module contains miscellaneous stuff which eventually could be moved
# into other places
from __future__ import print_function, division, absolute_import
import os
import re
import shutil
import sys
from collections import defaultdict
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked,
linked_data, find_new_location, cached_url, dist2filename)
from .compat import iteritems, itervalues
from .config import is_url, url_channel, root_dir, envs_dirs, subdir
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, url_path as utils_url_path
from .api import get_index
def conda_installed_files(prefix, exclude_self_build=False):
    """
    Return the set of files which have been installed (using conda) into
    a given prefix.  With exclude_self_build=True, packages whose
    metadata carries a 'file_hash' entry are skipped.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            continue
        files.update(meta['files'])
    return files
# Matches an explicit spec "(url-or-path/)name.tar.bz2(#md5)": an optional
# location part, a conda tarball filename, and an optional 32-hex-digit
# MD5 fragment.
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
r'(?P<fn>[^/\\#]+\.tar\.bz2)'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
# Install an explicit list of tarball URLs/paths (as from an "@EXPLICIT"
# spec file) into *prefix*: build a conda action plan (fetch/extract/
# unlink/link), verify MD5s against the index, then execute the plan.
actions = defaultdict(list)
actions['PREFIX'] = prefix
actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
linked = {name_dist(dist): dist for dist in install_linked(prefix)}
index_args = index_args or {}
index = index or {}
verifies = []
channels = set()
for spec in specs:
if spec == '@EXPLICIT':
continue
# Format: (url|path)(:#md5)?
m = url_pat.match(spec)
if m is None:
sys.exit('Could not parse explicit URL: %s' % spec)
url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
if not is_url(url_p):
if url_p is None:
url_p = curdir
elif not isdir(url_p):
sys.exit('Error: file not found: %s' % join(url_p, fn))
url_p = utils_url_path(url_p).rstrip('/')
url = "{0}/{1}".format(url_p, fn)
# is_local: if the tarball is stored locally (file://)
# is_cache: if the tarball is sitting in our cache
is_local = url.startswith('file://')
# NOTE: 'prefix' is reused here as a channel-name prefix for the
# dist string, shadowing the install prefix for the rest of the loop.
prefix = cached_url(url) if is_local else None
is_cache = prefix is not None
if is_cache:
# Channel information from the cache
schannel = 'defaults' if prefix == '' else prefix[:-2]
else:
# Channel information from the URL
channel, schannel = url_channel(url)
prefix = '' if schannel == 'defaults' else schannel + '::'
fn = prefix + fn
dist = fn[:-8]
# Add explicit file to index so we'll be sure to see it later
if is_local:
index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': md5}
verifies.append((fn, md5))
pkg_path = is_fetched(dist)
dir_path = is_extracted(dist)
# Don't re-fetch unless there is an MD5 mismatch
# Also remove explicit tarballs from cache, unless the path *is* to the cache
if pkg_path and not is_cache and (is_local or md5 and md5_file(pkg_path) != md5):
# This removes any extracted copies as well
actions[RM_FETCHED].append(dist)
pkg_path = dir_path = None
# Don't re-extract unless forced, or if we can't check the md5
if dir_path and (force_extract or md5 and not pkg_path):
actions[RM_EXTRACTED].append(dist)
dir_path = None
if not dir_path:
if not pkg_path:
_, conflict = find_new_location(dist)
if conflict:
actions[RM_FETCHED].append(conflict)
if not is_local:
if fn not in index or index[fn].get('not_fetched'):
channels.add(schannel)
verifies.append((dist + '.tar.bz2', md5))
actions[FETCH].append(dist)
actions[EXTRACT].append(dist)
# unlink any installed package with that name
name = name_dist(dist)
if name in linked:
actions[UNLINK].append(linked[name])
actions[LINK].append(dist)
# Pull the repodata for channels we are using
if channels:
index_args = index_args or {}
index_args = index_args.copy()
index_args['prepend'] = False
index_args['channel_urls'] = list(channels)
index.update(get_index(**index_args))
# Finish the MD5 verification
for fn, md5 in verifies:
info = index.get(fn)
if info is None:
sys.exit("Error: no package '%s' in index" % fn)
if md5 and 'md5' not in info:
sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
if md5 and info['md5'] != md5:
sys.exit(
'MD5 mismatch for: %s\n spec: %s\n repo: %s'
% (fn, md5, info['md5']))
execute_actions(actions, index=index, verbose=verbose)
return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix* (which must be a parent of it)."""
    # Drop the prefix plus the joining path separator character.
    result = path[len(prefix) + 1:]
    # Normalize to forward slashes on Windows unless the caller opts out.
    if sys.platform == 'win32' and windows_forward_slashes:
        result = result.replace('\\', '/')
    return result
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory, as paths
    relative to that prefix.
    """
    found = set()
    prefix = abspath(prefix)
    # Top-level entries that belong to conda itself rather than to packages.
    skip_names = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
                  'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
                  '.unionfs', '.nonadmin'}
    skip_bin = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        skip_names.update({'python.app', 'Launcher.app'})
    for entry in os.listdir(prefix):
        if ignore_predefined_files and entry in skip_names:
            continue
        if isfile(join(prefix, entry)):
            found.add(entry)
            continue
        for root, dirs, files in os.walk(join(prefix, entry)):
            # conda's own entry points in bin/ are not package files.
            in_bin = ignore_predefined_files and root == join(prefix, 'bin')
            for fname in files:
                if in_bin and fname in skip_bin:
                    continue
                found.add(relpath(join(root, fname), prefix))
            # Record symlinked directories themselves; os.walk does not
            # treat them as files.
            for dname in dirs:
                sub = join(root, dname)
                if islink(sub):
                    found.add(relpath(sub, prefix))
    if sys.platform == 'win32' and windows_forward_slashes:
        return {p.replace('\\', '/') for p in found}
    return found
def untracked(prefix, exclude_self_build=False):
    """
    Return the set of all untracked files for a given prefix, ignoring
    editor backups, macOS .DS_Store files, and .pyc files whose source
    is tracked.
    """
    conda_files = conda_installed_files(prefix, exclude_self_build)
    result = set()
    for path in walk_prefix(prefix) - conda_files:
        if path.endswith('~'):
            continue
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            continue
        # foo.pyc is not "untracked" when foo.py belongs to a package.
        if path.endswith('.pyc') and path[:-1] in conda_files:
            continue
        result.add(path)
    return result
def which_prefix(path):
    """
    Given the path to a (presumably) conda-installed file, return the
    environment prefix the file is located in, or None when no ancestor
    directory contains a conda-meta directory.
    """
    prefix = abspath(path)
    while True:
        # A conda-meta directory marks an environment root.
        if isdir(join(prefix, 'conda-meta')):
            return prefix
        parent = dirname(prefix)
        if parent == prefix:
            # Reached the filesystem root without finding conda-meta.
            return None
        prefix = parent
def which_package(path):
    """
    Given the path of a (presumably) conda-installed file, iterate over
    the conda packages whose file lists contain that path.  Usually the
    iteration yields only one package.
    """
    path = abspath(path)
    prefix = which_prefix(path)
    if prefix is None:
        raise RuntimeError("could not determine conda prefix from: %s" % path)
    for dist in install_linked(prefix):
        info = is_linked(prefix, dist)
        for f in info['files']:
            if abspath(join(prefix, f)) == path:
                yield dist
                break
def discard_conda(dists):
    """Return *dists* with any 'conda' package filtered out."""
    return [dist for dist in dists if name_dist(dist) != 'conda']
def touch_nonadmin(prefix):
    """
    Create $PREFIX/.nonadmin if root_dir/.nonadmin exists (Windows only).
    """
    # Only relevant on Windows, and only when the root env is non-admin.
    if sys.platform != 'win32' or not exists(join(root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    with open(join(prefix, '.nonadmin'), 'w') as fo:
        fo.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        with open(join(conda_dir, 'environments.txt'), 'a') as f:
            f.write('%s\n' % prefix)
    except IOError:
        # Failing to record the environment is deliberately non-fatal.
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
"""
clone existing prefix1 into new prefix2
"""
untracked_files = untracked(prefix1)
# Discard conda and any package that depends on it
# (fixed-point loop: each pass removes packages whose dependency chain
# reaches 'conda', restarting until nothing more is filtered)
drecs = linked_data(prefix1)
filter = {}
found = True
while found:
found = False
for dist, info in iteritems(drecs):
name = info['name']
if name in filter:
continue
if name == 'conda':
filter['conda'] = dist
found = True
break
for dep in info.get('depends', []):
if MatchSpec(dep).name in filter:
filter[name] = dist
found = True
if filter:
if not quiet:
print('The following packages cannot be cloned out of the root environment:')
for pkg in itervalues(filter):
print(' - ' + pkg)
drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
# Resolve URLs for packages that do not have URLs
r = None
index = {}
unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
notfound = []
if unknowns:
index_args = index_args or {}
index = get_index(**index_args)
r = Resolve(index, sort=True)
for dist in unknowns:
name = name_dist(dist)
fn = dist2filename(dist)
fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
if fkeys:
del drecs[dist]
dist = sorted(fkeys, key=r.version_key, reverse=True)[0]
drecs[dist] = r.index[dist]
else:
notfound.append(fn)
if notfound:
what = "Package%s " % ('' if len(notfound) == 1 else 's')
notfound = '\n'.join(' - ' + fn for fn in notfound)
msg = '%s missing in current %s channels:%s' % (what, subdir, notfound)
raise RuntimeError(msg)
# Assemble the URL and channel list
urls = {}
for dist, info in iteritems(drecs):
fkey = dist + '.tar.bz2'
if fkey not in index:
info['not_fetched'] = True
index[fkey] = info
r = None
urls[dist] = info['url']
if r is None:
r = Resolve(index)
dists = r.dependency_sort(urls.keys())
urls = [urls[d] for d in dists]
if verbose:
print('Packages: %d' % len(dists))
print('Files: %d' % len(untracked_files))
# Copy untracked files across, rewriting embedded prefix paths in any
# file that decodes as UTF-8 text; binary files are copied verbatim.
for f in untracked_files:
src = join(prefix1, f)
dst = join(prefix2, f)
dst_dir = dirname(dst)
if islink(dst_dir) or isfile(dst_dir):
os.unlink(dst_dir)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if islink(src):
os.symlink(os.readlink(src), dst)
continue
try:
with open(src, 'rb') as fi:
data = fi.read()
except IOError:
continue
try:
s = data.decode('utf-8')
s = s.replace(prefix1, prefix2)
data = s.encode('utf-8')
except UnicodeDecodeError: # data is binary
pass
with open(dst, 'wb') as fo:
fo.write(data)
shutil.copystat(src, dst)
actions = explicit(urls, prefix2, verbose=not quiet, index=index,
force_extract=False, index_args=index_args)
return actions, untracked_files
def install_local_packages(prefix, paths, verbose=False):
    """Install the local package tarballs at *paths* into *prefix*.

    Thin convenience wrapper around explicit().
    """
    explicit(paths, prefix, verbose=verbose)
def environment_for_conda_environment(prefix=root_dir):
    """Build the process environment for running commands in *prefix*.

    Returns:
        (binpath, env): *binpath* is the environment's scripts/bin
        directory; *env* is a copy of os.environ in which PATH is
        replaced so that *binpath* comes first.
    """
    # Executables live in Scripts\ on Windows and bin/ elsewhere.
    fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin'
    binpath = fmt % abspath(prefix)
    # os.getenv('PATH') can legitimately be None (PATH unset); fall back
    # to '' so the join below cannot raise TypeError.
    path = os.path.pathsep.join([binpath, os.getenv('PATH') or ''])
    env = {'PATH': path}
    # Copy every other environment variable over unchanged; our PATH wins.
    for key, value in os.environ.items():
        if key != 'PATH':
            env[key] = value
    return binpath, env
def make_icon_url(info):
    """Return the URL of a package's icon, or '' if none can be derived."""
    channel = info.get('channel')
    icon_fn = info.get('icon')
    if not (channel and icon_fn):
        return ''
    # Icons live in an icons/ directory that is a sibling of the
    # platform subdir named by 'channel'.
    return '%s/icons/%s' % (dirname(channel), icon_fn)
def list_prefixes():
    """Yield every environment prefix conda knows about, root last."""
    for envs_dir in envs_dirs:
        if not isdir(envs_dir):
            continue
        for name in sorted(os.listdir(envs_dir)):
            # Skip hidden entries and anything that is not a directory.
            if name.startswith('.'):
                continue
            prefix = join(envs_dir, name)
            if isdir(prefix):
                yield prefix
    # The root environment is always a valid prefix.
    yield root_dir
| conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -46,7 +46,7 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None,
index_args = index_args or {}
index = index or {}
verifies = []
- channels = {}
+ channels = set()
for spec in specs:
if spec == '@EXPLICIT':
continue
@@ -106,7 +106,7 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None,
actions[RM_FETCHED].append(conflict)
if not is_local:
if fn not in index or index[fn].get('not_fetched'):
- channels.add(channel)
+ channels.add(schannel)
verifies.append((dist + '.tar.bz2', md5))
actions[FETCH].append(dist)
actions[EXTRACT].append(dist)
@@ -122,7 +122,7 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None,
index_args = index_args or {}
index_args = index_args.copy()
index_args['prepend'] = False
- index_args['channel_urls'] = channels
+ index_args['channel_urls'] = list(channels)
index.update(get_index(**index_args))
# Finish the MD5 verification |
put ftp:// back into 4.2
| conda/connection.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import cgi
import email
import mimetypes
import os
import platform
import requests
import tempfile
from io import BytesIO
from logging import getLogger
from . import __version__ as VERSION
from .base.context import context, platform as context_platform
from .common.url import url_to_path, url_to_s3_info
from .utils import gnu_get_libc_version
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
"requests/{requests_ver} "
"{python}/{py_ver} "
"{system}/{kernel} {dist}/{ver}")
glibc_ver = gnu_get_libc_version()
if context_platform == 'linux':
distinfo = platform.linux_distribution()
dist, ver = distinfo[0], distinfo[1]
elif context_platform == 'osx':
dist = 'OSX'
ver = platform.mac_ver()[0]
else:
dist = platform.system()
ver = platform.version()
user_agent = _user_agent.format(conda_ver=VERSION,
requests_ver=requests.__version__,
python=platform.python_implementation(),
py_ver=platform.python_version(),
system=platform.system(), kernel=platform.release(),
dist=dist, ver=ver)
if glibc_ver:
user_agent += " glibc/{}".format(glibc_ver)
class CondaSession(requests.Session):
    """A requests.Session preconfigured for conda: proxy servers, HTTP
    retries, file:// and s3:// transport adapters, conda's User-Agent,
    and the configured SSL-verification setting."""

    timeout = None

    def __init__(self, *args, **kwargs):
        # 'retries' is our own keyword; strip it before delegating.
        retries = kwargs.pop('retries', RETRIES)
        super(CondaSession, self).__init__(*args, **kwargs)

        proxies = context.proxy_servers
        if proxies:
            self.proxies = proxies

        # Retry plain HTTP(S) requests a few times before giving up.
        if retries:
            http_adapter = requests.adapters.HTTPAdapter(max_retries=retries)
            self.mount("http://", http_adapter)
            self.mount("https://", http_adapter)

        # Non-HTTP schemes conda understands.
        self.mount("file://", LocalFSAdapter())
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = user_agent
        self.verify = context.ssl_verify
# Transport adapter that serves s3:// URLs via boto, downloading the key
# to a temporary file and exposing it as the response body.
class S3Adapter(requests.adapters.BaseAdapter):
def __init__(self):
super(S3Adapter, self).__init__()
self._temp_file = None
# Fetch the S3 object named by request.url and wrap it in a
# requests.Response; missing boto, bucket or key all map to a 404.
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
resp = requests.models.Response()
resp.status_code = 200
resp.url = request.url
try:
import boto
# silly patch for AWS because
# TODO: remove or change to warning once boto >2.39.0 is released
# https://github.com/boto/boto/issues/2617
from boto.pyami.config import Config, ConfigParser
def get(self, section, name, default=None, **kw):
try:
val = ConfigParser.get(self, section, name, **kw)
except:
val = default
return val
Config.get = get
except ImportError:
stderrlog.info('\nError: boto is required for S3 channels. '
'Please install it with `conda install boto`\n'
'Make sure to run `source deactivate` if you '
'are in a conda environment.\n')
resp.status_code = 404
return resp
conn = boto.connect_s3()
bucket_name, key_string = url_to_s3_info(request.url)
# Get the bucket without validation that it exists and that we have
# permissions to list its contents.
bucket = conn.get_bucket(bucket_name, validate=False)
try:
key = bucket.get_key(key_string)
except boto.exception.S3ResponseError as exc:
# This exception will occur if the bucket does not exist or if the
# user does not have permission to list its contents.
resp.status_code = 404
resp.raw = exc
return resp
if key and key.exists:
modified = key.last_modified
content_type = key.content_type or "text/plain"
resp.headers = requests.structures.CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": key.size,
"Last-Modified": modified,
})
# Stage the object in a temp file; cleaned up in close().
_, self._temp_file = tempfile.mkstemp()
key.get_contents_to_filename(self._temp_file)
f = open(self._temp_file, 'rb')
resp.raw = f
resp.close = resp.raw.close
else:
resp.status_code = 404
return resp
# Remove the staged temp file, if one was created.
def close(self):
if self._temp_file:
os.remove(self._temp_file)
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves file:// URLs from the local disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # A missing or unreadable file maps to a 404 carrying the
            # original exception as the body.
            resp.status_code = 404
            resp.raw = exc
            return resp
        resp.headers = requests.structures.CaseInsensitiveDict({
            "Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain",
            "Content-Length": stats.st_size,
            "Last-Modified": email.utils.formatdate(stats.st_mtime, usegmt=True),
        })
        resp.raw = open(pathname, "rb")
        resp.close = resp.raw.close
        return resp

    def close(self):
        pass
def data_callback_factory(variable):
    '''Return a callback suitable for use by the FTP library.  Each call
    appends the received chunk to *variable*, a file-like object.'''
    def callback(data):
        variable.write(data)
    return callback
def build_text_response(request, data, code):
    '''Build a response for textual data.'''
    # ASCII is the encoding FTP directory listings and replies use.
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for data whose encoding is unknown.'''
    # encoding=None leaves requests to sniff or treat the body as bytes.
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Build a requests.Response object from data returned by ftplib,
    using the specified encoding.'''
    response = requests.Response()
    response.encoding = encoding
    # Wire up the useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    response.status_code = int(code.split()[0])
    # The raw buffer was just written to; rewind it for readers.
    response.raw.seek(0)
    # Give any registered hooks a chance to replace the response.
    response = requests.hooks.dispatch_hook('response', request.hooks, response)
    return response
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.'''
    # Grab the multipart parameter dict (boundary etc.) from the header.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])
    # cgi.parse_multipart wants a file-like object, so wrap the body.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)
    data = cgi.parse_multipart(buf, pdict)
    # Simply take the first file.
    _, filedata = data.popitem()
    buf.close()
    # Re-wrap the file contents in a fresh buffer.
    # NOTE(review): ''.join assumes str parts; on Python 3,
    # cgi.parse_multipart may yield bytes — confirm before changing.
    buf = BytesIO()
    buf.write(''.join(filedata))
    buf.seek(0)
    return buf
<|code_end|>
conda/exceptions.py
<|code_start|>
from __future__ import absolute_import, division, print_function
import sys
from traceback import format_exc
from .compat import iteritems, iterkeys
class CondaError(Exception):
    """Base class for all conda errors.

    Renders as its non-boolean constructor arguments joined by spaces.
    """

    def __init__(self, *args, **kwargs):
        super(CondaError, self).__init__(*args, **kwargs)

    def _format(self):
        # Boolean args are treated as flags, not message parts.
        return ' '.join(str(arg) for arg in self.args
                        if not isinstance(arg, bool))

    def __repr__(self):
        return self._format()

    def __str__(self):
        return self._format()
class InvalidInstruction(CondaError):
    """Raised for an instruction that has no registered handler."""
    def __init__(self, instruction, *args, **kwargs):
        msg = "No handler for instruction: %r\n" % instruction
        super(InvalidInstruction, self).__init__(msg, *args, **kwargs)


class LockError(CondaError, RuntimeError):
    """Raised for lock acquisition/release failures."""
    def __init__(self, message, *args, **kwargs):
        msg = "Lock error: %s" % message
        super(LockError, self).__init__(msg, *args, **kwargs)


class ArgumentError(CondaError):
    """Base class for command-line argument errors."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Argument Error: %s\n' % message
        super(ArgumentError, self).__init__(msg, *args, **kwargs)


class ArgumentNotFoundError(ArgumentError):
    """Raised when an expected argument is missing."""
    def __init__(self, argument, *args, **kwargs):
        msg = 'Argument not found: %s\n' % argument
        super(ArgumentNotFoundError, self).__init__(msg, *args, **kwargs)


class TooManyArgumentsError(ArgumentError):
    """Raised when too many arguments were supplied."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Too many arguments: %s\n' % message
        super(TooManyArgumentsError, self).__init__(msg, *args, **kwargs)


class TooFewArgumentsError(ArgumentError):
    """Raised when too few arguments were supplied."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Too few arguments: %s\n' % message
        super(TooFewArgumentsError, self).__init__(msg, *args, **kwargs)


class CommandError(CondaError):
    """Base class for subcommand-level errors."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Command Error: %s\n' % message
        super(CommandError, self).__init__(msg, *args, **kwargs)


class CommandNotFoundError(CommandError):
    """Raised when the requested command does not exist."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Command not found: %s\n' % message
        super(CommandNotFoundError, self).__init__(msg, *args, **kwargs)
class CondaFileNotFoundError(CondaError, OSError):
    """Raised when an expected file is missing."""
    def __init__(self, message, *args, **kwargs):
        msg = "File not found: %s\n" % message
        super(CondaFileNotFoundError, self).__init__(msg, *args, **kwargs)


class DirectoryNotFoundError(CondaError):
    """Raised when an expected directory is missing."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Directory not found: %s\n' % message
        super(DirectoryNotFoundError, self).__init__(msg, *args, **kwargs)


class CondaEnvironmentError(CondaError, EnvironmentError):
    """Raised when a conda environment cannot be found."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Environment not found: %s\n' % message
        super(CondaEnvironmentError, self).__init__(msg, *args, **kwargs)


class DryRunExit(CondaError):
    """Signals an early exit from a dry run."""
    def __init__(self, *args, **kwargs):
        msg = 'Dry run: exiting\n'
        super(DryRunExit, self).__init__(msg, *args, **kwargs)


class CondaSystemExit(CondaError, SystemExit):
    """A SystemExit that also participates in conda's error hierarchy."""
    def __init__(self, *args, **kwargs):
        super(CondaSystemExit, self).__init__(*args, **kwargs)


class SubprocessExit(CondaError):
    """Signals an exit driven by a subprocess."""
    def __init__(self, *args, **kwargs):
        msg = 'Subprocess exiting\n'
        super(SubprocessExit, self).__init__(msg, *args, **kwargs)
class PaddingError(CondaError):
    """Raised for prefix-padding failures."""
    def __init__(self, *args, **kwargs):
        msg = 'Padding error:\n'
        super(PaddingError, self).__init__(msg, *args, **kwargs)


class LinkError(CondaError):
    """Raised for package linking failures."""
    def __init__(self, *args, **kwargs):
        msg = 'Link error\n'
        super(LinkError, self).__init__(msg, *args, **kwargs)


class CondaOSError(CondaError, OSError):
    """An OSError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'OS error: %s\n' % message
        super(CondaOSError, self).__init__(msg, *args, **kwargs)


class AlreadyInitializedError(CondaError):
    """Raised when something is initialized twice; wraps the raw message."""
    def __init__(self, message, *args, **kwargs):
        msg = message + '\n'
        super(AlreadyInitializedError, self).__init__(msg, *args, **kwargs)


class ProxyError(CondaError):
    """Raised for proxy-configuration or proxy-connection failures."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Proxy error: %s\n' % message
        super(ProxyError, self).__init__(msg, *args, **kwargs)


class CondaIOError(CondaError, IOError):
    """An IOError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'IO error: %s\n' % message
        super(CondaIOError, self).__init__(msg, *args, **kwargs)


class CondaFileIOError(CondaIOError):
    """Raised when a file cannot be read or written."""
    def __init__(self, message, *args, **kwargs):
        msg = "Couldn't read or write to file. %s\n" % message
        super(CondaFileIOError, self).__init__(msg, *args, **kwargs)


class CondaKeyError(CondaError, KeyError):
    """A KeyError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        # The formatted message is also kept on the instance as .msg.
        self.msg = 'Key error: %s\n' % message
        super(CondaKeyError, self).__init__(self.msg, *args, **kwargs)


class ChannelError(CondaError):
    """Base class for channel-related errors."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Channel Error: %s\n' % message
        super(ChannelError, self).__init__(msg, *args, **kwargs)


class ChannelNotAllowed(ChannelError):
    """Raised when a channel is not in the allowed set."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Channel not allowed: %s\n' % message
        super(ChannelNotAllowed, self).__init__(msg, *args, **kwargs)
class CondaImportError(CondaError, ImportError):
    """An ImportError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Import error: %s\n' % message
        super(CondaImportError, self).__init__(msg, *args, **kwargs)


class ParseError(CondaError):
    """Base class for parsing errors."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Parse error: %s\n' % message
        super(ParseError, self).__init__(msg, *args, **kwargs)


class CouldntParseError(ParseError):
    """Raised when the .condarc yaml file cannot be parsed."""
    def __init__(self, reason, *args, **kwargs):
        # args is set directly so __repr__ below returns exactly this text.
        self.args = ["""Error: Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s\n""" % reason]
        super(CouldntParseError, self).__init__(self.args[0], *args, **kwargs)

    def __repr__(self):
        return self.args[0]


class MD5MismatchError(CondaError):
    """Raised when a download's MD5 does not match the expected value."""
    def __init__(self, message, *args, **kwargs):
        msg = 'MD5MismatchError: %s\n' % message
        super(MD5MismatchError, self).__init__(msg, *args, **kwargs)


class PackageNotFoundError(CondaError):
    """Raised when a requested package cannot be found."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Package not found: %s\n' % message
        super(PackageNotFoundError, self).__init__(msg, *args, **kwargs)


class CondaHTTPError(CondaError):
    """Raised for HTTP-level failures."""
    def __init__(self, message, *args, **kwargs):
        msg = 'HTTP Error: %s\n' % message
        super(CondaHTTPError, self).__init__(msg, *args, **kwargs)
class NoPackagesFoundError(CondaError, RuntimeError):
'''An exception to report that requested packages are missing.
Args:
bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
chains, from top level to bottom.
Returns:
Raises an exception with a formatted message detailing the
missing packages and/or dependencies.
'''
def __init__(self, bad_deps, *args, **kwargs):
from .resolve import dashlist
from .base.context import subdir
# The terminal spec of each chain is the package that is missing.
deps = set(q[-1].spec for q in bad_deps)
# Chains of length > 1 are dependencies; length-1 chains are the
# user-requested packages themselves.
if all(len(q) > 1 for q in bad_deps):
what = "Dependencies" if len(bad_deps) > 1 else "Dependency"
elif all(len(q) == 1 for q in bad_deps):
what = "Packages" if len(bad_deps) > 1 else "Package"
else:
what = "Packages/dependencies"
bad_deps = dashlist(' -> '.join(map(str, q)) for q in bad_deps)
msg = '%s missing in current %s channels: %s\n' % (what, subdir, bad_deps)
super(NoPackagesFoundError, self).__init__(msg, *args, **kwargs)
self.pkgs = deps
class UnsatisfiableError(CondaError, RuntimeError):
'''An exception to report unsatisfiable dependencies.
Args:
bad_deps: a list of tuples of objects (likely MatchSpecs).
chains: (optional) if True, the tuples are interpreted as chains
of dependencies, from top level to bottom. If False, the tuples
are interpreted as simple lists of conflicting specs.
Returns:
Raises an exception with a formatted message detailing the
unsatisfiable specifications.
'''
def __init__(self, bad_deps, chains=True, *args, **kwargs):
from .resolve import dashlist, MatchSpec
bad_deps = [list(map(lambda x: x.spec, dep)) for dep in bad_deps]
if chains:
# Merge chains that share a common name prefix, collecting the
# version constraints seen at each position into a set.
chains = {}
for dep in sorted(bad_deps, key=len, reverse=True):
dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
key = (dep[0],) + tuple(v[0] for v in dep1)
vals = ('',) + tuple(v[2] for v in dep1)
found = False
for key2, csets in iteritems(chains):
if key2[:len(key)] == key:
for cset, val in zip(csets, vals):
cset.add(val)
found = True
if not found:
chains[key] = [{val} for val in vals]
bad_deps = []
# Render each merged chain as "name ver1|ver2 -> ...", collapsing
# a mix of constrained and unconstrained occurrences to '*'.
for key, csets in iteritems(chains):
deps = []
for name, cset in zip(key, csets):
if '' not in cset:
pass
elif len(cset) == 1:
cset.clear()
else:
cset.remove('')
cset.add('*')
if name[0] == '@':
name = 'feature:' + name[1:]
deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
chains[key] = ' -> '.join(deps)
bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
else:
bad_deps = [sorted(dep) for dep in bad_deps]
bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
msg = msg % dashlist(bad_deps) + '\n'
super(UnsatisfiableError, self).__init__(msg, *args, **kwargs)
class InstallError(CondaError):
    """Raised for package installation failures."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Install error: %s\n' % message
        super(InstallError, self).__init__(msg, *args, **kwargs)


class RemoveError(CondaError):
    """Raised for package removal failures."""
    def __init__(self, message, *args, **kwargs):
        msg = 'RemoveError: %s\n' % message
        super(RemoveError, self).__init__(msg, *args, **kwargs)


class CondaIndexError(CondaError, IndexError):
    """An IndexError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Index error: %s\n' % message
        super(CondaIndexError, self).__init__(msg, *args, **kwargs)


class CondaRuntimeError(CondaError, RuntimeError):
    """A RuntimeError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Runtime error: %s\n' % message
        super(CondaRuntimeError, self).__init__(msg, *args, **kwargs)


class CondaValueError(CondaError, ValueError):
    """A ValueError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Value error: %s\n' % message
        super(CondaValueError, self).__init__(msg, *args, **kwargs)


class ValidationError(CondaValueError):
    """A CondaValueError raised for validation failures."""
    pass


class CondaTypeError(CondaError, TypeError):
    """A TypeError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Type error: %s\n' % message
        super(CondaTypeError, self).__init__(msg, *args, **kwargs)


class CondaAssertionError(CondaError, AssertionError):
    """An AssertionError within conda's error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Assertion error: %s\n' % message
        super(CondaAssertionError, self).__init__(msg, *args, **kwargs)


class CondaHistoryError(CondaError):
    """Raised for environment-history processing failures."""
    def __init__(self, message, *args, **kwargs):
        msg = 'History error: %s\n' % message
        super(CondaHistoryError, self).__init__(msg, *args, **kwargs)


class CondaSignatureError(CondaError):
    """Raised for package-signature failures."""
    def __init__(self, message, *args, **kwargs):
        msg = 'Signature error: %s\n' % message
        super(CondaSignatureError, self).__init__(msg, *args, **kwargs)
def print_exception(exception):
    """Report *exception*: as JSON when context.json is set, otherwise
    write its repr to stderr."""
    from conda.base.context import context
    from conda.cli.common import stdout_json
    from sys import stderr
    # CondaError.__repr__ produces the user-facing message text.
    message = repr(exception)
    if context.json:
        stdout_json(dict(error=message))
    else:
        stderr.write(message)
def get_info():
    """Run the `conda info` command in-process and return its captured
    (stdout, stderr) output."""
    from conda.cli import conda_argparse
    from conda.cli.main_info import configure_parser
    from shlex import split
    from conda.common.io import captured
    parser = conda_argparse.ArgumentParser()
    sub_parsers = parser.add_subparsers(metavar='command', dest='cmd')
    configure_parser(sub_parsers)
    args = parser.parse_args(split("info"))
    # Capture everything the info command prints instead of emitting it.
    with captured() as c:
        args.func(args, parser)
    return c.stdout, c.stderr
# Report an unexpected exception: dump the traceback as JSON when
# context.json is set, otherwise print a bug-report banner, `conda info`
# output, the failing command line, and the indented traceback.
def print_unexpected_error_message(e):
traceback = format_exc()
from conda.base.context import context
if context.json:
from conda.cli.common import stdout_json
stdout_json(dict(error=traceback))
else:
message = """\
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
"""
print(message)
command = ' '.join(sys.argv)
if ' info' not in command:
# get and print `conda info`
info_stdout, info_stderr = get_info()
print(info_stdout if info_stdout else info_stderr)
print("`$ {0}`".format(command))
print('\n')
print('\n'.join(' ' + line for line in traceback.splitlines()))
def conda_exception_handler(func, *args, **kwargs):
    """Call *func*, mapping conda and unexpected exceptions to exit code 1.

    Integer return values from *func* are passed through unchanged;
    non-integer results yield None.
    """
    try:
        result = func(*args, **kwargs)
    except CondaRuntimeError as e:
        # Runtime errors are treated as unexpected and reported verbosely.
        print_unexpected_error_message(e)
        return 1
    except CondaError as e:
        from conda.base.context import context
        if context.debug:
            print_unexpected_error_message(e)
        else:
            print_exception(e)
        return 1
    except Exception as e:
        # Anything outside the conda hierarchy is a bug worth reporting.
        print_unexpected_error_message(e)
        return 1
    else:
        if isinstance(result, int):
            return result
<|code_end|>
| conda/connection.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from base64 import b64decode
import ftplib
import cgi
import email
import mimetypes
import os
import platform
import requests
import tempfile
from io import BytesIO
from logging import getLogger
from conda.exceptions import AuthenticationError
from . import __version__ as VERSION
from .base.context import context, platform as context_platform
from .common.url import url_to_path, url_to_s3_info, urlparse
from .utils import gnu_get_libc_version
from .compat import StringIO
RETRIES = 3
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# Collect relevant info from OS for reporting purposes (present in User-Agent)
_user_agent = ("conda/{conda_ver} "
"requests/{requests_ver} "
"{python}/{py_ver} "
"{system}/{kernel} {dist}/{ver}")
glibc_ver = gnu_get_libc_version()
if context_platform == 'linux':
distinfo = platform.linux_distribution()
dist, ver = distinfo[0], distinfo[1]
elif context_platform == 'osx':
dist = 'OSX'
ver = platform.mac_ver()[0]
else:
dist = platform.system()
ver = platform.version()
user_agent = _user_agent.format(conda_ver=VERSION,
requests_ver=requests.__version__,
python=platform.python_implementation(),
py_ver=platform.python_version(),
system=platform.system(), kernel=platform.release(),
dist=dist, ver=ver)
if glibc_ver:
user_agent += " glibc/{}".format(glibc_ver)
class CondaSession(requests.Session):
    """A requests.Session preconfigured for conda: proxy servers from the
    active context, HTTP(S) retries, file:// and s3:// transport adapters,
    a conda User-Agent, and the context's SSL-verification setting."""

    timeout = None  # None defers to requests' default timeout handling

    def __init__(self, *args, **kwargs):
        max_retries = kwargs.pop('retries', RETRIES)

        super(CondaSession, self).__init__(*args, **kwargs)

        configured_proxies = context.proxy_servers
        if configured_proxies:
            self.proxies = configured_proxies

        # Configure retries
        if max_retries:
            adapter = requests.adapters.HTTPAdapter(max_retries=max_retries)
            self.mount("http://", adapter)
            self.mount("https://", adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # Enable s3:// urls
        self.mount("s3://", S3Adapter())

        self.headers['User-Agent'] = user_agent

        self.verify = context.ssl_verify
class S3Adapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves s3:// URLs through boto.

    The requested key is downloaded to a temporary file which backs the
    response body; the temp file is removed in close().
    """

    def __init__(self):
        super(S3Adapter, self).__init__()
        # Path of the most recently downloaded object, deleted in close().
        self._temp_file = None

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        # Start from an optimistic 200 response and downgrade on failure.
        resp = requests.models.Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            import boto

            # silly patch for AWS because
            # TODO: remove or change to warning once boto >2.39.0 is released
            # https://github.com/boto/boto/issues/2617
            from boto.pyami.config import Config, ConfigParser

            def get(self, section, name, default=None, **kw):
                # Replacement for Config.get that returns `default` instead of
                # raising when the option is absent (works around boto #2617).
                try:
                    val = ConfigParser.get(self, section, name, **kw)
                except:
                    val = default
                return val

            Config.get = get
        except ImportError:
            # boto is an optional dependency; report a 404 rather than crash.
            stderrlog.info('\nError: boto is required for S3 channels. '
                           'Please install it with `conda install boto`\n'
                           'Make sure to run `source deactivate` if you '
                           'are in a conda environment.\n')
            resp.status_code = 404
            return resp

        conn = boto.connect_s3()

        bucket_name, key_string = url_to_s3_info(request.url)

        # Get the bucket without validation that it exists and that we have
        # permissions to list its contents.
        bucket = conn.get_bucket(bucket_name, validate=False)

        try:
            key = bucket.get_key(key_string)
        except boto.exception.S3ResponseError as exc:
            # This exception will occur if the bucket does not exist or if the
            # user does not have permission to list its contents.
            resp.status_code = 404
            resp.raw = exc
            return resp

        if key and key.exists:
            modified = key.last_modified
            content_type = key.content_type or "text/plain"
            resp.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": key.size,
                "Last-Modified": modified,
            })

            # Download to a temp file so resp.raw is a real file handle,
            # as requests consumers expect.
            _, self._temp_file = tempfile.mkstemp()
            key.get_contents_to_filename(self._temp_file)
            f = open(self._temp_file, 'rb')
            resp.raw = f
            resp.close = resp.raw.close
        else:
            resp.status_code = 404

        return resp

    def close(self):
        # NOTE(review): only the last temp file is tracked; repeated send()
        # calls on one adapter would leak earlier temp files — verify usage.
        if self._temp_file:
            os.remove(self._temp_file)
class LocalFSAdapter(requests.adapters.BaseAdapter):
    """Transport adapter that serves file:// URLs straight from the local
    filesystem, mimicking an HTTP response (200 with headers, or 404 when
    the path does not exist)."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        local_path = url_to_path(request.url)

        response = requests.models.Response()
        response.status_code = 200
        response.url = request.url

        try:
            file_stats = os.stat(local_path)
        except OSError as exc:
            # Missing/unreadable path maps onto an HTTP-style 404.
            response.status_code = 404
            response.raw = exc
        else:
            last_modified = email.utils.formatdate(file_stats.st_mtime, usegmt=True)
            guessed_type = mimetypes.guess_type(local_path)[0] or "text/plain"
            response.headers = requests.structures.CaseInsensitiveDict({
                "Content-Type": guessed_type,
                "Content-Length": file_stats.st_size,
                "Last-Modified": last_modified,
            })
            response.raw = open(local_path, "rb")
            response.close = response.raw.close

        return response

    def close(self):
        pass
# Taken from requests-ftp
# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
# Copyright 2012 Cory Benfield
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class FTPAdapter(requests.adapters.BaseAdapter):
    '''A Requests Transport Adapter that handles FTP urls.'''

    def __init__(self):
        super(FTPAdapter, self).__init__()

        # Build a dictionary keyed off the methods we support in upper case.
        # The values of this dictionary should be the functions we use to
        # send the specific queries.
        self.func_table = {'LIST': self.list,
                           'RETR': self.retr,
                           'STOR': self.stor,
                           'NLST': self.nlst,
                           'GET': self.retr}

    def send(self, request, **kwargs):
        '''Sends a PreparedRequest object over FTP. Returns a response object.
        '''
        # Get the authentication from the prepared request, if any.
        auth = self.get_username_password_from_header(request)

        # Next, get the host and the path.
        host, port, path = self.get_host_and_path_from_url(request)

        # Sort out the timeout.
        timeout = kwargs.get('timeout', None)

        # Establish the connection and login if needed.
        self.conn = ftplib.FTP()
        self.conn.connect(host, port, timeout)

        if auth is not None:
            self.conn.login(auth[0], auth[1])
        else:
            self.conn.login()

        # Get the method and attempt to find the function to call.
        # NOTE(review): an unsupported request.method raises KeyError here —
        # presumably fine for conda's GET-only usage; confirm.
        resp = self.func_table[request.method](path, request)

        # Return the response.
        return resp

    def close(self):
        '''Dispose of any internal state.'''
        # Currently this is a no-op.
        pass

    def list(self, path, request):
        '''Executes the FTP LIST command on the given path.'''
        data = StringIO()

        # To ensure the StringIO gets cleaned up, we need to alias its close
        # method to the release_conn() method. This is a dirty hack, but there
        # you go.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('LIST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def retr(self, path, request):
        '''Executes the FTP RETR command on the given path.'''
        data = BytesIO()

        # To ensure the BytesIO gets cleaned up, we need to alias its close
        # method. See self.list().
        data.release_conn = data.close

        code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))

        response = build_binary_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def stor(self, path, request):
        '''Executes the FTP STOR command on the given path.'''

        # First, get the file handle. We assume (bravely)
        # that there is only one file to be sent to a given URL. We also
        # assume that the filename is sent as part of the URL, not as part of
        # the files argument. Both of these assumptions are rarely correct,
        # but they are easy.
        data = parse_multipart_files(request)

        # Split into the path and the filename.
        path, filename = os.path.split(path)

        # Switch directories and upload the data.
        self.conn.cwd(path)
        code = self.conn.storbinary('STOR ' + filename, data)

        # Close the connection and build the response.
        self.conn.close()

        response = build_binary_response(request, BytesIO(), code)

        return response

    def nlst(self, path, request):
        '''Executes the FTP NLST command on the given path.'''
        data = StringIO()

        # Alias the close method.
        data.release_conn = data.close

        self.conn.cwd(path)
        code = self.conn.retrbinary('NLST', data_callback_factory(data))

        # When that call has finished executing, we'll have all our data.
        response = build_text_response(request, data, code)

        # Close the connection.
        self.conn.close()

        return response

    def get_username_password_from_header(self, request):
        '''Given a PreparedRequest object, reverse the process of adding HTTP
        Basic auth to obtain the username and password. Allows the FTP adapter
        to piggyback on the basic auth notation without changing the control
        flow.'''
        auth_header = request.headers.get('Authorization')

        if auth_header:
            # The basic auth header is of the form 'Basic xyz'. We want the
            # second part. Check that we have the right kind of auth though.
            encoded_components = auth_header.split()[:2]
            if encoded_components[0] != 'Basic':
                raise AuthenticationError('Invalid form of Authentication used.')
            else:
                encoded = encoded_components[1]

            # Decode the base64 encoded string.
            # NOTE(review): on Python 3, b64decode returns bytes, so the
            # str split(':') below would raise TypeError — presumably this
            # path is only exercised on Python 2; verify.
            decoded = b64decode(encoded)

            # The string is of the form 'username:password'. Split on the
            # colon.
            components = decoded.split(':')
            username = components[0]
            password = components[1]
            return (username, password)
        else:
            # No auth header. Return None.
            return None

    def get_host_and_path_from_url(self, request):
        '''Given a PreparedRequest object, split the URL in such a manner as to
        determine the host and the path. This is a separate method to wrap some
        of urlparse's craziness.'''
        url = request.url
        # scheme, netloc, path, params, query, fragment = urlparse(url)
        parsed = urlparse(url)
        path = parsed.path

        # If there is a slash on the front of the path, chuck it.
        if path[0] == '/':
            path = path[1:]

        host = parsed.hostname
        port = parsed.port or 0

        return (host, port, path)
def data_callback_factory(variable):
    '''Return a callback suitable for ftplib's retrbinary-style methods.

    Each chunk the FTP library hands to the callback is appended to
    *variable*, which must be a file-like object (e.g. BytesIO/StringIO).
    '''
    def writer(chunk):
        variable.write(chunk)

    return writer
def build_text_response(request, data, code):
    '''Build a response for textual data, decoded as ASCII.'''
    return build_response(request, data, code, 'ascii')
def build_binary_response(request, data, code):
    '''Build a response for binary data; no encoding is assumed.'''
    return build_response(request, data, code, None)
def build_response(request, data, code, encoding):
    '''Assemble a requests.Response from ftplib results.

    *data* is the file-like body, *code* the ftplib status line (e.g.
    "226 Transfer complete"), *encoding* the text encoding or None.
    '''
    response = requests.Response()
    response.encoding = encoding

    # Fill in some useful fields.
    response.raw = data
    response.url = request.url
    response.request = request
    # Keep only the numeric status from the ftplib status line.
    response.status_code = int(code.split()[0])

    # Make sure to seek the file-like raw object back to the start.
    response.raw.seek(0)

    # Run any user-registered response hooks before returning.
    return requests.hooks.dispatch_hook('response', request.hooks, response)
def parse_multipart_files(request):
    '''Given a prepared request, return a file-like object containing the
    original data. This is pretty hacky.

    Returns a BytesIO positioned at the start of the first file found in the
    multipart body.

    NOTE(review): on Python 3 this function is broken in several ways —
    cgi.parse_multipart expects a bytes boundary and returns bytes values,
    so ''.join(filedata) would raise TypeError (b''.join is needed), and the
    cgi module itself was removed in Python 3.13. Presumably this code path
    is only exercised on Python 2; verify before relying on it.
    '''
    # Start by grabbing the pdict.
    _, pdict = cgi.parse_header(request.headers['Content-Type'])

    # Now, wrap the multipart data in a BytesIO buffer. This is annoying.
    buf = BytesIO()
    buf.write(request.body)
    buf.seek(0)

    # Parse the data. Simply take the first file.
    data = cgi.parse_multipart(buf, pdict)
    _, filedata = data.popitem()
    buf.close()

    # Get a BytesIO now, and write the file into it.
    buf = BytesIO()
    buf.write(''.join(filedata))
    buf.seek(0)

    return buf
conda/exceptions.py
from __future__ import absolute_import, division, print_function
import sys
from traceback import format_exc
from .compat import iteritems, iterkeys
class CondaError(Exception):
    """Root of the conda exception hierarchy.

    str() and repr() render the positional args joined by spaces, skipping
    any bool args (booleans are presumably flags, not message text —
    verify against callers).
    """

    def __init__(self, *args, **kwargs):
        super(CondaError, self).__init__(*args, **kwargs)

    def _joined_args(self):
        return ' '.join(str(a) for a in self.args if not isinstance(a, bool))

    def __repr__(self):
        return self._joined_args()

    def __str__(self):
        return self._joined_args()
class InvalidInstruction(CondaError):
    """Raised when the plan executor receives an unknown instruction."""
    def __init__(self, instruction, *args, **kwargs):
        super(InvalidInstruction, self).__init__(
            "No handler for instruction: %r\n" % instruction, *args, **kwargs)


class LockError(CondaError, RuntimeError):
    """Raised when a lock cannot be acquired or released."""
    def __init__(self, message, *args, **kwargs):
        # NOTE: unlike most siblings, this message has no trailing newline.
        super(LockError, self).__init__("Lock error: %s" % message,
                                        *args, **kwargs)


class ArgumentError(CondaError):
    """Base class for command-line argument problems."""
    def __init__(self, message, *args, **kwargs):
        super(ArgumentError, self).__init__('Argument Error: %s\n' % message,
                                            *args, **kwargs)


class ArgumentNotFoundError(ArgumentError):
    """Raised when an expected argument is missing."""
    def __init__(self, argument, *args, **kwargs):
        super(ArgumentNotFoundError, self).__init__(
            'Argument not found: %s\n' % argument, *args, **kwargs)


class TooManyArgumentsError(ArgumentError):
    """Raised when more arguments are supplied than expected."""
    def __init__(self, message, *args, **kwargs):
        super(TooManyArgumentsError, self).__init__(
            'Too many arguments: %s\n' % message, *args, **kwargs)


class TooFewArgumentsError(ArgumentError):
    """Raised when fewer arguments are supplied than expected."""
    def __init__(self, message, *args, **kwargs):
        super(TooFewArgumentsError, self).__init__(
            'Too few arguments: %s\n' % message, *args, **kwargs)


class CommandError(CondaError):
    """Base class for failures of conda subcommands."""
    def __init__(self, message, *args, **kwargs):
        super(CommandError, self).__init__('Command Error: %s\n' % message,
                                           *args, **kwargs)


class CommandNotFoundError(CommandError):
    """Raised when the requested subcommand does not exist."""
    def __init__(self, message, *args, **kwargs):
        super(CommandNotFoundError, self).__init__(
            'Command not found: %s\n' % message, *args, **kwargs)
class CondaFileNotFoundError(CondaError, OSError):
    """Raised when an expected file is missing."""
    def __init__(self, message, *args, **kwargs):
        super(CondaFileNotFoundError, self).__init__(
            "File not found: %s\n" % message, *args, **kwargs)


class DirectoryNotFoundError(CondaError):
    """Raised when an expected directory is missing."""
    def __init__(self, message, *args, **kwargs):
        super(DirectoryNotFoundError, self).__init__(
            'Directory not found: %s\n' % message, *args, **kwargs)


class CondaEnvironmentError(CondaError, EnvironmentError):
    """Raised when a named conda environment cannot be located."""
    def __init__(self, message, *args, **kwargs):
        super(CondaEnvironmentError, self).__init__(
            'Environment not found: %s\n' % message, *args, **kwargs)


class DryRunExit(CondaError):
    """Raised to stop execution after a --dry-run invocation."""
    def __init__(self, *args, **kwargs):
        super(DryRunExit, self).__init__('Dry run: exiting\n', *args, **kwargs)


class CondaSystemExit(CondaError, SystemExit):
    """SystemExit variant that participates in the conda error hierarchy."""
    def __init__(self, *args, **kwargs):
        super(CondaSystemExit, self).__init__(*args, **kwargs)


class SubprocessExit(CondaError):
    """Raised when a spawned subprocess terminates the operation."""
    def __init__(self, *args, **kwargs):
        super(SubprocessExit, self).__init__('Subprocess exiting\n',
                                             *args, **kwargs)


class PaddingError(CondaError):
    """Raised when prefix-padding of a binary fails during relocation."""
    def __init__(self, *args, **kwargs):
        super(PaddingError, self).__init__('Padding error:\n', *args, **kwargs)


class LinkError(CondaError):
    """Raised when linking a package into an environment fails."""
    def __init__(self, *args, **kwargs):
        super(LinkError, self).__init__('Link error\n', *args, **kwargs)


class CondaOSError(CondaError, OSError):
    """OSError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaOSError, self).__init__('OS error: %s\n' % message,
                                           *args, **kwargs)


class AlreadyInitializedError(CondaError):
    """Raised when initialization is attempted a second time."""
    def __init__(self, message, *args, **kwargs):
        super(AlreadyInitializedError, self).__init__(message + '\n',
                                                      *args, **kwargs)


class ProxyError(CondaError):
    """Raised for proxy configuration or connection problems."""
    def __init__(self, message, *args, **kwargs):
        super(ProxyError, self).__init__('Proxy error: %s\n' % message,
                                         *args, **kwargs)


class CondaIOError(CondaError, IOError):
    """IOError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaIOError, self).__init__('IO error: %s\n' % message,
                                           *args, **kwargs)
class CondaFileIOError(CondaIOError):
    """Raised when a file cannot be read or written."""
    def __init__(self, message, *args, **kwargs):
        super(CondaFileIOError, self).__init__(
            "Couldn't read or write to file. %s\n" % message, *args, **kwargs)


class CondaKeyError(CondaError, KeyError):
    """Raised for an invalid or missing key."""
    def __init__(self, message, *args, **kwargs):
        # kept as an attribute — presumably read by callers; verify before removing
        self.msg = 'Key error: %s\n' % message
        super(CondaKeyError, self).__init__(self.msg, *args, **kwargs)


class ChannelError(CondaError):
    """Base class for channel-related problems."""
    def __init__(self, message, *args, **kwargs):
        super(ChannelError, self).__init__('Channel Error: %s\n' % message,
                                           *args, **kwargs)


class ChannelNotAllowed(ChannelError):
    """Raised when a channel is blocked by configuration."""
    def __init__(self, message, *args, **kwargs):
        super(ChannelNotAllowed, self).__init__(
            'Channel not allowed: %s\n' % message, *args, **kwargs)


class CondaImportError(CondaError, ImportError):
    """ImportError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaImportError, self).__init__('Import error: %s\n' % message,
                                               *args, **kwargs)


class ParseError(CondaError):
    """Base class for parsing failures."""
    def __init__(self, message, *args, **kwargs):
        super(ParseError, self).__init__('Parse error: %s\n' % message,
                                         *args, **kwargs)


class CouldntParseError(ParseError):
    """Raised when a yaml/.condarc file cannot be parsed."""
    def __init__(self, reason, *args, **kwargs):
        # NOTE: Exception.__init__ re-assigns self.args, so the list built
        # here only survives through the value passed to super().
        self.args = ["""Error: Could not parse the yaml file. Use -f to use the
yaml parser (this will remove any structure or comments from the existing
.condarc file). Reason: %s\n""" % reason]
        super(CouldntParseError, self).__init__(self.args[0], *args, **kwargs)

    def __repr__(self):
        return self.args[0]
class MD5MismatchError(CondaError):
    """Raised when a downloaded file's MD5 does not match the expected value."""
    def __init__(self, message, *args, **kwargs):
        super(MD5MismatchError, self).__init__(
            'MD5MismatchError: %s\n' % message, *args, **kwargs)


class PackageNotFoundError(CondaError):
    """Raised when a requested package cannot be located."""
    def __init__(self, message, *args, **kwargs):
        super(PackageNotFoundError, self).__init__(
            'Package not found: %s\n' % message, *args, **kwargs)


class CondaHTTPError(CondaError):
    """Raised for HTTP-level failures while talking to a channel."""
    def __init__(self, message, *args, **kwargs):
        super(CondaHTTPError, self).__init__('HTTP Error: %s\n' % message,
                                             *args, **kwargs)


class AuthenticationError(CondaError):
    """Raised for malformed or failed authentication attempts."""
class NoPackagesFoundError(CondaError, RuntimeError):
    '''An exception to report that requested packages are missing.

    Args:
        bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
        chains, from top level to bottom.

    Returns:
        Raises an exception with a formatted message detailing the
        missing packages and/or dependencies.
    '''
    def __init__(self, bad_deps, *args, **kwargs):
        from .resolve import dashlist
        from .base.context import subdir

        # Specs of the leaf (deepest) element of each chain.
        missing_specs = set(chain[-1].spec for chain in bad_deps)

        plural = len(bad_deps) > 1
        if all(len(chain) > 1 for chain in bad_deps):
            # Every chain has depth > 1: everything missing is a dependency.
            noun = "Dependencies" if plural else "Dependency"
        elif all(len(chain) == 1 for chain in bad_deps):
            # Every chain is a single spec: directly-requested packages.
            noun = "Packages" if plural else "Package"
        else:
            noun = "Packages/dependencies"

        formatted = dashlist(' -> '.join(str(spec) for spec in chain)
                             for chain in bad_deps)
        message = '%s missing in current %s channels: %s\n' % (noun, subdir,
                                                               formatted)
        super(NoPackagesFoundError, self).__init__(message, *args, **kwargs)
        self.pkgs = missing_specs
class UnsatisfiableError(CondaError, RuntimeError):
    '''An exception to report unsatisfiable dependencies.

    Args:
        bad_deps: a list of tuples of objects (likely MatchSpecs).
        chains: (optional) if True, the tuples are interpreted as chains
            of dependencies, from top level to bottom. If False, the tuples
            are interpreted as simple lists of conflicting specs.

    Returns:
        Raises an exception with a formatted message detailing the
        unsatisfiable specifications.
    '''
    def __init__(self, bad_deps, chains=True, *args, **kwargs):
        from .resolve import dashlist, MatchSpec

        # Reduce each dependency object to its bare spec string.
        bad_deps = [list(map(lambda x: x.spec, dep)) for dep in bad_deps]
        if chains:
            # Merge chains that share a common prefix of package names, and
            # collect the per-package version constraints into sets so one
            # line can summarize several conflicting chains.
            chains = {}
            for dep in sorted(bad_deps, key=len, reverse=True):
                # Split each downstream spec into (name, ' ', constraint).
                dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
                key = (dep[0],) + tuple(v[0] for v in dep1)
                vals = ('',) + tuple(v[2] for v in dep1)
                found = False
                for key2, csets in iteritems(chains):
                    # Longest chains were inserted first, so a shorter chain
                    # that is a prefix of an existing one merges into it.
                    if key2[:len(key)] == key:
                        for cset, val in zip(csets, vals):
                            cset.add(val)
                        found = True
                if not found:
                    chains[key] = [{val} for val in vals]
            bad_deps = []
            for key, csets in iteritems(chains):
                deps = []
                for name, cset in zip(key, csets):
                    if '' not in cset:
                        # All merged chains constrained this package.
                        pass
                    elif len(cset) == 1:
                        # Only the unconstrained placeholder: drop constraints.
                        cset.clear()
                    else:
                        # Mixed constrained/unconstrained: show a wildcard.
                        cset.remove('')
                        cset.add('*')
                    if name[0] == '@':
                        # '@name' encodes a track_features pseudo-package.
                        name = 'feature:' + name[1:]
                    deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
                chains[key] = ' -> '.join(deps)
            bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
            msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
        else:
            # Flat conflict sets: just sort for a deterministic message.
            bad_deps = [sorted(dep) for dep in bad_deps]
            bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
            msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
        msg = msg % dashlist(bad_deps) + '\n'
        super(UnsatisfiableError, self).__init__(msg, *args, **kwargs)
class InstallError(CondaError):
    """Raised when package installation fails."""
    def __init__(self, message, *args, **kwargs):
        super(InstallError, self).__init__('Install error: %s\n' % message,
                                           *args, **kwargs)


class RemoveError(CondaError):
    """Raised when package removal fails."""
    def __init__(self, message, *args, **kwargs):
        super(RemoveError, self).__init__('RemoveError: %s\n' % message,
                                          *args, **kwargs)


class CondaIndexError(CondaError, IndexError):
    """IndexError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaIndexError, self).__init__('Index error: %s\n' % message,
                                              *args, **kwargs)


class CondaRuntimeError(CondaError, RuntimeError):
    """RuntimeError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaRuntimeError, self).__init__('Runtime error: %s\n' % message,
                                                *args, **kwargs)


class CondaValueError(CondaError, ValueError):
    """ValueError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaValueError, self).__init__('Value error: %s\n' % message,
                                              *args, **kwargs)


class ValidationError(CondaValueError):
    """Raised when a configuration or parameter value fails validation."""


class CondaTypeError(CondaError, TypeError):
    """TypeError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaTypeError, self).__init__('Type error: %s\n' % message,
                                             *args, **kwargs)


class CondaAssertionError(CondaError, AssertionError):
    """AssertionError variant that participates in the conda error hierarchy."""
    def __init__(self, message, *args, **kwargs):
        super(CondaAssertionError, self).__init__(
            'Assertion error: %s\n' % message, *args, **kwargs)


class CondaHistoryError(CondaError):
    """Raised for problems reading or writing an environment's history."""
    def __init__(self, message, *args, **kwargs):
        super(CondaHistoryError, self).__init__('History error: %s\n' % message,
                                                *args, **kwargs)


class CondaSignatureError(CondaError):
    """Raised when package signature verification fails."""
    def __init__(self, message, *args, **kwargs):
        super(CondaSignatureError, self).__init__(
            'Signature error: %s\n' % message, *args, **kwargs)
def print_exception(exception):
    """Render *exception* for the user: as JSON on stdout in --json mode,
    otherwise as its repr on stderr."""
    from sys import stderr
    from conda.base.context import context
    from conda.cli.common import stdout_json

    text = repr(exception)
    if context.json:
        stdout_json({'error': text})
    else:
        stderr.write(text)
def get_info():
    """Run `conda info` in-process and return its captured (stdout, stderr)."""
    from shlex import split
    from conda.cli import conda_argparse
    from conda.cli.main_info import configure_parser
    from conda.common.io import captured

    parser = conda_argparse.ArgumentParser()
    sub_parsers = parser.add_subparsers(metavar='command', dest='cmd')
    configure_parser(sub_parsers)
    args = parser.parse_args(split("info"))

    with captured() as streams:
        args.func(args, parser)
    return streams.stdout, streams.stderr
def print_unexpected_error_message(e):
    """Report an unexpected error with bug-report boilerplate.

    In --json mode the traceback is emitted as JSON on stdout; otherwise the
    bug-report preamble, `conda info` output, the failing command line, and
    the indented traceback are printed.

    :param e: the exception being reported. NOTE(review): `e` itself is
        unused — the text comes from format_exc(), which requires this
        function to be called from inside an except block; verify callers.
    """
    traceback = format_exc()

    from conda.base.context import context

    if context.json:
        from conda.cli.common import stdout_json
        stdout_json(dict(error=traceback))
    else:
        message = """\
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
"""
        print(message)
        command = ' '.join(sys.argv)
        if ' info' not in command:
            # get and print `conda info`
            info_stdout, info_stderr = get_info()
            print(info_stdout if info_stdout else info_stderr)
        print("`$ {0}`".format(command))
        print('\n')
        print('\n'.join(' ' + line for line in traceback.splitlines()))
def conda_exception_handler(func, *args, **kwargs):
    """Invoke ``func(*args, **kwargs)``, translating exceptions into exit code 1.

    Returns func's result when it is an int (an exit code), otherwise None.
    CondaError is reported tersely (unless context.debug); everything else —
    including CondaRuntimeError, which is checked before its CondaError base —
    is reported as an unexpected error.
    """
    try:
        result = func(*args, **kwargs)
    except CondaRuntimeError as e:
        print_unexpected_error_message(e)
        return 1
    except CondaError as e:
        from conda.base.context import context
        if context.debug:
            print_unexpected_error_message(e)
        else:
            print_exception(e)
        return 1
    except Exception as e:
        print_unexpected_error_message(e)
        return 1
    else:
        if isinstance(result, int):
            return result
| conda/connection.py
--- a/conda/connection.py
+++ b/conda/connection.py
@@ -6,6 +6,10 @@
from __future__ import print_function, division, absolute_import
+from base64 import b64decode
+
+import ftplib
+
import cgi
import email
import mimetypes
@@ -16,10 +20,12 @@
from io import BytesIO
from logging import getLogger
+from conda.exceptions import AuthenticationError
from . import __version__ as VERSION
from .base.context import context, platform as context_platform
-from .common.url import url_to_path, url_to_s3_info
+from .common.url import url_to_path, url_to_s3_info, urlparse
from .utils import gnu_get_libc_version
+from .compat import StringIO
RETRIES = 3
@@ -195,6 +201,196 @@ def close(self):
pass
+# Taken from requests-ftp
+# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py)
+
+# Copyright 2012 Cory Benfield
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class FTPAdapter(requests.adapters.BaseAdapter):
+ '''A Requests Transport Adapter that handles FTP urls.'''
+ def __init__(self):
+ super(FTPAdapter, self).__init__()
+
+ # Build a dictionary keyed off the methods we support in upper case.
+ # The values of this dictionary should be the functions we use to
+ # send the specific queries.
+ self.func_table = {'LIST': self.list,
+ 'RETR': self.retr,
+ 'STOR': self.stor,
+ 'NLST': self.nlst,
+ 'GET': self.retr}
+
+ def send(self, request, **kwargs):
+ '''Sends a PreparedRequest object over FTP. Returns a response object.
+ '''
+ # Get the authentication from the prepared request, if any.
+ auth = self.get_username_password_from_header(request)
+
+ # Next, get the host and the path.
+ host, port, path = self.get_host_and_path_from_url(request)
+
+ # Sort out the timeout.
+ timeout = kwargs.get('timeout', None)
+
+ # Establish the connection and login if needed.
+ self.conn = ftplib.FTP()
+ self.conn.connect(host, port, timeout)
+
+ if auth is not None:
+ self.conn.login(auth[0], auth[1])
+ else:
+ self.conn.login()
+
+ # Get the method and attempt to find the function to call.
+ resp = self.func_table[request.method](path, request)
+
+ # Return the response.
+ return resp
+
+ def close(self):
+ '''Dispose of any internal state.'''
+ # Currently this is a no-op.
+ pass
+
+ def list(self, path, request):
+ '''Executes the FTP LIST command on the given path.'''
+ data = StringIO()
+
+ # To ensure the StringIO gets cleaned up, we need to alias its close
+ # method to the release_conn() method. This is a dirty hack, but there
+ # you go.
+ data.release_conn = data.close
+
+ self.conn.cwd(path)
+ code = self.conn.retrbinary('LIST', data_callback_factory(data))
+
+ # When that call has finished executing, we'll have all our data.
+ response = build_text_response(request, data, code)
+
+ # Close the connection.
+ self.conn.close()
+
+ return response
+
+ def retr(self, path, request):
+ '''Executes the FTP RETR command on the given path.'''
+ data = BytesIO()
+
+ # To ensure the BytesIO gets cleaned up, we need to alias its close
+ # method. See self.list().
+ data.release_conn = data.close
+
+ code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data))
+
+ response = build_binary_response(request, data, code)
+
+ # Close the connection.
+ self.conn.close()
+
+ return response
+
+ def stor(self, path, request):
+ '''Executes the FTP STOR command on the given path.'''
+
+ # First, get the file handle. We assume (bravely)
+ # that there is only one file to be sent to a given URL. We also
+ # assume that the filename is sent as part of the URL, not as part of
+ # the files argument. Both of these assumptions are rarely correct,
+ # but they are easy.
+ data = parse_multipart_files(request)
+
+ # Split into the path and the filename.
+ path, filename = os.path.split(path)
+
+ # Switch directories and upload the data.
+ self.conn.cwd(path)
+ code = self.conn.storbinary('STOR ' + filename, data)
+
+ # Close the connection and build the response.
+ self.conn.close()
+
+ response = build_binary_response(request, BytesIO(), code)
+
+ return response
+
+ def nlst(self, path, request):
+ '''Executes the FTP NLST command on the given path.'''
+ data = StringIO()
+
+ # Alias the close method.
+ data.release_conn = data.close
+
+ self.conn.cwd(path)
+ code = self.conn.retrbinary('NLST', data_callback_factory(data))
+
+ # When that call has finished executing, we'll have all our data.
+ response = build_text_response(request, data, code)
+
+ # Close the connection.
+ self.conn.close()
+
+ return response
+
+ def get_username_password_from_header(self, request):
+ '''Given a PreparedRequest object, reverse the process of adding HTTP
+ Basic auth to obtain the username and password. Allows the FTP adapter
+ to piggyback on the basic auth notation without changing the control
+ flow.'''
+ auth_header = request.headers.get('Authorization')
+
+ if auth_header:
+ # The basic auth header is of the form 'Basic xyz'. We want the
+ # second part. Check that we have the right kind of auth though.
+ encoded_components = auth_header.split()[:2]
+ if encoded_components[0] != 'Basic':
+ raise AuthenticationError('Invalid form of Authentication used.')
+ else:
+ encoded = encoded_components[1]
+
+ # Decode the base64 encoded string.
+ decoded = b64decode(encoded)
+
+ # The string is of the form 'username:password'. Split on the
+ # colon.
+ components = decoded.split(':')
+ username = components[0]
+ password = components[1]
+ return (username, password)
+ else:
+ # No auth header. Return None.
+ return None
+
+ def get_host_and_path_from_url(self, request):
+ '''Given a PreparedRequest object, split the URL in such a manner as to
+ determine the host and the path. This is a separate method to wrap some
+ of urlparse's craziness.'''
+ url = request.url
+ # scheme, netloc, path, params, query, fragment = urlparse(url)
+ parsed = urlparse(url)
+ path = parsed.path
+
+ # If there is a slash on the front of the path, chuck it.
+ if path[0] == '/':
+ path = path[1:]
+
+ host = parsed.hostname
+ port = parsed.port or 0
+
+ return (host, port, path)
+
+
def data_callback_factory(variable):
'''Returns a callback suitable for use by the FTP library. This callback
will repeatedly save data into the variable provided to this function. This
conda/exceptions.py
--- a/conda/exceptions.py
+++ b/conda/exceptions.py
@@ -203,6 +203,10 @@ def __init__(self, message, *args, **kwargs):
super(CondaHTTPError, self).__init__(msg, *args, **kwargs)
+class AuthenticationError(CondaError):
+ pass
+
+
class NoPackagesFoundError(CondaError, RuntimeError):
'''An exception to report that requested packages are missing.
|
setuptools hard build dependency
Here: https://github.com/conda/conda/blob/master/setup.py#L8
@kalefranz I thought we had agreed that setuptools is not going to be a hard build dependency anymore.
| conda/_vendor/auxlib/packaging.py
<|code_start|>
# -*- coding: utf-8 -*-
"""
=====
Usage
=====
Method #1: auxlib.packaging as a run time dependency
---------------------------------------------------
Place the following lines in your package's main __init__.py
from auxlib import get_version
__version__ = get_version(__file__)
Method #2: auxlib.packaging as a build time-only dependency
----------------------------------------------------------
import auxlib
# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src directory to the sys.path.
here = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.join(here, "auxlib")
sys.path.insert(0, src_dir)
setup(
version=auxlib.__version__,
cmdclass={
'build_py': auxlib.BuildPyCommand,
'sdist': auxlib.SDistCommand,
'test': auxlib.Tox,
},
)
Place the following lines in your package's main __init__.py
from auxlib import get_version
__version__ = get_version(__file__)
Method #3: write .version file
------------------------------
Configuring `python setup.py test` for Tox
------------------------------------------
must use setuptools (distutils doesn't have a test cmd)
setup(
version=auxlib.__version__,
cmdclass={
'build_py': auxlib.BuildPyCommand,
'sdist': auxlib.SDistCommand,
'test': auxlib.Tox,
},
)
"""
from __future__ import print_function, division, absolute_import
import sys
from collections import namedtuple
from logging import getLogger
from os import getenv, remove
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from re import compile
from shlex import split
from subprocess import CalledProcessError, Popen, PIPE
try:
from setuptools.command.build_py import build_py
from setuptools.command.sdist import sdist
from setuptools.command.test import test as TestCommand
except ImportError:
from distutils.command.build_py import build_py
from distutils.command.sdist import sdist
TestCommand = object
log = getLogger(__name__)
Response = namedtuple('Response', ['stdout', 'stderr', 'rc'])
GIT_DESCRIBE_REGEX = compile(r"(?:[_-a-zA-Z]*)"
r"(?P<version>\d+\.\d+\.\d+)"
r"(?:-(?P<dev>\d+)-g(?P<hash>[0-9a-f]{7}))$")
def call(path, command, raise_on_error=True):
    """Run *command* (a shell-style string) with *path* as working directory.

    Returns a ``Response(stdout, stderr, rc)`` namedtuple with utf-8 decoded
    output.  Raises CalledProcessError on a non-zero exit code unless
    *raise_on_error* is False.
    """
    p = Popen(split(command), cwd=path, stdout=PIPE, stderr=PIPE)
    stdout, stderr = p.communicate()
    rc = p.returncode
    log.debug("{0} $ {1}\n"
              " stdout: {2}\n"
              " stderr: {3}\n"
              " rc: {4}"
              .format(path, command, stdout, stderr, rc))
    if raise_on_error and rc != 0:
        raise CalledProcessError(rc, command, "stdout: {0}\nstderr: {1}".format(stdout, stderr))
    return Response(stdout.decode('utf-8'), stderr.decode('utf-8'), int(rc))
def _get_version_from_version_file(path):
file_path = join(path, '.version')
if isfile(file_path):
with open(file_path, 'r') as fh:
return fh.read().strip()
def _git_describe_tags(path):
    """Return the output of ``git describe --tags --long`` run in *path*.

    Returns None when *path* is not a git repository or the repo has no
    tags.  Raises CalledProcessError when git is missing (rc 127) or fails
    for any other reason.
    """
    # refresh the index first so `git describe` sees an up-to-date state
    call(path, "git update-index --refresh", raise_on_error=False)
    response = call(path, "git describe --tags --long", raise_on_error=False)
    if response.rc == 0:
        return response.stdout.strip()
    elif response.rc == 128 and "no names found" in response.stderr.lower():
        # directory is a git repo, but no tags found
        return None
    elif response.rc == 128 and "not a git repository" in response.stderr.lower():
        return None
    elif response.rc == 127:
        log.error("git not found on path: PATH={0}".format(getenv('PATH', None)))
        raise CalledProcessError(response.rc, response.stderr)
    else:
        raise CalledProcessError(response.rc, response.stderr)
def _get_version_from_git_tag(path):
    """Return a PEP440-compliant version derived from the git status,
    or None when the description does not match GIT_DESCRIBE_REGEX.
    """
    m = GIT_DESCRIBE_REGEX.match(_git_describe_tags(path) or '')
    if m is None:
        return None
    version, post_commit, hash = m.groups()
    # exactly on a tag -> plain version; otherwise a dev version with
    # commit count and abbreviated hash as local identifier
    return version if post_commit == '0' else "{0}.dev{1}+{2}".format(version, post_commit, hash)
def get_version(dunder_file):
    """Returns a version string for the current package, derived
    either from git or from a .version file.

    This function is expected to run in two contexts. In a development
    context, where .git/ exists, the version is pulled from git tags.
    Using the BuildPyCommand and SDistCommand classes for cmdclass in
    setup.py will write a .version file into any dist.

    In an installed context, the .version file written at dist build
    time is the source of version information.
    """
    path = abspath(expanduser(dirname(dunder_file)))
    # the baked-in .version file takes precedence over live git metadata
    return _get_version_from_version_file(path) or _get_version_from_git_tag(path)
def write_version_into_init(target_dir, version):
    """Rewrite ``__init__.py`` in *target_dir* so ``__version__`` is the
    literal *version* string, dropping any ``auxlib`` import lines.
    """
    target_init_file = join(target_dir, "__init__.py")
    assert isfile(target_init_file), "File not found: {0}".format(target_init_file)
    with open(target_init_file, 'r') as f:
        init_lines = f.readlines()
    for q in range(len(init_lines)):
        if init_lines[q].startswith('__version__'):
            init_lines[q] = '__version__ = "{0}"\n'.format(version)
        elif init_lines[q].startswith(('from auxlib', 'import auxlib')):
            init_lines[q] = None  # mark the auxlib import line for removal
    print("UPDATING {0}".format(target_init_file))
    remove(target_init_file)
    with open(target_init_file, 'w') as f:
        f.write(''.join(l for l in init_lines if l is not None))
def write_version_file(target_dir, version):
    """Drop a ``.version`` file containing *version* into *target_dir*
    (which must already exist)."""
    assert isdir(target_dir), "Directory not found: {0}".format(target_dir)
    version_path = join(target_dir, ".version")
    with open(version_path, 'w') as fh:
        fh.write(version)
class BuildPyCommand(build_py):
    """build_py that also stamps the version into the built package."""
    def run(self):
        build_py.run(self)
        # locate the just-built package under build/lib and stamp it
        target_dir = join(self.build_lib, self.distribution.metadata.name)
        write_version_into_init(target_dir, self.distribution.metadata.version)
        write_version_file(target_dir, self.distribution.metadata.version)

# TODO: separate out .version file implementation
class SDistCommand(sdist):
    """sdist that also stamps the version into the release tree."""
    def make_release_tree(self, base_dir, files):
        sdist.make_release_tree(self, base_dir, files)
        # stamp the copied package inside the release tree
        target_dir = join(base_dir, self.distribution.metadata.name)
        write_version_into_init(target_dir, self.distribution.metadata.version)
        write_version_file(target_dir, self.distribution.metadata.version)
class Tox(TestCommand):
    """`python setup.py test` entry point that delegates to tox."""
    # TODO: Make this class inherit from distutils instead of setuptools
    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.tox_args = None

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # import here, because outside the eggs aren't loaded
        from tox import cmdline
        from shlex import split
        args = self.tox_args
        if args:
            args = split(self.tox_args)
        else:
            args = ''
        errno = cmdline(args=args)
        sys.exit(errno)
<|code_end|>
setup.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
import os
import sys
from setuptools import find_packages

# `develop` needs setuptools; a plain install works with distutils alone
if 'develop' in sys.argv:
    from setuptools import setup
else:
    from distutils.core import setup

if not (sys.version_info[:2] == (2, 7) or sys.version_info[:2] >= (3, 3)):
    sys.exit("conda is only meant for Python 2.7 or 3.3 and up. "
             "current version: %d.%d" % sys.version_info[:2])

if os.environ.get('CONDA_DEFAULT_ENV'):
    # Try to prevent accidentally installing conda into a non-root conda environment
    sys.exit("""
You appear to be in a non-root conda environment. Conda is only supported in
the root environment. Deactivate and try again. If you believe this message
is in error, run CONDA_DEFAULT_ENV='' python setup.py.
""")

# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src directory to the sys.path.
here = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.join(here, "conda")
sys.path.insert(0, src_dir)
import conda  # NOQA

# the long description comes straight from the README
with open(os.path.join(here, "README.rst")) as f:
    long_description = f.read()

scripts = ['bin/activate',
           'bin/deactivate',
           ]
if sys.platform == 'win32':
    # Powershell scripts should go here
    scripts.extend(['bin/activate.bat',
                    'bin/deactivate.bat'])

setup(
    name=conda.__name__,
    version=conda.__version__,
    author=conda.__author__,
    author_email=conda.__email__,
    url=conda.__url__,
    license=conda.__license__,
    description=conda.__summary__,
    long_description=long_description,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
    ],
    packages=find_packages(exclude=("tests", "conda-env", "build", "utils", ".tox")),
    cmdclass={
        # version-stamping build commands from the vendored auxlib
        'build_py': conda._vendor.auxlib.BuildPyCommand,
        'sdist': conda._vendor.auxlib.SDistCommand,
    },
    install_requires=[
        'pycosat >=0.6.1',
    ],
    entry_points={
        'console_scripts': [
            "conda = conda.cli.main:main"
        ],
    },
    scripts=scripts,
    zip_safe=False,
)
<|code_end|>
| conda/_vendor/auxlib/packaging.py
# -*- coding: utf-8 -*-
"""
=====
Usage
=====
Method #1: auxlib.packaging as a run time dependency
---------------------------------------------------
Place the following lines in your package's main __init__.py
from auxlib import get_version
__version__ = get_version(__file__)
Method #2: auxlib.packaging as a build time-only dependency
----------------------------------------------------------
import auxlib
# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src directory to the sys.path.
here = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.join(here, "auxlib")
sys.path.insert(0, src_dir)
setup(
version=auxlib.__version__,
cmdclass={
'build_py': auxlib.BuildPyCommand,
'sdist': auxlib.SDistCommand,
'test': auxlib.Tox,
},
)
Place the following lines in your package's main __init__.py
from auxlib import get_version
__version__ = get_version(__file__)
Method #3: write .version file
------------------------------
Configuring `python setup.py test` for Tox
------------------------------------------
must use setuptools (distutils doesn't have a test cmd)
setup(
version=auxlib.__version__,
cmdclass={
'build_py': auxlib.BuildPyCommand,
'sdist': auxlib.SDistCommand,
'test': auxlib.Tox,
},
)
"""
from __future__ import print_function, division, absolute_import
import sys
from collections import namedtuple
from logging import getLogger
from os import getenv, remove, listdir
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from re import compile
from shlex import split
from subprocess import CalledProcessError, Popen, PIPE
from fnmatch import fnmatchcase
from distutils.util import convert_path
try:
from setuptools.command.build_py import build_py
from setuptools.command.sdist import sdist
from setuptools.command.test import test as TestCommand
except ImportError:
from distutils.command.build_py import build_py
from distutils.command.sdist import sdist
TestCommand = object
log = getLogger(__name__)
Response = namedtuple('Response', ['stdout', 'stderr', 'rc'])
GIT_DESCRIBE_REGEX = compile(r"(?:[_-a-zA-Z]*)"
r"(?P<version>\d+\.\d+\.\d+)"
r"(?:-(?P<dev>\d+)-g(?P<hash>[0-9a-f]{7}))$")
def call(path, command, raise_on_error=True):
    """Run *command* (a shell-style string) with *path* as working directory.

    Returns a ``Response(stdout, stderr, rc)`` namedtuple with utf-8 decoded
    output.  Raises CalledProcessError on a non-zero exit code unless
    *raise_on_error* is False.
    """
    p = Popen(split(command), cwd=path, stdout=PIPE, stderr=PIPE)
    stdout, stderr = p.communicate()
    rc = p.returncode
    log.debug("{0} $ {1}\n"
              " stdout: {2}\n"
              " stderr: {3}\n"
              " rc: {4}"
              .format(path, command, stdout, stderr, rc))
    if raise_on_error and rc != 0:
        raise CalledProcessError(rc, command, "stdout: {0}\nstderr: {1}".format(stdout, stderr))
    return Response(stdout.decode('utf-8'), stderr.decode('utf-8'), int(rc))
def _get_version_from_version_file(path):
    """Return the stripped contents of ``<path>/.version``, or None if the
    file does not exist (implicit return)."""
    file_path = join(path, '.version')
    if isfile(file_path):
        with open(file_path, 'r') as fh:
            return fh.read().strip()
def _git_describe_tags(path):
    """Return the output of ``git describe --tags --long`` run in *path*.

    Returns None when *path* is not a git repository or the repo has no
    tags.  Raises CalledProcessError when git is missing (rc 127) or fails
    for any other reason.
    """
    # refresh the index first so `git describe` sees an up-to-date state
    call(path, "git update-index --refresh", raise_on_error=False)
    response = call(path, "git describe --tags --long", raise_on_error=False)
    if response.rc == 0:
        return response.stdout.strip()
    elif response.rc == 128 and "no names found" in response.stderr.lower():
        # directory is a git repo, but no tags found
        return None
    elif response.rc == 128 and "not a git repository" in response.stderr.lower():
        return None
    elif response.rc == 127:
        log.error("git not found on path: PATH={0}".format(getenv('PATH', None)))
        raise CalledProcessError(response.rc, response.stderr)
    else:
        raise CalledProcessError(response.rc, response.stderr)
def _get_version_from_git_tag(path):
    """Return a PEP440-compliant version derived from the git status,
    or None when the description does not match GIT_DESCRIBE_REGEX.
    """
    m = GIT_DESCRIBE_REGEX.match(_git_describe_tags(path) or '')
    if m is None:
        return None
    version, post_commit, hash = m.groups()
    # exactly on a tag -> plain version; otherwise a dev version with
    # commit count and abbreviated hash as local identifier
    return version if post_commit == '0' else "{0}.dev{1}+{2}".format(version, post_commit, hash)
def get_version(dunder_file):
    """Returns a version string for the current package, derived
    either from git or from a .version file.

    This function is expected to run in two contexts. In a development
    context, where .git/ exists, the version is pulled from git tags.
    Using the BuildPyCommand and SDistCommand classes for cmdclass in
    setup.py will write a .version file into any dist.

    In an installed context, the .version file written at dist build
    time is the source of version information.
    """
    path = abspath(expanduser(dirname(dunder_file)))
    # the baked-in .version file takes precedence over live git metadata
    return _get_version_from_version_file(path) or _get_version_from_git_tag(path)
def write_version_into_init(target_dir, version):
    """Rewrite ``__init__.py`` in *target_dir* so ``__version__`` is the
    literal *version* string, dropping any ``auxlib`` import lines.
    """
    target_init_file = join(target_dir, "__init__.py")
    assert isfile(target_init_file), "File not found: {0}".format(target_init_file)
    with open(target_init_file, 'r') as f:
        init_lines = f.readlines()
    for q in range(len(init_lines)):
        if init_lines[q].startswith('__version__'):
            init_lines[q] = '__version__ = "{0}"\n'.format(version)
        elif init_lines[q].startswith(('from auxlib', 'import auxlib')):
            init_lines[q] = None  # mark the auxlib import line for removal
    print("UPDATING {0}".format(target_init_file))
    remove(target_init_file)
    with open(target_init_file, 'w') as f:
        f.write(''.join(l for l in init_lines if l is not None))
def write_version_file(target_dir, version):
    """Drop a ``.version`` file containing *version* into *target_dir*
    (which must already exist)."""
    assert isdir(target_dir), "Directory not found: {0}".format(target_dir)
    target_file = join(target_dir, ".version")
    with open(target_file, 'w') as f:
        f.write(version)
class BuildPyCommand(build_py):
    """build_py that also stamps the version into the built package."""
    def run(self):
        build_py.run(self)
        # locate the just-built package under build/lib and stamp it
        target_dir = join(self.build_lib, self.distribution.metadata.name)
        write_version_into_init(target_dir, self.distribution.metadata.version)
        write_version_file(target_dir, self.distribution.metadata.version)

# TODO: separate out .version file implementation
class SDistCommand(sdist):
    """sdist that also stamps the version into the release tree."""
    def make_release_tree(self, base_dir, files):
        sdist.make_release_tree(self, base_dir, files)
        # stamp the copied package inside the release tree
        target_dir = join(base_dir, self.distribution.metadata.name)
        write_version_into_init(target_dir, self.distribution.metadata.version)
        write_version_file(target_dir, self.distribution.metadata.version)
class Tox(TestCommand):
    """`python setup.py test` entry point that delegates to tox."""
    # TODO: Make this class inherit from distutils instead of setuptools
    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.tox_args = None

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # import here, because outside the eggs aren't loaded
        from tox import cmdline
        from shlex import split
        args = self.tox_args
        if args:
            args = split(self.tox_args)
        else:
            args = ''
        errno = cmdline(args=args)
        sys.exit(errno)
# swiped from setuptools
def find_packages(where='.', exclude=()):
    """Breadth-first scan for python packages (directories containing an
    ``__init__.py``) under *where*; dotted names matching any fnmatch
    pattern in *exclude* are dropped from the result.
    """
    out = []
    stack = [(convert_path(where), '')]
    while stack:
        where, prefix = stack.pop(0)
        for name in listdir(where):
            fn = join(where, name)
            if ('.' not in name and isdir(fn) and
                    isfile(join(fn, '__init__.py'))
                    ):
                out.append(prefix + name)
                stack.append((fn, prefix + name + '.'))
    # the setuptools bootstrap modules are always excluded
    for pat in list(exclude) + ['ez_setup', 'distribute_setup']:
        out = [item for item in out if not fnmatchcase(item, pat)]
    return out
setup.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
import os
import sys

# `develop` needs setuptools; a plain install works with distutils alone
if 'develop' in sys.argv:
    from setuptools import setup
else:
    from distutils.core import setup

if not (sys.version_info[:2] == (2, 7) or sys.version_info[:2] >= (3, 3)):
    sys.exit("conda is only meant for Python 2.7 or 3.3 and up. "
             "current version: %d.%d" % sys.version_info[:2])

if os.environ.get('CONDA_DEFAULT_ENV'):
    # Try to prevent accidentally installing conda into a non-root conda environment
    sys.exit("""
You appear to be in a non-root conda environment. Conda is only supported in
the root environment. Deactivate and try again. If you believe this message
is in error, run CONDA_DEFAULT_ENV='' python setup.py.
""")

# When executing the setup.py, we need to be able to import ourselves, this
# means that we need to add the src directory to the sys.path.
here = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.join(here, "conda")
sys.path.insert(0, src_dir)
import conda  # NOQA

# the long description comes straight from the README
with open(os.path.join(here, "README.rst")) as f:
    long_description = f.read()

scripts = ['bin/activate',
           'bin/deactivate',
           ]
if sys.platform == 'win32':
    # Powershell scripts should go here
    scripts.extend(['bin/activate.bat',
                    'bin/deactivate.bat'])

setup(
    name=conda.__name__,
    version=conda.__version__,
    author=conda.__author__,
    author_email=conda.__email__,
    url=conda.__url__,
    license=conda.__license__,
    description=conda.__summary__,
    long_description=long_description,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
    ],
    # uses the vendored auxlib find_packages, not setuptools'
    packages=conda._vendor.auxlib.packaging.find_packages(exclude=("tests", "conda-env",
                                                                   "build", "utils", ".tox")),
    cmdclass={
        # version-stamping build commands from the vendored auxlib
        'build_py': conda._vendor.auxlib.BuildPyCommand,
        'sdist': conda._vendor.auxlib.SDistCommand,
    },
    install_requires=[
        'pycosat >=0.6.1',
    ],
    entry_points={
        'console_scripts': [
            "conda = conda.cli.main:main"
        ],
    },
    scripts=scripts,
    zip_safe=False,
)
| conda/_vendor/auxlib/packaging.py
--- a/conda/_vendor/auxlib/packaging.py
+++ b/conda/_vendor/auxlib/packaging.py
@@ -69,11 +69,14 @@
import sys
from collections import namedtuple
from logging import getLogger
-from os import getenv, remove
+from os import getenv, remove, listdir
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from re import compile
from shlex import split
from subprocess import CalledProcessError, Popen, PIPE
+from fnmatch import fnmatchcase
+from distutils.util import convert_path
+
try:
from setuptools.command.build_py import build_py
from setuptools.command.sdist import sdist
@@ -81,8 +84,8 @@
except ImportError:
from distutils.command.build_py import build_py
from distutils.command.sdist import sdist
- TestCommand = object
+ TestCommand = object
log = getLogger(__name__)
@@ -222,3 +225,21 @@ def run_tests(self):
args = ''
errno = cmdline(args=args)
sys.exit(errno)
+
+
+# swiped from setuptools
+def find_packages(where='.', exclude=()):
+ out = []
+ stack = [(convert_path(where), '')]
+ while stack:
+ where, prefix = stack.pop(0)
+ for name in listdir(where):
+ fn = join(where, name)
+ if ('.' not in name and isdir(fn) and
+ isfile(join(fn, '__init__.py'))
+ ):
+ out.append(prefix + name)
+ stack.append((fn, prefix + name + '.'))
+ for pat in list(exclude) + ['ez_setup', 'distribute_setup']:
+ out = [item for item in out if not fnmatchcase(item, pat)]
+ return out
setup.py
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,6 @@
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
import os
import sys
-from setuptools import find_packages
if 'develop' in sys.argv:
from setuptools import setup
else:
@@ -61,7 +60,8 @@
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
],
- packages=find_packages(exclude=("tests", "conda-env", "build", "utils", ".tox")),
+ packages=conda._vendor.auxlib.packaging.find_packages(exclude=("tests", "conda-env",
+ "build", "utils", ".tox")),
cmdclass={
'build_py': conda._vendor.auxlib.BuildPyCommand,
'sdist': conda._vendor.auxlib.SDistCommand, |
Activate issue with parallel activations - "File exists" in symlinked path
I'm working in a cluster environment where I am trying to activate a conda environment in multiple parallel processes. After recently upgrading to the latest release (4.1.2), I've been receiving a "File exists" error in some of those processes, similar to the below:
```
Traceback (most recent call last):
File "/symln_home/anaconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 48, in main
activate.main()
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/cli/activate.py", line 163, in main
conda.install.symlink_conda(prefix, root_dir, shell)
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/install.py", line 582, in symlink_conda
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/install.py", line 599, in symlink_conda_hlp
symlink_fn(root_file, prefix_file)
OSError: [Errno 17] File exists
```
I've been able to repro this issue on a laptop when my PATH to the root anaconda install contains a symlinked directory ("symln_home," in this example). Doesn't seem to be a problem when the PATH is symlink-free. The below has produced the error consistently for me on OS X:
```
$ echo $PATH
/symln_home/anaconda/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin
```
``` python
from multiprocessing import Pool
import os
call = ['source activate myenv && echo $PATH' for i in range(30)]
pool = Pool(4)
pool.map(os.system, call)
pool.close()
```
Is this a bug, or are there any workarounds for this particular use case? In my cluster environment, my HOME directory (where I've installed anaconda) is symlinked, which I would assume is a fairly common setup in shared computing (I don't admin it, so unfortunately I can't change that aspect).
Activate issue with parallel activations - "File exists" in symlinked path
I'm working in a cluster environment where I am trying to activate a conda environment in multiple parallel processes. After recently upgrading to the latest release (4.1.2), I've been receiving a "File exists" error in some of those processes, similar to the below:
```
Traceback (most recent call last):
File "/symln_home/anaconda/bin/conda", line 6, in <module>
sys.exit(main())
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 48, in main
activate.main()
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/cli/activate.py", line 163, in main
conda.install.symlink_conda(prefix, root_dir, shell)
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/install.py", line 582, in symlink_conda
symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
File "/Users/tomflem/anaconda/lib/python2.7/site-packages/conda/install.py", line 599, in symlink_conda_hlp
symlink_fn(root_file, prefix_file)
OSError: [Errno 17] File exists
```
I've been able to repro this issue on a laptop when my PATH to the root anaconda install contains a symlinked directory ("symln_home," in this example). Doesn't seem to be a problem when the PATH is symlink-free. The below has produced the error consistently for me on OS X:
```
$ echo $PATH
/symln_home/anaconda/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin
```
``` python
from multiprocessing import Pool
import os
call = ['source activate myenv && echo $PATH' for i in range(30)]
pool = Pool(4)
pool.map(os.system, call)
pool.close()
```
Is this a bug, or are there any workarounds for this particular use case? In my cluster environment, my HOME directory (where I've installed anaconda) is symlinked, which I would assume is a fairly common setup in shared computing (I don't admin it, so unfortunately I can't change that aspect).
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, normpath, normcase)
on_win = bool(sys.platform == "win32")
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
    import conda.config as config
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    pkgs_dirs = [sys.prefix]

    class Locked(object):
        # no-op stand-in for conda.lock.Locked (locking is unnecessary in
        # the single-process installer context)
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass

    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive.  If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")

    def url_path(path):
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path

    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url

    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'

    # override the initial placeholder with the conventional pkgs location
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    import ctypes
    from ctypes import wintypes

    # kernel32 hard-link primitive
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is not exported (older Windows, e.g. XP)
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise

        # bat file redirect
        if not os.path.isfile(dst + '.bat'):
            with open(dst + '.bat', 'w') as f:
                f.write('@echo off\ncall "%s" %%*\n' % src)

        # TODO: probably need one here for powershell at some point

        # This one is for bash/cygwin/msys
        # set default shell to bash.exe when not provided, as that's most common
        if not shell:
            shell = "bash.exe"

        # technically these are "links" - but islink doesn't work on win
        if not os.path.isfile(dst):
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
        `No handlers could be found for logger "patch"`
        http://bugs.python.org/issue16539
    """
    # every hook is a no-op: records are dropped, no lock is created
    def handle(self, record):
        pass

    def emit(self, record):
        pass

    def createLock(self):
        self.lock = None
log.addHandler(NullHandler())
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* as a hard link, soft link, or copy of *src*,
    according to *linktype* (LINK_HARD / LINK_SOFT / LINK_COPY).
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit, then retry
    the failed operation."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback that logs a warning instead of raising."""
    if exc_info[1].errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif exc_info[1].errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def exp_backoff_fn(fn, *args):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    if not on_win:
        # backoff is only needed on Windows; elsewhere call straight through
        return fn(*args)

    import random
    # with max_tries = 5, max total time ~= 3.2 sec
    # with max_tries = 6, max total time ~= 6.5 sec
    max_tries = 6
    for n in range(max_tries):
        try:
            result = fn(*args)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (errno.EPERM, errno.EACCES):
                # permission error: sleep (2**n + jitter) * 0.1 s and retry;
                # re-raise on the final attempt
                if n == max_tries-1:
                    raise
                time.sleep(((2 ** n) + random.random()) * 0.1)
            else:
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
            return
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
            if trash and move_path_to_trash(path):
                return
    elif isdir(path):
        # On Windows, always move to trash first.
        if trash and on_win and move_path_to_trash(path, preclean=False):
            return
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    if trash and move_path_to_trash(path):
                        return
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            # retry with the read-only bit cleared
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # last resort on Windows: shell out to `rd`
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                    # back off linearly before the next attempt
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.
    """
    try:
        os.rmdir(path)
    except OSError:  # directory might not exist or not be empty
        pass
def yield_lines(path):
    """Generate the meaningful lines of the text file at *path*.

    Each yielded line is stripped of surrounding whitespace; blank lines
    and comment lines (starting with '#') are skipped.
    """
    # Use a context manager so the file handle is always closed -- the
    # previous version left it open until garbage collection.
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
prefix_placeholder = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    res = {}
    try:
        for line in yield_lines(path):
            try:
                # new-style line: <placeholder> <mode> <filepath> (quoted fields allowed)
                placeholder, mode, f = [x.strip('"\'') for x in
                                        shlex.split(line, posix=False)]
                res[f] = (placeholder, mode)
            except ValueError:
                # old-style line: just a filepath; assume the default
                # placeholder and text mode
                res[line] = (prefix_placeholder, 'text')
    except IOError:
        # no has_prefix file -> empty mapping
        pass
    return res
class PaddingError(Exception):
    """Raised when a replacement prefix is longer than its placeholder."""
    pass
def binary_replace(data, a, b):
    """
    Replace every occurrence of the placeholder `a` inside `data` with `b`,
    padding each affected null-terminated field with b'\\0' so that the
    overall length of `data` is preserved. All arguments must be bytes.

    Raises PaddingError when `b` is longer than `a` (negative padding).
    """
    def _pad_field(match):
        count = match.group().count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return match.group().replace(a, b) + b'\0' * pad

    field_pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    out = field_pat.sub(_pad_field, data)
    assert len(out) == len(data)
    return out
def replace_long_shebang(mode, data):
    """
    In 'text' mode, rewrite a shebang longer than the 127-byte kernel limit
    to `#!/usr/bin/env <name>`, preserving any interpreter options. Other
    modes return `data` unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match:
        shebang, interpreter, options = match.groups()
        if len(shebang) > 127:
            name = interpreter.decode('utf-8').split('/')[-1]
            replacement = '#!/usr/bin/env {0}{1}'.format(name,
                                                         options.decode('utf-8'))
            data = data.replace(shebang, replacement.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """
    Substitute `placeholder` with `new_prefix` inside the bytes `data`.

    'text' mode performs a plain byte substitution; 'binary' mode uses
    binary_replace to keep the data length constant. Any other mode aborts
    the process.
    """
    if mode == 'text':
        return data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    if mode == 'binary':
        # Skip binary replacement in Windows. Some files do have prefix
        # information embedded, but this should not matter, as it is not
        # used for things like RPATH.
        if on_win:
            logging.debug("Skipping prefix replacement in binary on Windows")
            return data
        return binary_replace(data, placeholder.encode('utf-8'),
                              new_prefix.encode('utf-8'))
    sys.exit("Invalid mode: %s" % mode)
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """
    Rewrite the build-time `placeholder` embedded in the file at `path`
    to `new_prefix` (and, on Unix, shorten over-long shebangs). The file
    is left untouched when no replacement occurs; its permission bits are
    preserved when it is rewritten.
    """
    if on_win:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """
    Split a distribution string into (channel, dist_name).

    Any trailing '[features]' bracket and '.tar.bz2' suffix are stripped;
    a missing 'channel::' prefix maps to the 'defaults' channel.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    if '::' in dist:
        channel, name = dist.split('::', 1)
        return channel, name
    return 'defaults', dist
def dist2quad(dist):
    """Return (name, version, build, channel) for a distribution string."""
    channel, base = dist2pair(dist)
    # Pad with empty strings so malformed names still yield a 4-tuple.
    name, version, build = (base.rsplit('-', 2) + ['', ''])[:3]
    return (name, version, build, channel)
def dist2name(dist):
    """Return just the package name portion of a distribution string."""
    name, _, _, _ = dist2quad(dist)
    return name
def name_dist(dist):
    """Alias of dist2name: extract the package name from `dist`."""
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Return the on-disk filename for `dist`, with the given suffix."""
    _, base = dist2pair(dist)
    return base + suffix
def dist2dirname(dist):
    """Return the extraction directory name (the filename minus suffix)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our intenral cache
    meta.update(extra_info)
    if not meta.get('url'):
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # keep the in-memory linked-package cache in sync, but only if the
    # cache for this prefix has already been loaded
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    # menuinst is an optional dependency; a failed import must not abort
    # the install, so swallow and log any exception.
    try:
        import menuinst
    except:
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        # best-effort: a single bad menu spec should not fail the whole
        # link/unlink operation
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    # a missing script means there is nothing to run: report success
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # NOTE: `env` aliases os.environ (not a copy), so these exported
    # variables also leak into the current process environment.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for `dist`, or None if none exists."""
    rec = package_cache().get(dist, {})
    urls = rec.get('urls', (None,))
    if not urls:
        return None
    return urls[0]
def read_icondata(source_dir):
    """
    Return the package icon (info/icon.png under `source_dir`) as a
    base64-encoded string, or None if the package has no icon.
    """
    import base64
    try:
        # Use a context manager so the file handle is closed promptly
        # (the original left it to the garbage collector).
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        pass
    return None
def read_no_link(info_dir):
    """
    Return the set of file names listed in info/no_link and
    info/no_softlink -- files that must be copied rather than linked.
    Missing listing files are ignored.
    """
    names = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            names |= set(yield_lines(join(info_dir, listing)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """
    Mirror the conda entry points from `root_dir` into `prefix`, using
    symlinks on Unix and .bat redirect files on Windows.
    """
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """
    Link the conda/activate/deactivate scripts from `root_dir/where` into
    `prefix/where` via `symlink_fn`, replacing stale links when possible.
    """
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                os.remove(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS))):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Return True if a real hard link can be created from the package cache
    in `pkgs_dir` into `prefix`, by test-linking one known file. The
    temporary link (and an empty `prefix`) is always cleaned up.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}  # '<channel>::<dist>' key -> {'files': [], 'dirs': [], 'urls': []}
fname_table_ = {}  # package path / file:// URL -> channel prefix ('' for defaults)
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    # ensure the cache tables exist before mutating them
    package_cache()
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed: there is no usable URL
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    # neither a tarball nor a complete extracted dir: nothing to record
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # best-effort: failing to persist the URL is not fatal
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # Use a context manager so the urls.txt handle is closed
            # promptly instead of leaking until garbage collection.
            with open(join(pdir, 'urls.txt')) as fi:
                data = fi.read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix cached for `url`, or None if not cached."""
    package_cache()  # make sure fname_table_ has been populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # second pass (p=1): accept the first directory even though a
                # same-named package from another channel must be evicted
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
"""
Returns the (set of canonical names) of all fetched packages
"""
return set(dist for dist, rec in package_cache().items() if rec['files'])
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both the path key and its file:// URL key from the table
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
"""
return the (set of canonical names) of all extracted packages
"""
return set(dist for dist, rec in package_cache().items() if rec['dirs'])
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # the tarball is still cached; just forget the extracted copies
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        # extract into a temp dir first, then rename into place, so a
        # partially extracted package is never visible under its real name
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}  # maps prefix -> {dist: metadata record}
def load_linked_data(prefix, dist, rec=None):
    """
    Load (or accept, via `rec`) the conda-meta record for `dist` in
    `prefix`, fill in derived fields (fn/url/channel/schannel/link),
    cache it in linked_data_, and return it. Returns None for missing or
    invalid metadata files.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # ensure the per-prefix cache dict exists before inserting below
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # NOTE(review): `channel[0] != '<unknown>'` compares a single
        # character to a whole string and is therefore always True; this
        # looks like it should be `channel != '<unknown>'` -- confirm.
        if not url or (url.startswith('file:') and channel[0] != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = url_channel(url)
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """
    Drop `dist` from the in-memory metadata cache for `prefix`; when
    `delete` is true, also remove its conda-meta .json file on disk.
    """
    cache = linked_data_.get(prefix)
    if cache and dist in cache:
        del cache[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    # walk upward one path component at a time until we reach a prefix
    # that has cached linked data (or run out of components)
    while True:
        if path in linked_data_:
            if dist:
                # `path` is a prefix and `dist` the component we stripped
                delete_linked_data(path, dist)
                return True
            else:
                # `path` itself was a cached prefix: forget it entirely
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    cached = linked_data_.get(prefix)
    if cached is not None:
        return cached
    cached = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fname in os.listdir(meta_dir):
            if fname.endswith('.json'):
                load_linked_data(prefix, fname[:-5])
    return cached
def linked(prefix):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    # iterating a dict yields its keys
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    meta = load_meta(prefix, dist)
    return meta
def delete_trash(prefix=None):
    """
    Best-effort removal of every .trash directory under the package dirs.
    The `prefix` argument is accepted for API compatibility and unused.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not isdir(trash_dir):
            continue
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    path = join(prefix, f) if f else prefix
    return move_path_to_trash(path)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash
    """
    # Try deleting the trash every time we use it.
    if preclean:
        delete_trash()
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                continue
        # NOTE(review): tempfile.mktemp is race-prone in general; here the
        # name is immediately consumed by os.rename, which fails loudly.
        trash_file = tempfile.mktemp(dir=trash_dir)
        try:
            os.rename(path, trash_file)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_file, e))
        else:
            log.debug("Moved to trash: %s" % (path,))
            delete_linked_data_any(path)
            if not preclean:
                rm_rf(trash_file, max_retries=1, trash=False)
            return True
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # prefix-bearing, no-link, and symlinked files must be copied
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        # rewrite the embedded build prefix in each flagged file
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if config.shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # a post-link script may have written an alternate files list
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor directory up to (not including) the prefix
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # deepest paths first, so children empty out before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """
    Print the contents of <prefix>/.messages.txt (if any) to stdout and
    remove the file afterwards.
    """
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            text = fi.read()
        sys.stdout.write(text)
    except IOError:
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> set of distributions
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    to_remove = set()
    for group in by_name.values():
        # `group` is the set of packages sharing one name
        if len(group) == 1:
            # a unique name needs no pruning
            continue
        if group & keep_dists:
            # keep the protected ones, drop the rest of the group
            to_remove |= group - keep_dists
        else:
            # nothing protected: keep only the highest-sorting package
            to_remove |= set(sorted(group)[:-1])
    return sorted(to_remove)
# =========================== end API functions ==========================
def main():
    # This CLI is only invoked from the self-extracting shell installers
    global pkgs_dirs
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # rebind the module-level package dirs to the installer's own cache
    pkgs_dirs = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    assert idists
    # probe once whether hard links work between cache and prefix; fall
    # back to copying when they do not
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)
    messages(prefix)
    # prune older duplicate builds that were not explicitly requested
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
    main()
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, normpath, normcase)
on_win = bool(sys.platform == "win32")  # True on native Windows builds
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
    import conda.config as config
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    pkgs_dirs = [sys.prefix]
    class Locked(object):
        # no-op stand-in for conda.lock.Locked (context manager interface)
        def __init__(self, *args, **kwargs):
            pass
        def __enter__(self):
            pass
        def __exit__(self, exc_type, exc_value, traceback):
            pass
    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation
        Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa
        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")
    def url_path(path):
        # turn an absolute filesystem path into a file:// URL
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path
    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url
    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    import ctypes
    from ctypes import wintypes
    # Win32 API bindings used to create hard and soft links on Windows
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is missing on older Windows (e.g. XP)
        CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.
        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.
        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        # bat file redirect
        if not os.path.isfile(dst + '.bat'):
            with open(dst + '.bat', 'w') as f:
                f.write('@echo off\ncall "%s" %%*\n' % src)
        # TODO: probably need one here for powershell at some point
        # This one is for bash/cygwin/msys
        # set default shell to bash.exe when not provided, as that's most common
        if not shell:
            shell = "bash.exe"
        # technically these are "links" - but islink doesn't work on win
        if not os.path.isfile(dst):
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X
        os.chmod(dst, mode)
# module logger plus the dedicated stdout logger used for user-facing errors
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """ Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """
    def handle(self, record):
        pass
    def emit(self, record):
        pass
    def createLock(self):
        self.lock = None
log.addHandler(NullHandler())  # silence "no handlers" warnings by default
# Link types accepted by _link(); link_name_map provides the human-readable
# names recorded in the conda-meta 'link' entry.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize `dst` from `src` using the requested link type."""
    if linktype == LINK_HARD:
        (win_hard_link if on_win else os.link)(src, dst)
        return
    if linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
        return
    if linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src):
            target = os.readlink(src)
            if not target.startswith('/'):
                os.symlink(target, dst)
                return
        shutil.copy2(src, dst)
        return
    raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """
    shutil.rmtree onerror callback: log, rather than raise, the reason a
    path could not be removed.
    """
    # exc_info[1] may be any exception type; guard against ones that do
    # not carry an errno attribute (the original raised AttributeError).
    err = getattr(exc_info[1], 'errno', None)
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def exp_backoff_fn(fn, *args):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    # On other platforms the call is made exactly once, with no retry.
    if not on_win:
        return fn(*args)
    import random
    # with max_tries = 5, max total time ~= 3.2 sec
    # with max_tries = 6, max total time ~= 6.5 sec
    max_tries = 6
    for n in range(max_tries):
        try:
            result = fn(*args)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            # only permission-style errors are retried; anything else
            # (and the final attempt) propagates to the caller
            if e.errno in (errno.EPERM, errno.EACCES):
                if n == max_tries-1:
                    raise
                time.sleep(((2 ** n) + random.random()) * 0.1)
            else:
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
            return
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
            # unlink failed: fall back to moving the file to the trash
            if trash and move_path_to_trash(path):
                return
    elif isdir(path):
        # On Windows, always move to trash first.
        if trash and on_win and move_path_to_trash(path, preclean=False):
            return
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    if trash and move_path_to_trash(path):
                        return
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        # Windows escalation: retry clearing read-only bits,
                        # then shell out to `rd /s /q` as a last resort
                        try:
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """
    Remove the directory `path` when it exists and is empty.

    Any OSError (missing path, non-empty directory, permissions) is
    swallowed, so this is safe to call speculatively.
    """
    try:
        os.rmdir(path)
    except OSError:
        # Nothing to do: rmdir only succeeds on existing, empty dirs.
        pass
def yield_lines(path):
    """
    Yield the stripped, non-empty, non-comment lines of the file at `path`.

    Blank lines and lines starting with '#' are skipped. Raises IOError
    if the file cannot be opened (on first iteration, since this is a
    generator).
    """
    # Use a context manager so the descriptor is closed deterministically;
    # the original `for line in open(path)` left closing to the GC.
    with open(path) as fi:
        for line in fi:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            yield line
# Placeholder path baked into relocatable packages at build time; it is
# rewritten to the real install prefix at link time (see update_prefix).
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
def read_has_prefix(path):
    """
    Parse a package's `has_prefix` file.

    Returns a dict mapping each file name to a (placeholder, mode)
    tuple. Lines that do not split into the three quoted fields are
    treated as old-style entries: the whole line is the file name and
    the defaults (prefix_placeholder, 'text') apply. A missing file
    yields an empty dict.
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                fields = [part.strip('"\'') for part in
                          shlex.split(line, posix=False)]
                placeholder, mode, fname = fields
            except ValueError:
                # Old-style line: just a file name, defaults apply.
                mapping[line] = (prefix_placeholder, 'text')
            else:
                mapping[fname] = (placeholder, mode)
    except IOError:
        pass
    return mapping
class PaddingError(Exception):
    """
    Raised by binary_replace when the replacement prefix is longer than
    the placeholder, leaving no room for null-byte padding.
    """
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` inside each null-terminated chunk; the chunk is
    padded with b'\\0' so the overall length is unchanged. All input
    arguments are expected to be bytes objects. Raises PaddingError when
    `b` is longer than `a` (no room left to pad).
    """
    def _pad_chunk(match):
        chunk = match.group()
        count = chunk.count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return chunk.replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(_pad_chunk, data)
    # Length preservation is the whole point of the padding scheme.
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """
    Rewrite an over-long shebang line (more than 127 bytes) in text-mode
    `data` to the '#!/usr/bin/env <name>' form, since kernels truncate
    long interpreter paths. Non-text data is returned unchanged.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if not match:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) > 127:
        name = interpreter.decode('utf-8').split('/')[-1]
        replacement = '#!/usr/bin/env {0}{1}'.format(name,
                                                     options.decode('utf-8'))
        data = data.replace(shebang, replacement.encode('utf-8'))
    return data
def replace_prefix(mode, data, placeholder, new_prefix):
    """
    Substitute `placeholder` with `new_prefix` inside `data`.

    'text' mode does a plain byte-level replacement; 'binary' mode uses
    the length-preserving binary_replace. Binary replacement is skipped
    on Windows, where embedded prefixes do not matter for RPATH-style
    lookups. Any other mode aborts the process.
    """
    placeholder_bytes = placeholder.encode('utf-8')
    prefix_bytes = new_prefix.encode('utf-8')
    if mode == 'text':
        return data.replace(placeholder_bytes, prefix_bytes)
    if mode == 'binary':
        if on_win:
            # Skip binary replacement in Windows. Some files do have prefix
            # information embedded, but this should not matter, as it is not
            # used for things like RPATH.
            logging.debug("Skipping prefix replacement in binary on Windows")
            return data
        return binary_replace(data, placeholder_bytes, prefix_bytes)
    sys.exit("Invalid mode: %s" % mode)
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """
    Rewrite the embedded `placeholder` prefix inside the file at `path`
    to `new_prefix`, using text or binary replacement depending on
    `mode`. The file is rewritten in place (permission bits preserved)
    only when its contents actually change.
    """
    if on_win:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        # Also shorten over-long shebang lines while we are rewriting.
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """
    Split a distribution identifier into (schannel, dist_name).

    Strips any trailing '[features]' suffix and a '.tar.bz2' extension;
    a missing 'channel::' prefix maps to the 'defaults' channel.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    pieces = dist.split('::', 1)
    if len(pieces) == 2:
        return pieces[0], pieces[1]
    return 'defaults', pieces[0]


def dist2quad(dist):
    """Return the (name, version, build, schannel) quad for `dist`."""
    channel, base = dist2pair(dist)
    # Pad so identifiers with fewer than three dash-separated fields
    # still yield empty version/build strings.
    fields = base.rsplit('-', 2) + ['', '']
    return (fields[0], fields[1], fields[2], channel)


def dist2name(dist):
    """Return just the package name of `dist`."""
    name, _, _, _ = dist2quad(dist)
    return name


def name_dist(dist):
    """Alias for dist2name."""
    return dist2name(dist)


def dist2filename(dist, suffix='.tar.bz2'):
    """Return the file name for `dist` (channel stripped) plus `suffix`."""
    _, base = dist2pair(dist)
    return base + suffix


def dist2dirname(dist):
    """Return the extraction directory name for `dist` (no suffix)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Merges the package's info/index.json with `extra_info`, writes the
    result to <prefix>/conda-meta/<dist>.json, and refreshes the
    in-memory linked-data cache when that prefix is already loaded.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if not meta.get('url'):
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    # Keep the in-memory linked-data cache in sync for loaded prefixes.
    if prefix in linked_data_:
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)
    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # Underscore-prefixed environment names are treated as private.
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # NOTE(review): bare except deliberately swallows any import failure;
        # menu creation is optional and must never break an install.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # Best-effort per file: report the failure and continue.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    The script is looked up by naming convention:
    <prefix>/bin/.<name>-<action>.sh (Scripts\\.<name>-<action>.bat on
    Windows). A missing script counts as success.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # Package ships no script for this action: trivially successful.
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # No command interpreter configured; cannot run the script.
            return False
    else:
        # Some BSDs lack bash; fall back to plain sh there.
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # Expose package identity to the script via the environment.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for `dist` in the package cache, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    if urls:
        return urls[0]
    return None
def read_icondata(source_dir):
    """
    Return the base64-encoded contents of info/icon.png inside
    `source_dir`, or None if the icon cannot be read.
    """
    import base64
    try:
        # Context manager closes the handle promptly; the original
        # `open(...).read()` left the file object for the GC to close.
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode('utf-8')
    except IOError:
        return None
def read_no_link(info_dir):
    """
    Collect the file names listed in info/no_link and info/no_softlink.

    Missing listing files are ignored; returns a (possibly empty) set of
    names that must be copied rather than linked.
    """
    names = set()
    for listing in ('no_link', 'no_softlink'):
        try:
            names.update(yield_lines(join(info_dir, listing)))
        except IOError:
            pass
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """
    Mirror the conda/activate/deactivate entry points of the root
    environment into `prefix` (symlinks on Unix, redirect .bat files on
    Windows via win_conda_bat_redirect).
    """
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """
    Helper for symlink_conda: link the conda entry-point scripts from
    `root_dir`/`where` into `prefix`/`where` using `symlink_fn`.
    """
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                os.remove(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
                # A usable link already exists; failure to replace it is fine.
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================

def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether hard links work between the package cache and `prefix`
    by linking one known file. Returns True only when a real hard link
    (not a silently-substituted symlink) was created. The probe file and
    any directory created for it are always cleaned up.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}  # canonical dist name -> {'files': [...], 'dirs': [...], 'urls': [...]}
fname_table_ = {}    # package path / file:// URL -> channel prefix ('' for defaults)
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the cache tables are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename (from a directory scan) has no channel info.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # Count an extracted copy only when it looks complete (has metadata).
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # Record the channel prefix under both path and file:// spellings.
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Best effort: a read-only cache directory simply skips the append.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion (add_cached_package calls back into package_cache)
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # Scan urls.txt newest-first so the most recent URL for a
            # filename wins.
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix cached for `url`, or None if unknown."""
    package_cache()  # ensure the lookup tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse the directory holding the existing copy.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # Pass 0 only accepts conflict-free dirs; pass 1 takes the
            # first dir and reports the conflicting cached package name.
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched

def fetched():
    """
    Return the set of canonical names of all packages whose compressed
    archive is present in the package cache.
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Return the full path of the fetched package archive, or None if the
    package is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both spellings (native path and file:// form) from the
        # name table before deleting the archive.
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted

def extracted():
    """
    Return the set of canonical names of all packages with an extracted
    copy in the package cache.
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    Return the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        # The tarball is still cached; keep the record, clear the dirs.
        rec['dirs'] = []
    else:
        # Nothing remains for this package; drop its record entirely.
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        # Extract into a temp path and rename into place so a partially
        # extracted directory is never mistaken for a complete package.
        path = fname[:-8]
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            # NOTE(review): extractall() trusts archive member paths; package
            # tarballs are assumed trusted here -- confirm upstream validation.
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}  # prefix -> {canonical dist name -> conda-meta record}
def load_linked_data(prefix, dist, rec=None):
    """
    Load (or accept via `rec`) the conda-meta record for `dist` inside
    `prefix`, normalize its url/channel fields, and store it in the
    in-memory linked_data_ cache. Returns the record, or None when the
    metadata file is missing or its filename does not match.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Caller supplied the record; just make sure the prefix cache exists.
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # NOTE(review): `channel[0] != '<unknown>'` compares a single
        # character to a whole string and is therefore always True --
        # likely meant `channel != '<unknown>'`. Confirm before changing.
        if not url or (url.startswith('file:') and channel[0] != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = url_channel(url)
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    # Normalize 'link' to a truthy value so old records behave uniformly.
    rec['link'] = rec.get('link') or True
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """
    Drop `dist` from the in-memory linked-package cache for `prefix`
    and, when `delete` is True, remove its conda-meta JSON file too.
    """
    prefix_recs = linked_data_.get(prefix)
    if prefix_recs is not None and dist in prefix_recs:
        del prefix_recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    """
    Purge cached linked-package data for `path`, which may be either a
    whole prefix or a dist directory inside one. Walks up the path one
    component at a time; returns True when anything was removed.
    """
    dist = ''
    while True:
        if path in linked_data_:
            if not dist:
                # `path` is itself a prefix: drop its entire record set.
                del linked_data_[path]
            else:
                delete_linked_data(path, dist)
            return True
        path, dist = os.path.split(path)
        if not dist:
            # Reached the filesystem root without a match.
            return False
def load_meta(prefix, dist):
    """
    Return the install metadata for a package linked into `prefix`, or
    None when that package is not linked there.
    """
    return linked_data(prefix).get(dist)
def linked_data(prefix):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # The file name minus '.json' is the dist name.
                    load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """Return the set of canonical names of packages linked in `prefix`."""
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def delete_trash(prefix=None):
    """
    Best-effort removal of the .trash directory inside every package
    cache directory. `prefix` is unused and kept for API compatibility.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not isdir(trash_dir):
            continue
        try:
            log.debug("Trying to delete the trash dir %s" % trash_dir)
            rm_rf(trash_dir, max_retries=1, trash=False)
        except OSError as e:
            # Leave it for next time; something is still holding files open.
            log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Deprecated wrapper around `move_path_to_trash`.

    Moves `prefix/f` (or `prefix` itself when `f` is falsy) into the
    trash; `tempdir` is accepted for compatibility and ignored.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash

    Tries each package cache directory in turn until one accepts the
    rename. Returns True on success, False when no trash dir worked.
    """
    # Try deleting the trash every time we use it.
    if preclean:
        delete_trash()
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            if e1.errno != errno.EEXIST:
                # Cannot create a trash dir here; try the next cache dir.
                continue
        trash_file = tempfile.mktemp(dir=trash_dir)
        try:
            os.rename(path, trash_file)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_file, e))
        else:
            log.debug("Moved to trash: %s" % (path,))
            delete_linked_data_any(path)
            if not preclean:
                # No preclean ran, so scrub this entry immediately.
                rm_rf(trash_file, max_retries=1, trash=False)
            return True
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the pre-link script, links/copies every file, rewrites embedded
    prefixes, installs menus, runs the post-link script, and finally
    writes the conda-meta record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # Files needing prefix rewriting, no-link files, and symlinks
            # must be real copies, never hard links into the cache.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if config.shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # NOTE(review): an alternate .files listing apparently written
            # by an earlier step takes precedence -- verify the producer.
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # Collect every ancestor directory (inside the prefix) of removed
        # files so empty ones can be pruned deepest-first below.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """
    Print the contents of <prefix>/.messages.txt (if present) to stdout,
    then remove the file regardless of whether it could be read.
    """
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        # No messages file; nothing to show.
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Return the sorted list of distributions to remove so that at most
    one distribution per package name remains. Distributions listed in
    `keep_dists` are never selected for removal.
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> distributions sharing it
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for group in by_name.values():
        # `group` holds all distributions sharing one package name.
        if len(group) == 1:
            # A single copy never needs removal.
            continue
        if group & keep_dists:
            # Protected entries present: drop only the unprotected ones.
            removals |= group - keep_dists
        else:
            # No protected entries: keep only the highest-sorting copy.
            removals |= set(sorted(group)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================

def main():
    """
    Minimal CLI used by the self-extracting installers: links a set of
    extracted packages into a prefix and prunes stale duplicates.
    """
    # This CLI is only invoked from the self-extracting shell installers
    global pkgs_dirs
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # The installer layout uses a single package cache under the prefix.
    pkgs_dirs = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    assert idists
    # Probe hard-link support once; every package uses the same linktype.
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)
    messages(prefix)
    # Rename aside (or remove) stale duplicate conda-meta records so only
    # the freshly linked distributions are considered installed.
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)


if __name__ == '__main__':
    main()
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -613,7 +613,7 @@ def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
symlink_fn(root_file, prefix_file)
except (IOError, OSError) as e:
if (os.path.lexists(prefix_file) and
- (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS))):
+ (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
.format(root_file, prefix_file))
else: |
`conda update --all` wants to downgrade from conda-canary (and then things go haywire)
I just did the following to try out conda-canary:
```
>conda config --add channels conda-canary
>conda update conda
...
The following packages will be UPDATED:
conda: 4.1.9-py35_0 --> 4.2.1-py35_0 conda-canary
Proceed ([y]/n)? y
...
```
I then thought I might update all the packages, and conda wants to downgrade itself:
```
>conda update --all
Fetching package metadata .........
Solving package specifications: ..........
Package plan for installation in environment C:\Anaconda3:
The following packages will be downloaded:
package | build
---------------------------|-----------------
cython-0.24.1 | py35_0 2.2 MB
llvmlite-0.12.1 | py35_0 5.8 MB
lxml-3.6.1 | py35_0 1.6 MB
pytz-2016.6.1 | py35_0 171 KB
pyzmq-15.3.0 | py35_0 551 KB
ruamel_yaml-0.11.14 | py35_0 217 KB
sip-4.18 | py35_0 241 KB
tornado-4.4.1 | py35_0 595 KB
wcwidth-0.1.7 | py35_0 21 KB
win_unicode_console-0.5 | py35_0 27 KB
conda-4.1.11 | py35_0 247 KB
numba-0.27.0 | np111py35_0 1.9 MB
numexpr-2.6.1 | np111py35_0 142 KB
pickleshare-0.7.3 | py35_0 8 KB
prompt_toolkit-1.0.3 | py35_0 308 KB
qt-4.8.7 | vc14_9 50.5 MB
traitlets-4.2.2 | py35_0 113 KB
anaconda-client-1.5.1 | py35_0 166 KB
bokeh-0.12.1 | py35_0 3.3 MB
conda-build-1.21.9 | py35_0 319 KB
ipython-5.0.0 | py35_0 1.0 MB
pyqt-4.11.4 | py35_7 3.8 MB
pyopenssl-16.0.0 | py35_0 68 KB
jupyter_console-5.0.0 | py35_0 69 KB
------------------------------------------------------------
Total: 73.2 MB
The following packages will be UPDATED:
anaconda-client: 1.4.0-py35_0 --> 1.5.1-py35_0
bokeh: 0.12.0-py35_0 --> 0.12.1-py35_0
conda-build: 1.21.6-py35_0 --> 1.21.9-py35_0
cython: 0.24-py35_0 --> 0.24.1-py35_0
ipython: 4.2.0-py35_0 --> 5.0.0-py35_0
jupyter_console: 4.1.1-py35_0 --> 5.0.0-py35_0
llvmlite: 0.11.0-py35_0 --> 0.12.1-py35_0
lxml: 3.6.0-py35_0 --> 3.6.1-py35_0
numba: 0.26.0-np111py35_0 --> 0.27.0-np111py35_0
numexpr: 2.6.0-np111py35_0 --> 2.6.1-np111py35_0
pickleshare: 0.7.2-py35_0 --> 0.7.3-py35_0
prompt_toolkit: 1.0.0-py35_0 xonsh --> 1.0.3-py35_0
pyopenssl: 0.16.0-py35_0 --> 16.0.0-py35_0
pyqt: 4.11.4-py35_6 --> 4.11.4-py35_7
pytz: 2016.4-py35_0 --> 2016.6.1-py35_0
pyzmq: 15.2.0-py35_0 --> 15.3.0-py35_0
qt: 4.8.7-vc14_8 [vc14] --> 4.8.7-vc14_9 [vc14]
ruamel_yaml: 0.11.7-py35_0 --> 0.11.14-py35_0
sip: 4.16.9-py35_2 --> 4.18-py35_0
tornado: 4.3-py35_1 --> 4.4.1-py35_0
traitlets: 4.2.1-py35_0 --> 4.2.2-py35_0
wcwidth: 0.1.5-py35_5 xonsh --> 0.1.7-py35_0
win_unicode_console: 0.4-py35_0 xonsh --> 0.5-py35_0
The following packages will be SUPERCEDED by a higher-priority channel:
conda: 4.2.1-py35_0 conda-canary --> 4.1.11-py35_0
Proceed ([y]/n)? n
```
I'm not sure why upgrading via `conda update conda` would produce a different result for the package than `conda update --all`, intuitively the latter feels like it's a superset of the former, and the channel priority logic should apply the same way to both.
To check that there's no conflict between conda-canary and something else interfering with the `--all` case, I next proceeded with the update, then did `conda update conda` again. It goes haywire at this point:
```
... lots of printout that scrolled out of range ...
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMPrint\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMPrint\.svn\prop-base\DOMPrint.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMPrint\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMPrint\.svn\text-base\DOMPrint.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMPrint\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMPrint\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMPrint
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest\.svn\prop-base\DOMTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest\.svn\text-base\DOMTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest\.svn\prop-base\DOMTraversalTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest\.svn\text-base\DOMTraversalTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTraversalTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest\.svn\prop-base\DOMTypeInfoTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest\.svn\text-base\DOMTypeInfoTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\DOMTypeInfoTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest\.svn\prop-base\EncodingTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest\.svn\text-base\EncodingTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EncodingTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal\.svn\prop-base\EnumVal.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal\.svn\text-base\EnumVal.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\EnumVal
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest\.svn\prop-base\InitTermTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest\.svn\text-base\InitTermTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\InitTermTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest\.svn\prop-base\MemHandlerTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest\.svn\text-base\MemHandlerTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemHandlerTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse\.svn\prop-base\MemParse.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse\.svn\text-base\MemParse.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\MemParse
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse\.svn\prop-base\PParse.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse\.svn\text-base\PParse.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PParse
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter\.svn\prop-base\PSVIWriter.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter\.svn\text-base\PSVIWriter.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\PSVIWriter
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest\.svn\prop-base\RangeTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest\.svn\text-base\RangeTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\RangeTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect\.svn\prop-base\Redirect.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect\.svn\text-base\Redirect.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\Redirect
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count\.svn\prop-base\SAX2Count.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count\.svn\text-base\SAX2Count.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Count
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print\.svn\prop-base\SAX2Print.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print\.svn\text-base\SAX2Print.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAX2Print
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount\.svn\prop-base\SAXCount.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount\.svn\text-base\SAXCount.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXCount
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint\.svn\prop-base\SAXPrint.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint\.svn\text-base\SAXPrint.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SAXPrint
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint\.svn\prop-base\SCMPrint.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint\.svn\text-base\SCMPrint.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SCMPrint
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal\.svn\prop-base\SEnumVal.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal\.svn\text-base\SEnumVal.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\SEnumVal
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse\.svn\prop-base\StdInParse.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse\.svn\text-base\StdInParse.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\StdInParse
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest\.svn\prop-base\ThreadTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest\.svn\text-base\ThreadTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\ThreadTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib\.svn\prop-base\XercesLib.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib\.svn\text-base\XercesLib.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XercesLib
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude\.svn\prop-base\XInclude.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude\.svn\text-base\XInclude.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XInclude
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest\.svn\prop-base\XSerializerTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest\.svn\text-base\XSerializerTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSerializerTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness\.svn\prop-base\XSTSHarness.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness\.svn\text-base\XSTSHarness.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSTSHarness
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest\.svn\prop-base\XSValueTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest\.svn\text-base\XSValueTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all\XSValueTest
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8\xerces-all
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC8
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\.svn\entries
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\.svn
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\.svn\prop-base\xerces-all.sln.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\.svn\text-base\xerces-all.sln.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\.svn
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all\.svn\prop-base\all.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all\.svn\text-base\all.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\all
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument\.svn\prop-base\CreateDOMDocument.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument\.svn\text-base\CreateDOMDocument.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\CreateDOMDocument
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount\.svn\prop-base\DOMCount.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount\.svn\text-base\DOMCount.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMCount
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest\.svn\prop-base\DOMMemTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest\.svn\prop-base
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest\.svn\text-base\DOMMemTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest\.svn\text-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest\.svn
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMMemTest
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMNormalizerTest\.svn\all-wcprops
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMNormalizerTest\.svn\entries
WARNING:conda.install:Cannot remove, permission denied: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMNormalizerTest\.svn\prop-base\DOMNormalizerTest.vcproj.svn-base
WARNING:conda.install:Cannot remove, not empty: C:\Anaconda3\pkgs\.trash\tmp9n2xgmlr\xerces-c-3.1.3\projects\Win32\VC9\xerces-all\DOMNormalizerTest\.svn\prop-base
Traceback (most recent call last):
File "C:\Anaconda3\lib\site-packages\conda\install.py", line 270, in rm_rf
PermissionError: [WinError 5] Access is denied: 'C:\\Anaconda3\\Scripts/conda.exe'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Anaconda3\lib\shutil.py", line 381, in _rmtree_unsafe
os.unlink(fullname)
PermissionError: [WinError 5] Access is denied: 'C:\\Anaconda3\\pkgs\\.trash\\tmp9n2xgmlr\\xerces-c-3.1.3\\projects\\Win32\\VC9\\xerces-all\\DOMNormalizerTest\\.svn\\text-base\\DOMNormalizerTest.vcproj.svn-base'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Anaconda3\Scripts\conda-script.py", line 5, in <module>
File "C:\Anaconda3\lib\site-packages\conda\cli\main.py", line 120, in main
File "C:\Anaconda3\lib\site-packages\conda\cli\main.py", line 130, in args_func
File "C:\Anaconda3\lib\site-packages\conda\cli\main_update.py", line 64, in execute
File "C:\Anaconda3\lib\site-packages\conda\cli\install.py", line 407, in install
File "C:\Anaconda3\lib\site-packages\conda\plan.py", line 599, in execute_actions
File "C:\Anaconda3\lib\site-packages\conda\instructions.py", line 135, in execute_instructions
File "C:\Anaconda3\lib\site-packages\conda\instructions.py", line 82, in UNLINK_CMD
File "C:\Anaconda3\lib\site-packages\conda\install.py", line 1127, in unlink
File "C:\Anaconda3\lib\site-packages\conda\install.py", line 274, in rm_rf
File "C:\Anaconda3\lib\site-packages\conda\install.py", line 1007, in move_path_to_trash
File "C:\Anaconda3\lib\site-packages\conda\install.py", line 985, in delete_trash
File "C:\Anaconda3\lib\site-packages\conda\install.py", line 285, in rm_rf
File "C:\Anaconda3\lib\shutil.py", line 488, in rmtree
return _rmtree_unsafe(path, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 378, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "C:\Anaconda3\lib\shutil.py", line 381, in _rmtree_unsafe
os.unlink(fullname)
KeyboardInterrupt
>conda update conda
Cannot open C:\Anaconda3\Scripts\conda-script.py
```
I guess this is why it's "canary" :)
| conda/base/constants.py
<|code_start|>
# -*- coding: utf-8 -*-
"""
This file should hold almost all string literals and magic numbers used throughout the code base.
The exception is if a literal is specifically meant to be private to and isolated within a module.
"""
from __future__ import absolute_import, division, print_function
import os
import sys
from logging import getLogger
from platform import machine
from enum import Enum
from conda._vendor.auxlib.collection import frozendict
log = getLogger(__name__)
class Arch(Enum):
    """CPU architectures recognized by conda; values match ``platform.machine()``."""
    x86 = 'x86'
    x86_64 = 'x86_64'
    armv6l = 'armv6l'
    armv7l = 'armv7l'
    ppc64le = 'ppc64le'
    @classmethod
    def from_sys(cls):
        """Return the Arch member matching this host's ``platform.machine()``."""
        return cls[machine()]
class Platform(Enum):
    """Operating-system platforms; values are ``sys.platform`` strings."""
    linux = 'linux'
    win = 'win32'
    openbsd = 'openbsd5'
    osx = 'darwin'
    @classmethod
    def from_sys(cls):
        """Return the Platform member for the running interpreter's ``sys.platform``."""
        p = sys.platform
        if p.startswith('linux'):
            # Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2',
            # and there is no essential change between Linux 2.x and 3.x, sys.platform is always
            # set to 'linux2', even on Linux 3.x. In Python 3.3 and later, the value will always
            # be set to 'linux'
            p = 'linux'
        return cls(p)
machine_bits = 8 * tuple.__itemsize__
# UID = os.getuid()
PWD = os.getcwd()
CONDA = 'CONDA'
CONDA_ = 'CONDA_'
conda = 'conda'
SEARCH_PATH = (
'/etc/conda/condarc',
'/etc/conda/condarc.d/',
'/var/lib/conda/condarc',
'/var/lib/conda/condarc.d/',
'$CONDA_ROOT/condarc',
'$CONDA_ROOT/.condarc',
'$CONDA_ROOT/condarc.d/',
'$HOME/.conda/condarc',
'$HOME/.conda/condarc.d/',
'$HOME/.condarc',
'$CONDA_PREFIX/.condarc',
'$CONDA_PREFIX/condarc.d/',
'$CONDARC',
)
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
PLATFORM_DIRECTORIES = ("linux-64", "linux-32",
"win-64", "win-32",
"osx-64", "noarch")
RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')
if Platform.from_sys() is Platform.win:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
'https://repo.continuum.io/pkgs/msys2',
)
else:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
)
ROOT_ENV_NAME = 'root'
EMPTY_LIST = ()
EMPTY_MAP = frozendict(dict())
<|code_end|>
| conda/base/constants.py
# -*- coding: utf-8 -*-
"""
This file should hold almost all string literals and magic numbers used throughout the code base.
The exception is if a literal is specifically meant to be private to and isolated within a module.
"""
from __future__ import absolute_import, division, print_function
import os
import sys
from logging import getLogger
from platform import machine
from enum import Enum
from conda._vendor.auxlib.collection import frozendict
log = getLogger(__name__)
class Arch(Enum):
    """CPU architectures recognized by conda; values match ``platform.machine()``."""
    x86 = 'x86'
    x86_64 = 'x86_64'
    armv6l = 'armv6l'
    armv7l = 'armv7l'
    ppc64le = 'ppc64le'
    @classmethod
    def from_sys(cls):
        """Return the Arch member matching this host's ``platform.machine()``."""
        return cls[machine()]
class Platform(Enum):
    """Operating-system platforms; values are ``sys.platform`` strings."""
    linux = 'linux'
    win = 'win32'
    openbsd = 'openbsd5'
    osx = 'darwin'
    @classmethod
    def from_sys(cls):
        """Return the Platform member for the running interpreter's ``sys.platform``."""
        p = sys.platform
        if p.startswith('linux'):
            # Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2',
            # and there is no essential change between Linux 2.x and 3.x, sys.platform is always
            # set to 'linux2', even on Linux 3.x. In Python 3.3 and later, the value will always
            # be set to 'linux'
            p = 'linux'
        return cls(p)
machine_bits = 8 * tuple.__itemsize__
# UID = os.getuid()
PWD = os.getcwd()
CONDA = 'CONDA'
CONDA_ = 'CONDA_'
conda = 'conda'
SEARCH_PATH = (
'/etc/conda/condarc',
'/etc/conda/condarc.d/',
'/var/lib/conda/condarc',
'/var/lib/conda/condarc.d/',
'$CONDA_ROOT/condarc',
'$CONDA_ROOT/.condarc',
'$CONDA_ROOT/condarc.d/',
'~/.conda/condarc',
'~/.conda/condarc.d/',
'~/.condarc',
'$CONDA_PREFIX/.condarc',
'$CONDA_PREFIX/condarc.d/',
'$CONDARC',
)
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
PLATFORM_DIRECTORIES = ("linux-64", "linux-32",
"win-64", "win-32",
"osx-64", "noarch")
RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')
if Platform.from_sys() is Platform.win:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
'https://repo.continuum.io/pkgs/msys2',
)
else:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
)
ROOT_ENV_NAME = 'root'
EMPTY_LIST = ()
EMPTY_MAP = frozendict(dict())
| conda/base/constants.py
--- a/conda/base/constants.py
+++ b/conda/base/constants.py
@@ -62,9 +62,9 @@ def from_sys(cls):
'$CONDA_ROOT/condarc',
'$CONDA_ROOT/.condarc',
'$CONDA_ROOT/condarc.d/',
- '$HOME/.conda/condarc',
- '$HOME/.conda/condarc.d/',
- '$HOME/.condarc',
+ '~/.conda/condarc',
+ '~/.conda/condarc.d/',
+ '~/.condarc',
'$CONDA_PREFIX/.condarc',
'$CONDA_PREFIX/condarc.d/',
'$CONDARC', |
Zsh.exe not supported on MSYS2
The following error is reported in a MSYS2 zsh shell:
```
➜ dotfiles git:(master) ✗ source activate py35_32
Traceback (most recent call last):
File "C:\Miniconda3\Scripts\conda-script.py", line 5, in <module>
sys.exit(main())
File "C:\Miniconda3\lib\site-packages\conda\cli\main.py", line 48, in main
activate.main()
File "C:\Miniconda3\lib\site-packages\conda\cli\activate.py", line 105, in main
shelldict = shells[shell]
KeyError: 'zsh.exe'
```
| conda/utils.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import collections
import errno
import hashlib
import logging
import os
import re
import sys
import time
import threading
from functools import partial
from os.path import isdir, join, basename, exists
# conda build import
from .common.url import path_to_url
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
on_win = bool(sys.platform == "win32")
class memoized(object):
    """Decorator. Caches a function's return value each time it is called.

    If called later with the same arguments, the cached value is returned
    (not reevaluated).  List arguments are frozen to tuples so they can be
    used as cache keys; calls with any other unhashable positional argument
    bypass the cache entirely.  Cache access is serialized with a lock, so
    the decorated function is safe to call from multiple threads.
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}
        self.lock = threading.Lock()
    def __call__(self, *args, **kw):
        # 'collections.Hashable' was removed in Python 3.10; the ABC lives in
        # collections.abc on Python 3 (fall back for Python 2 compatibility).
        try:
            from collections.abc import Hashable
        except ImportError:  # Python 2
            from collections import Hashable
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # lists are unhashable; freeze them for the cache key
                newargs.append(tuple(arg))
            elif not isinstance(arg, Hashable):
                # uncacheable. a dict, for instance.
                # better to not cache than blow up.
                return self.func(*args, **kw)
            else:
                newargs.append(arg)
        key = (tuple(newargs), frozenset(sorted(kw.items())))
        with self.lock:
            if key in self.cache:
                return self.cache[key]
            # compute under the lock so concurrent callers see one evaluation
            value = self.func(*args, **kw)
            self.cache[key] = value
            return value
# For instance methods only
class memoize(object):  # 577452
    """Memoizing descriptor for instance methods (ActiveState recipe 577452).

    The cache is stored on the instance itself (name-mangled to
    ``_memoize__cache``), so cached values are released together with the
    instance.  Positional arguments and keyword items must be hashable.
    """
    def __init__(self, func):
        self.func = func
    def __get__(self, obj, objtype=None):
        # Accessed on the class: hand back the raw, unwrapped function.
        if obj is None:
            return self.func
        # Bind the instance as the first argument of __call__.
        return partial(self, obj)
    def __call__(self, *args, **kw):
        obj = args[0]
        try:
            cache = obj.__cache
        except AttributeError:
            cache = obj.__cache = {}
        # self.func is part of the key so several memoized methods can
        # share one per-instance cache dict without collisions
        key = (self.func, args[1:], frozenset(sorted(kw.items())))
        try:
            res = cache[key]
        except KeyError:
            res = cache[key] = self.func(*args, **kw)
        return res
@memoized
def gnu_get_libc_version():
    """
    If on linux, get installed version of glibc, otherwise return None
    """
    if not sys.platform.startswith('linux'):
        return None
    from ctypes import CDLL, cdll, c_char_p
    cdll.LoadLibrary('libc.so.6')
    libc = CDLL('libc.so.6')
    f = libc.gnu_get_libc_version
    # the C function returns char*; declaring restype makes ctypes convert
    # the result to a bytes object instead of a raw pointer value
    f.restype = c_char_p
    return f()
def try_write(dir_path, heavy=False):
    """Test write access to a directory.
    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access
    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if on_win or heavy:
        # try to create a file to see if `dir_path` is writable, see #2151
        temp_filename = join(dir_path, '.conda-try-write-%d' % os.getpid())
        try:
            with open(temp_filename, mode='wb') as fo:
                fo.write(b'This is a test file.\n')
            backoff_unlink(temp_filename)
            return True
        except (IOError, OSError):
            return False
        finally:
            # runs on every exit path; backoff_unlink tolerates the file
            # already having been removed above (it checks existence first)
            backoff_unlink(temp_filename)
    else:
        # cheap permission-bit check; the heavy path exists because this
        # can be wrong on Windows and some network filesystems
        return os.access(dir_path, os.W_OK)
def backoff_unlink(path):
    """Remove *path* if it exists, retrying with backoff on Windows.

    A missing file (ENOENT) is silently ignored; any other OS error is
    re-raised after exp_backoff_fn gives up.
    """
    try:
        exp_backoff_fn(lambda f: exists(f) and os.unlink(f), path)
    except (IOError, OSError) as e:
        if e.errno not in (errno.ENOENT,):
            # errno.ENOENT File not found error / No such file or directory
            raise
def hashsum_file(path, mode='md5'):
    """Return the hex digest of the file at *path* using algorithm *mode*."""
    digest = hashlib.new(mode)
    with open(path, 'rb') as stream:
        # feed the hash in 256KB chunks so large files never sit fully in memory
        for block in iter(lambda: stream.read(262144), b''):
            digest.update(block)
    return digest.hexdigest()
def md5_file(path):
    """Return the hex MD5 digest of the file at *path*."""
    return hashsum_file(path, 'md5')
def path_identity(path):
    """Used as a dummy path converter where no conversion necessary"""
    # plugged in as path_from/path_to for shells whose paths need no translation
    return path
def win_path_to_unix(path, root_prefix=""):
    """Convert a path or ;-separated string of paths into a unix representation
    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    # matches drive-letter paths (e.g. C:\foo\bar) embedded anywhere in the string
    path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' # noqa
    def _translation(found_path):
        # C:\foo -> /c/foo : flip backslashes, drop the colon, collapse doubles
        found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/")
        return root_prefix + "/" + found
    # the trailing replace turns Windows ';' path-list separators into ':'
    path = re.sub(path_re, _translation, path).replace(";/", ":/")
    return path
def unix_path_to_win(path, root_prefix=""):
    """Convert a path or :-separated string of paths into a Windows representation
    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    if len(path) > 1 and (";" in path or (path[1] == ":" and path.count(":") == 1)):
        # already a windows path
        return path.replace("/", "\\")
    path_re = root_prefix + r'(/[a-zA-Z]/(?:(?![:\s]/)[^:*?"<>])*)'
    def _translation(found_path):
        group = found_path.group(0)
        # /c/foo -> C:\foo (the first segment after root_prefix is the drive)
        return "{0}:{1}".format(group[len(root_prefix)+1],
                                group[len(root_prefix)+2:].replace("/", "\\"))
    translation = re.sub(path_re, _translation, path)
    # rebuild ';'-separated Windows path lists that the first pass left ':'-joined
    translation = re.sub(":([a-zA-Z]):\\\\",
                         lambda match: ";" + match.group(0)[1] + ":\\",
                         translation)
    return translation
# curry cygwin functions
def win_path_to_cygwin(path):
    """Convert a Windows path to its Cygwin form, rooted at /cygdrive."""
    return win_path_to_unix(path, "/cygdrive")
def cygwin_path_to_win(path):
    """Convert a /cygdrive-rooted Cygwin path back to a Windows path."""
    return unix_path_to_win(path, "/cygdrive")
def translate_stream(stream, translator):
    """Apply *translator* to every line of *stream* and rejoin with newlines."""
    return "\n".join(map(translator, stream.split("\n")))
def human_bytes(n):
    """
    Render the byte count *n* as a short human-readable string (B/KB/MB/GB).
    """
    if n < 1024:
        return '%d B' % n
    in_kb = n / 1024
    if in_kb < 1024:
        return '%d KB' % round(in_kb)
    in_mb = in_kb / 1024
    if in_mb < 1024:
        return '%.1f MB' % in_mb
    return '%.2f GB' % (in_mb / 1024)
# TODO: this should be done in a more extensible way
# (like files for each shell, with some registration mechanism.)
# defaults for unix shells. Note: missing "exe" entry, which should be set to
# either an executable on PATH, or a full path to an executable for a shell
# Baseline settings shared by all unix-like shells; per-shell dicts below
# override individual keys.
unix_shell_base = dict(
    binpath="/bin/",  # mind the trailing slash.
    echo="echo",
    env_script_suffix=".sh",
    nul='2>/dev/null',
    path_from=path_identity,
    path_to=path_identity,
    pathsep=":",
    printdefaultenv='echo $CONDA_DEFAULT_ENV',
    printpath="echo $PATH",
    printps1='echo $PS1',
    promptvar='PS1',
    sep="/",
    set_var='export ',
    shell_args=["-l", "-c"],
    shell_suffix="",
    slash_convert=("\\", "/"),
    source_setup="source",
    test_echo_extra="",
    var_format="${}",
)
# MSYS2 shells behave like unix shells but run against a Windows install,
# so paths are translated both ways and scripts live under Scripts/.
msys2_shell_base = dict(
    unix_shell_base,
    path_from=unix_path_to_win,
    path_to=win_path_to_unix,
    binpath="/Scripts/",  # mind the trailing slash.
)
if on_win:
    # Registry of shell configurations, keyed by the shell name conda is
    # invoked from.  A shell missing here makes `activate` die with a
    # KeyError (e.g. 'zsh.exe' under MSYS2), so every supported shell
    # needs an entry.
    shells = {
        # "powershell.exe": dict(
        #    echo="echo",
        #    test_echo_extra=" .",
        #    var_format="${var}",
        #    binpath="/bin/",  # mind the trailing slash.
        #    source_setup="source",
        #    nul='2>/dev/null',
        #    set_var='export ',
        #    shell_suffix=".ps",
        #    env_script_suffix=".ps",
        #    printps1='echo $PS1',
        #    printdefaultenv='echo $CONDA_DEFAULT_ENV',
        #    printpath="echo %PATH%",
        #    exe="powershell.exe",
        #    path_from=path_identity,
        #    path_to=path_identity,
        #    slash_convert = ("/", "\\"),
        # ),
        "cmd.exe": dict(
            echo="@echo",
            var_format="%{}%",
            binpath="\\Scripts\\",  # mind the trailing slash.
            source_setup="call",
            test_echo_extra="",
            nul='1>NUL 2>&1',
            set_var='set ',
            shell_suffix=".bat",
            env_script_suffix=".bat",
            printps1="@echo %PROMPT%",
            promptvar="PROMPT",
            # parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file # NOQA
            printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
                            'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
                            'echo()',
            printpath="@echo %PATH%",
            exe="cmd.exe",
            shell_args=["/d", "/c"],
            path_from=path_identity,
            path_to=path_identity,
            slash_convert=("/", "\\"),
            sep="\\",
            pathsep=";",
        ),
        "cygwin": dict(
            unix_shell_base,
            exe="bash.exe",
            binpath="/Scripts/",  # mind the trailing slash.
            path_from=cygwin_path_to_win,
            path_to=win_path_to_cygwin
        ),
        # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
        # entry instead. The only major difference is that it handles cygwin's /cygdrive
        # filesystem root.
        "bash.exe": dict(
            msys2_shell_base, exe="bash.exe",
        ),
        "bash": dict(
            msys2_shell_base, exe="bash",
        ),
        "sh.exe": dict(
            msys2_shell_base, exe="sh.exe",
        ),
        # zsh on MSYS2 follows the same path-translation rules as bash;
        # without these entries `source activate` raises KeyError: 'zsh.exe'
        "zsh.exe": dict(
            msys2_shell_base, exe="zsh.exe",
        ),
        "zsh": dict(
            msys2_shell_base, exe="zsh",
        ),
    }
else:
    shells = {
        "bash": dict(
            unix_shell_base, exe="bash",
        ),
        "zsh": dict(
            unix_shell_base, exe="zsh",
        ),
        "fish": dict(
            unix_shell_base, exe="fish",
            pathsep=" ",
        ),
    }
def exp_backoff_fn(fn, *args):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    if not on_win:
        # backoff only matters for transient Windows file locking; elsewhere
        # just call straight through
        return fn(*args)
    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    max_tries = 7
    for n in range(max_tries):
        try:
            result = fn(*args)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (errno.EPERM, errno.EACCES):
                # permission errors are usually transient (e.g. an AV scanner
                # holding the file); retry with exponential backoff + jitter
                if n == max_tries-1:
                    raise
                sleep_time = ((2 ** n) + random.random()) * 0.1
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          sleep_time)
                time.sleep(sleep_time)
            elif e.errno in (errno.ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.error("Uncaught backoff with errno %d", e.errno)
                raise
        else:
            return result
# put back because of conda build
urlpath = url_path = path_to_url
<|code_end|>
| conda/utils.py
from __future__ import print_function, division, absolute_import
import collections
import errno
import hashlib
import logging
import os
import re
import sys
import time
import threading
from functools import partial
from os.path import isdir, join, basename, exists
# conda build import
from .common.url import path_to_url
log = logging.getLogger(__name__)
stderrlog = logging.getLogger('stderrlog')
on_win = bool(sys.platform == "win32")
class memoized(object):
    """Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}
        # serializes cache reads/writes so the decorator is thread-safe
        self.lock = threading.Lock()
    def __call__(self, *args, **kw):
        newargs = []
        for arg in args:
            if isinstance(arg, list):
                # lists are unhashable; freeze to a tuple for the cache key
                newargs.append(tuple(arg))
            # NOTE(review): collections.Hashable was removed in Python 3.10;
            # this should be collections.abc.Hashable — confirm target runtimes.
            elif not isinstance(arg, collections.Hashable):
                # uncacheable. a list, for instance.
                # better to not cache than blow up.
                return self.func(*args, **kw)
            else:
                newargs.append(arg)
        newargs = tuple(newargs)
        key = (newargs, frozenset(sorted(kw.items())))
        with self.lock:
            if key in self.cache:
                return self.cache[key]
            else:
                value = self.func(*args, **kw)
                self.cache[key] = value
                return value
# For instance methods only
class memoize(object):  # 577452
    """Memoizing descriptor for instance methods (ActiveState recipe 577452).

    The cache is stored on the instance itself (name-mangled to
    ``_memoize__cache``), so cached values are released together with the
    instance.  Positional arguments and keyword items must be hashable.
    """
    def __init__(self, func):
        self.func = func
    def __get__(self, obj, objtype=None):
        # Accessed on the class: hand back the raw, unwrapped function.
        if obj is None:
            return self.func
        # Bind the instance as the first argument of __call__.
        return partial(self, obj)
    def __call__(self, *args, **kw):
        obj = args[0]
        try:
            cache = obj.__cache
        except AttributeError:
            cache = obj.__cache = {}
        # self.func is part of the key so several memoized methods can
        # share one per-instance cache dict without collisions
        key = (self.func, args[1:], frozenset(sorted(kw.items())))
        try:
            res = cache[key]
        except KeyError:
            res = cache[key] = self.func(*args, **kw)
        return res
@memoized
def gnu_get_libc_version():
    """
    If on linux, get installed version of glibc, otherwise return None
    """
    if not sys.platform.startswith('linux'):
        return None
    from ctypes import CDLL, cdll, c_char_p
    cdll.LoadLibrary('libc.so.6')
    libc = CDLL('libc.so.6')
    f = libc.gnu_get_libc_version
    # the C function returns char*; declaring restype makes ctypes convert
    # the result to a bytes object instead of a raw pointer value
    f.restype = c_char_p
    return f()
def try_write(dir_path, heavy=False):
    """Test write access to a directory.
    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access
    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if on_win or heavy:
        # try to create a file to see if `dir_path` is writable, see #2151
        temp_filename = join(dir_path, '.conda-try-write-%d' % os.getpid())
        try:
            with open(temp_filename, mode='wb') as fo:
                fo.write(b'This is a test file.\n')
            backoff_unlink(temp_filename)
            return True
        except (IOError, OSError):
            return False
        finally:
            # runs on every exit path; backoff_unlink tolerates the file
            # already having been removed above (it checks existence first)
            backoff_unlink(temp_filename)
    else:
        # cheap permission-bit check; the heavy path exists because this
        # can be wrong on Windows and some network filesystems
        return os.access(dir_path, os.W_OK)
def backoff_unlink(path):
    """Remove *path* if it exists, retrying with backoff on Windows.

    A missing file (ENOENT) is silently ignored; any other OS error is
    re-raised after exp_backoff_fn gives up.
    """
    try:
        exp_backoff_fn(lambda f: exists(f) and os.unlink(f), path)
    except (IOError, OSError) as e:
        if e.errno not in (errno.ENOENT,):
            # errno.ENOENT File not found error / No such file or directory
            raise
def hashsum_file(path, mode='md5'):
    """Return the hex digest of the file at *path* using algorithm *mode*."""
    digest = hashlib.new(mode)
    with open(path, 'rb') as stream:
        # feed the hash in 256KB chunks so large files never sit fully in memory
        for block in iter(lambda: stream.read(262144), b''):
            digest.update(block)
    return digest.hexdigest()
def md5_file(path):
    """Return the MD5 hex digest of the file at `path`."""
    return hashsum_file(path, 'md5')
def path_identity(path):
    """Used as a dummy path converter where no conversion is necessary;
    returns `path` unchanged."""
    return path
def win_path_to_unix(path, root_prefix=""):
    """Convert a path or ;-separated string of paths into a unix representation
    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    drive_path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

    def to_posix(match):
        # drop the drive colon, flip backslashes, collapse doubled slashes
        cleaned = match.group(1).replace("\\", "/").replace(":", "").replace("//", "/")
        return root_prefix + "/" + cleaned

    converted = re.sub(drive_path_re, to_posix, path)
    # ";/" separators between converted entries become unix-style ":/"
    return converted.replace(";/", ":/")
def unix_path_to_win(path, root_prefix=""):
    """Convert a path or :-separated string of paths into a Windows representation
    Does not add cygdrive. If you need that, set root_prefix to "/cygdrive"
    """
    if len(path) > 1 and (";" in path or (path[1] == ":" and path.count(":") == 1)):
        # already a windows path
        return path.replace("/", "\\")
    path_re = root_prefix + r'(/[a-zA-Z]/(?:(?![:\s]/)[^:*?"<>])*)'

    def _translation(found_path):
        group = found_path.group(0)
        # "/c/foo" -> "c:\foo": the char right after the prefix slash is the
        # drive letter
        return "{0}:{1}".format(group[len(root_prefix)+1],
                                group[len(root_prefix)+2:].replace("/", "\\"))
    translation = re.sub(path_re, _translation, path)
    # second pass: ":"-separated unix path lists become ";"-separated
    # windows lists (":c:\..." -> ";c:\...")
    translation = re.sub(":([a-zA-Z]):\\\\",
                         lambda match: ";" + match.group(0)[1] + ":\\",
                         translation)
    return translation
# curry cygwin functions
def win_path_to_cygwin(path):
    """win_path_to_unix with the cygwin "/cygdrive" root prefix baked in."""
    return win_path_to_unix(path, "/cygdrive")
def cygwin_path_to_win(path):
    """unix_path_to_win with the cygwin "/cygdrive" root prefix baked in."""
    return unix_path_to_win(path, "/cygdrive")
def translate_stream(stream, translator):
    """Apply `translator` to every line of `stream` and re-join the results
    with newlines."""
    lines = stream.split("\n")
    return "\n".join(map(translator, lines))
def human_bytes(n):
    """Render a byte count `n` as a short human-readable string
    (B, KB, MB or GB depending on magnitude)."""
    if n < 1024:
        return '%d B' % n
    kib = n / 1024
    if kib < 1024:
        return '%d KB' % round(kib)
    mib = kib / 1024
    if mib < 1024:
        return '%.1f MB' % mib
    return '%.2f GB' % (mib / 1024)
# TODO: this should be done in a more extensible way
# (like files for each shell, with some registration mechanism.)

# defaults for unix shells. Note: missing "exe" entry, which should be set to
# either an executable on PATH, or a full path to an executable for a shell
unix_shell_base = dict(
    binpath="/bin/",  # mind the trailing slash.
    echo="echo",
    env_script_suffix=".sh",
    nul='2>/dev/null',
    path_from=path_identity,
    path_to=path_identity,
    pathsep=":",
    printdefaultenv='echo $CONDA_DEFAULT_ENV',
    printpath="echo $PATH",
    printps1='echo $PS1',
    promptvar='PS1',
    sep="/",
    set_var='export ',
    shell_args=["-l", "-c"],
    shell_suffix="",
    slash_convert=("\\", "/"),
    source_setup="source",
    test_echo_extra="",
    var_format="${}",
)

# msys2 shells sit on top of a Windows filesystem, so the path conversion
# helpers are the reverse of the plain unix defaults.
msys2_shell_base = dict(
    unix_shell_base,
    path_from=unix_path_to_win,
    path_to=win_path_to_unix,
    binpath="/Scripts/",  # mind the trailing slash.
)

if on_win:
    shells = {
        # "powershell.exe": dict(
        #    echo="echo",
        #    test_echo_extra=" .",
        #    var_format="${var}",
        #    binpath="/bin/",  # mind the trailing slash.
        #    source_setup="source",
        #    nul='2>/dev/null',
        #    set_var='export ',
        #    shell_suffix=".ps",
        #    env_script_suffix=".ps",
        #    printps1='echo $PS1',
        #    printdefaultenv='echo $CONDA_DEFAULT_ENV',
        #    printpath="echo %PATH%",
        #    exe="powershell.exe",
        #    path_from=path_identity,
        #    path_to=path_identity,
        #    slash_convert = ("/", "\\"),
        # ),
        "cmd.exe": dict(
            echo="@echo",
            var_format="%{}%",
            binpath="\\Scripts\\",  # mind the trailing slash.
            source_setup="call",
            test_echo_extra="",
            nul='1>NUL 2>&1',
            set_var='set ',
            shell_suffix=".bat",
            env_script_suffix=".bat",
            printps1="@echo %PROMPT%",
            promptvar="PROMPT",
            # parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file # NOQA
            printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n'
                            'echo %CONDA_DEFAULT_ENV% ) ELSE (\n'
                            'echo()',
            printpath="@echo %PATH%",
            exe="cmd.exe",
            shell_args=["/d", "/c"],
            path_from=path_identity,
            path_to=path_identity,
            slash_convert=("/", "\\"),
            sep="\\",
            pathsep=";",
        ),
        "cygwin": dict(
            unix_shell_base,
            exe="bash.exe",
            binpath="/Scripts/",  # mind the trailing slash.
            path_from=cygwin_path_to_win,
            path_to=win_path_to_cygwin
        ),
        # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin
        # entry instead. The only major difference is that it handle's cygwin's /cygdrive
        # filesystem root.
        "bash.exe": dict(
            msys2_shell_base, exe="bash.exe",
        ),
        "bash": dict(
            msys2_shell_base, exe="bash",
        ),
        "sh.exe": dict(
            msys2_shell_base, exe="sh.exe",
        ),
        "zsh.exe": dict(
            msys2_shell_base, exe="zsh.exe",
        ),
        "zsh": dict(
            msys2_shell_base, exe="zsh",
        ),
    }
else:
    shells = {
        "bash": dict(
            unix_shell_base, exe="bash",
        ),
        "zsh": dict(
            unix_shell_base, exe="zsh",
        ),
        "fish": dict(
            unix_shell_base, exe="fish",
            pathsep=" ",
        ),
    }
def exp_backoff_fn(fn, *args):
    """Mostly for retrying file operations that fail on Windows due to virus scanners.

    On non-Windows platforms ``fn(*args)`` is called once, directly.
    On Windows, EPERM/EACCES failures are retried with jittered exponential
    backoff; ENOENT and every other errno propagate immediately.
    """
    if not on_win:
        return fn(*args)

    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    max_tries = 7
    for n in range(max_tries):
        try:
            result = fn(*args)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (errno.EPERM, errno.EACCES):
                if n == max_tries-1:
                    raise
                # jittered exponential backoff: (2**n + rand[0,1)) * 0.1 sec
                sleep_time = ((2 ** n) + random.random()) * 0.1
                # _getframe(1): report the caller's file/line in the log
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          sleep_time)
                time.sleep(sleep_time)
            elif e.errno in (errno.ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.error("Uncaught backoff with errno %d", e.errno)
                raise
        else:
            return result
# put back because of conda build
# legacy aliases for path_to_url kept for external importers (conda-build)
urlpath = url_path = path_to_url
| conda/utils.py
--- a/conda/utils.py
+++ b/conda/utils.py
@@ -313,6 +313,12 @@ def human_bytes(n):
"sh.exe": dict(
msys2_shell_base, exe="sh.exe",
),
+ "zsh.exe": dict(
+ msys2_shell_base, exe="zsh.exe",
+ ),
+ "zsh": dict(
+ msys2_shell_base, exe="zsh",
+ ),
}
else: |
conda canary - unable to create environment yml file
I just did
`
(anabase) (psreldev) psel701: /reg/g/psdm/sw/conda/logs $ conda env export > ../manage/config/environment-anabase-1.0.0.yml`
anabase is the environment, I also add the account, psreldev, that I'm logged into, and got this output
```
n unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Current conda install:
platform : linux-64
conda version : 4.2.1
conda is private : False
conda-env version : 2.5.2
conda-build version : 1.21.11+0.g5b44ab3.dirty
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7 (writable)
default environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/anabase
envs directories : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs
package cache : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs
channel URLs : file:///reg/g/psdm/sw/conda/channels/system-rhel7
file:///reg/g/psdm/sw/conda/channels/psana-rhel7
file:///reg/g/psdm/sw/conda/channels/external-rhel7
defaults
scikit-beam
file:///reg/g/psdm/sw/conda/channels/testing-rhel7
config file : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/.condarc
offline mode : False
`$ /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/conda-env export`
Traceback (most recent call last):
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/exceptions.py", line 403, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_env/cli/main_export.py", line 93, in execute
ignore_channels=args.ignore_prefix)
AttributeError: 'Namespace' object has no attribute 'ignore_prefix'
../manage/config/environment-anabase-1.0.0.yml (END)
```
Here is the environment, if this matters - there is one pip in there, nose2
```
(anabase) (psreldev) psel701: /reg/g/psdm/sw/conda/logs $ conda list
# packages in environment at /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/anabase:
#
backports 1.0 py27_0
boost 1.57.0 4
cairo 1.12.18 6
coverage 4.1 py27_0
cycler 0.10.0 py27_0
cython 0.24.1 py27_0
decorator 4.0.10 py27_0
szip 2.1 100 file:///reg/g/psdm/sw/conda/channels/external-rhel7
h5py 2.5.0 py27_hdf518_mpi4py2_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
hdf5 1.8.17 openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
mpi4py 2.0.0 py27_openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
openmpi 1.10.3 lsf_verbs_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
tables 3.2.3.1 py27_hdf18_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
fontconfig 2.11.1 6
freetype 2.5.5 1
get_terminal_size 1.0.0 py27_0
glib 2.43.0 1
harfbuzz 0.9.39 1
icu 54.1 0
ipython 5.0.0 py27_0
ipython_genutils 0.1.0 py27_0
jbig 2.1 0
jinja2 2.8 py27_1
jpeg 8d 1
libffi 3.2.1 0
libgfortran 3.0.0 1
libpng 1.6.22 0
libsodium 1.0.10 0
libtiff 4.0.6 2
libxml2 2.9.2 0
markupsafe 0.23 py27_2
matplotlib 1.5.1 np111py27_0
mkl 11.3.3 0
mysql 5.5.24 0
networkx 1.11 py27_0
nose 1.3.7 py27_1
nose2 0.6.5 <pip>
numexpr 2.6.1 np111py27_0
numpy 1.11.1 py27_0
openssl 1.0.2h 1
pandas 0.18.1 np111py27_0
pango 1.39.0 1
path.py 8.2.1 py27_0
pathlib2 2.1.0 py27_0
pexpect 4.0.1 py27_0
pickleshare 0.7.3 py27_0
pillow 3.3.0 py27_0
pip 8.1.2 py27_0
pixman 0.32.6 0
prompt_toolkit 1.0.3 py27_0
ptyprocess 0.5.1 py27_0
pycairo 1.10.0 py27_0
pygments 2.1.3 py27_0
pyparsing 2.1.4 py27_0
pyqt 4.11.4 py27_4
pyqtgraph 0.9.10 py27_1
python 2.7.12 1
python-dateutil 2.5.3 py27_0
pytz 2016.6.1 py27_0
pyzmq 15.3.0 py27_0
qt 4.8.5 0
readline 6.2 2
lmfit 0.8.3 py27_0 scikit-beam
scikit-beam 0.0.8 py27_0 scikit-beam
xraylib 3.1.0 nppy27_0 scikit-beam
scikit-image 0.12.3 np111py27_1
scikit-learn 0.17.1 np111py27_2
scipy 0.18.0 np111py27_0
scons 2.3.0 py27_0
setuptools 23.0.0 py27_0
simplegeneric 0.8.1 py27_1
sip 4.18 py27_0
six 1.10.0 py27_0
sqlite 3.13.0 0
tk 8.5.18 0
traitlets 4.2.2 py27_0
wcwidth 0.1.7 py27_0
wheel 0.29.0 py27_0
xz 5.2.2 0
zeromq 4.1.4 0
zlib 1.2.8 3
```
| conda_env/specs/requirements.py
<|code_start|>
import os
from .. import env
class RequirementsSpec(object):
'''
Reads depedencies from a requirements.txt file
and returns an Environment object from it.
'''
msg = None
def __init__(self, filename=None, name=None, **kwargs):
self.filename = filename
self.name = name
self.msg = None
def _valid_file(self):
if os.path.exists(self.filename):
return True
else:
self.msg = "There is no requirements.txt"
return False
def _valid_name(self):
if self.name is None:
self.msg = "Environment with requierements.txt file needs a name"
return False
else:
return True
def can_handle(self):
return self._valid_file() and self._valid_name()
@property
def environment(self):
dependencies = []
with open(self.filename) as reqfile:
for line in reqfile:
dependencies.append(line)
return env.Environment(
name=self.name,
dependencies=dependencies
)
<|code_end|>
| conda_env/specs/requirements.py
import os
from .. import env
class RequirementsSpec(object):
    '''
    Reads dependencies from a requirements.txt file
    and returns an Environment object from it.
    '''
    msg = None

    def __init__(self, filename=None, name=None, **kwargs):
        self.filename = filename
        self.name = name
        self.msg = None

    def _valid_file(self):
        # The file must exist before it can be parsed.
        if os.path.exists(self.filename):
            return True
        else:
            self.msg = "There is no requirements.txt"
            return False

    def _valid_name(self):
        # requirements.txt carries no environment name, so one must be given.
        if self.name is None:
            self.msg = "Environment with requierements.txt file needs a name"
            return False
        else:
            return True

    def can_handle(self):
        return self._valid_file() and self._valid_name()

    @property
    def environment(self):
        # Collect the non-blank, non-comment lines as dependency specs.
        dependencies = []
        with open(self.filename) as reqfile:
            for line in reqfile:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                dependencies.append(line)
        return env.Environment(
            name=self.name,
            dependencies=dependencies
        )
| conda_env/specs/requirements.py
--- a/conda_env/specs/requirements.py
+++ b/conda_env/specs/requirements.py
@@ -37,6 +37,9 @@ def environment(self):
dependencies = []
with open(self.filename) as reqfile:
for line in reqfile:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
dependencies.append(line)
return env.Environment(
name=self.name, |
conda canary - unable to create environment yml file
I just did
`
(anabase) (psreldev) psel701: /reg/g/psdm/sw/conda/logs $ conda env export > ../manage/config/environment-anabase-1.0.0.yml`
anabase is the environment, I also add the account, psreldev, that I'm logged into, and got this output
```
n unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Current conda install:
platform : linux-64
conda version : 4.2.1
conda is private : False
conda-env version : 2.5.2
conda-build version : 1.21.11+0.g5b44ab3.dirty
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7 (writable)
default environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/anabase
envs directories : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs
package cache : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs
channel URLs : file:///reg/g/psdm/sw/conda/channels/system-rhel7
file:///reg/g/psdm/sw/conda/channels/psana-rhel7
file:///reg/g/psdm/sw/conda/channels/external-rhel7
defaults
scikit-beam
file:///reg/g/psdm/sw/conda/channels/testing-rhel7
config file : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/.condarc
offline mode : False
`$ /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/conda-env export`
Traceback (most recent call last):
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/exceptions.py", line 403, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_env/cli/main_export.py", line 93, in execute
ignore_channels=args.ignore_prefix)
AttributeError: 'Namespace' object has no attribute 'ignore_prefix'
../manage/config/environment-anabase-1.0.0.yml (END)
```
Here is the environment, if this matters - there is one pip in there, nose2
```
(anabase) (psreldev) psel701: /reg/g/psdm/sw/conda/logs $ conda list
# packages in environment at /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/anabase:
#
backports 1.0 py27_0
boost 1.57.0 4
cairo 1.12.18 6
coverage 4.1 py27_0
cycler 0.10.0 py27_0
cython 0.24.1 py27_0
decorator 4.0.10 py27_0
szip 2.1 100 file:///reg/g/psdm/sw/conda/channels/external-rhel7
h5py 2.5.0 py27_hdf518_mpi4py2_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
hdf5 1.8.17 openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
mpi4py 2.0.0 py27_openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
openmpi 1.10.3 lsf_verbs_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
tables 3.2.3.1 py27_hdf18_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
fontconfig 2.11.1 6
freetype 2.5.5 1
get_terminal_size 1.0.0 py27_0
glib 2.43.0 1
harfbuzz 0.9.39 1
icu 54.1 0
ipython 5.0.0 py27_0
ipython_genutils 0.1.0 py27_0
jbig 2.1 0
jinja2 2.8 py27_1
jpeg 8d 1
libffi 3.2.1 0
libgfortran 3.0.0 1
libpng 1.6.22 0
libsodium 1.0.10 0
libtiff 4.0.6 2
libxml2 2.9.2 0
markupsafe 0.23 py27_2
matplotlib 1.5.1 np111py27_0
mkl 11.3.3 0
mysql 5.5.24 0
networkx 1.11 py27_0
nose 1.3.7 py27_1
nose2 0.6.5 <pip>
numexpr 2.6.1 np111py27_0
numpy 1.11.1 py27_0
openssl 1.0.2h 1
pandas 0.18.1 np111py27_0
pango 1.39.0 1
path.py 8.2.1 py27_0
pathlib2 2.1.0 py27_0
pexpect 4.0.1 py27_0
pickleshare 0.7.3 py27_0
pillow 3.3.0 py27_0
pip 8.1.2 py27_0
pixman 0.32.6 0
prompt_toolkit 1.0.3 py27_0
ptyprocess 0.5.1 py27_0
pycairo 1.10.0 py27_0
pygments 2.1.3 py27_0
pyparsing 2.1.4 py27_0
pyqt 4.11.4 py27_4
pyqtgraph 0.9.10 py27_1
python 2.7.12 1
python-dateutil 2.5.3 py27_0
pytz 2016.6.1 py27_0
pyzmq 15.3.0 py27_0
qt 4.8.5 0
readline 6.2 2
lmfit 0.8.3 py27_0 scikit-beam
scikit-beam 0.0.8 py27_0 scikit-beam
xraylib 3.1.0 nppy27_0 scikit-beam
scikit-image 0.12.3 np111py27_1
scikit-learn 0.17.1 np111py27_2
scipy 0.18.0 np111py27_0
scons 2.3.0 py27_0
setuptools 23.0.0 py27_0
simplegeneric 0.8.1 py27_1
sip 4.18 py27_0
six 1.10.0 py27_0
sqlite 3.13.0 0
tk 8.5.18 0
traitlets 4.2.2 py27_0
wcwidth 0.1.7 py27_0
wheel 0.29.0 py27_0
xz 5.2.2 0
zeromq 4.1.4 0
zlib 1.2.8 3
```
| conda_env/cli/main_export.py
<|code_start|>
from __future__ import absolute_import, print_function
import os
import textwrap
from argparse import RawDescriptionHelpFormatter
from conda import config
from ..env import from_environment
# conda env import
from conda_env.cli.common import error_and_exit, get_prefix
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
p.add_argument(
'-c', '--channel',
action='append',
help='Additional channel to include in the export'
)
p.add_argument(
"--override-channels",
action="store_true",
help="Do not include .condarc channels",
)
p.add_argument(
'-n', '--name',
action='store',
help='name of environment (in %s)' % os.pathsep.join(config.envs_dirs),
default=None,
)
p.add_argument(
'-f', '--file',
default=None,
required=False
)
p.add_argument(
'--no-builds',
default=False,
action='store_true',
required=False,
help='Remove build specification from dependencies'
)
p.add_argument(
'--ignore-channels',
default=False,
action='store_true',
required=False,
help='Do not include channel names with package names.')
p.set_defaults(func=execute)
# TODO Make this aware of channels that were used to install packages
def execute(args, parser):
if not args.name:
# Note, this is a hack fofr get_prefix that assumes argparse results
# TODO Refactor common.get_prefix
name = os.environ.get('CONDA_DEFAULT_ENV', False)
if not name:
msg = "Unable to determine environment\n\n"
msg += textwrap.dedent("""
Please re-run this command with one of the following options:
* Provide an environment name via --name or -n
* Re-run this command inside an activated conda environment.""").lstrip()
# TODO Add json support
error_and_exit(msg, json=False)
args.name = name
else:
name = args.name
prefix = get_prefix(args)
env = from_environment(name, prefix, no_builds=args.no_builds,
ignore_channels=args.ignore_prefix)
if args.override_channels:
env.remove_channels()
if args.channel is not None:
env.add_channels(args.channel)
if args.file is None:
print(env.to_yaml())
else:
fp = open(args.file, 'wb')
env.to_yaml(stream=fp)
<|code_end|>
| conda_env/cli/main_export.py
from __future__ import absolute_import, print_function
import os
import textwrap
from argparse import RawDescriptionHelpFormatter
from conda import config
from ..env import from_environment
# conda env import
from conda_env.cli.common import error_and_exit, get_prefix
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
p.add_argument(
'-c', '--channel',
action='append',
help='Additional channel to include in the export'
)
p.add_argument(
"--override-channels",
action="store_true",
help="Do not include .condarc channels",
)
p.add_argument(
'-n', '--name',
action='store',
help='name of environment (in %s)' % os.pathsep.join(config.envs_dirs),
default=None,
)
p.add_argument(
'-f', '--file',
default=None,
required=False
)
p.add_argument(
'--no-builds',
default=False,
action='store_true',
required=False,
help='Remove build specification from dependencies'
)
p.add_argument(
'--ignore-channels',
default=False,
action='store_true',
required=False,
help='Do not include channel names with package names.')
p.set_defaults(func=execute)
# TODO Make this aware of channels that were used to install packages
def execute(args, parser):
    """Export the named (or currently activated) environment as YAML,
    to stdout or to the file given via --file."""
    if not args.name:
        # Note, this is a hack for get_prefix that assumes argparse results
        # TODO Refactor common.get_prefix
        name = os.environ.get('CONDA_DEFAULT_ENV', False)
        if not name:
            msg = "Unable to determine environment\n\n"
            msg += textwrap.dedent("""
                Please re-run this command with one of the following options:

                * Provide an environment name via --name or -n
                * Re-run this command inside an activated conda environment.""").lstrip()
            # TODO Add json support
            error_and_exit(msg, json=False)

        args.name = name
    else:
        name = args.name
    prefix = get_prefix(args)
    env = from_environment(name, prefix, no_builds=args.no_builds,
                           ignore_channels=args.ignore_channels)

    if args.override_channels:
        env.remove_channels()

    if args.channel is not None:
        env.add_channels(args.channel)

    if args.file is None:
        print(env.to_yaml())
    else:
        # NOTE(review): handle is opened in binary mode and never closed;
        # presumably to_yaml copes with a bytes stream — consider a
        # `with` block here. Confirm before changing.
        fp = open(args.file, 'wb')
        env.to_yaml(stream=fp)
| conda_env/cli/main_export.py
--- a/conda_env/cli/main_export.py
+++ b/conda_env/cli/main_export.py
@@ -90,7 +90,7 @@ def execute(args, parser):
name = args.name
prefix = get_prefix(args)
env = from_environment(name, prefix, no_builds=args.no_builds,
- ignore_channels=args.ignore_prefix)
+ ignore_channels=args.ignore_channels)
if args.override_channels:
env.remove_channels() |
[Regression] Conda create environment fails on lock if root environment is not under user control
This is "funny", but it seems that Conda managed to break this thing the second time in a month... #2681 was the previous one.
This time, I get the following error:
```
$ conda create -n _root --yes --use-index-cache python=3
...
Traceback (most recent call last):
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/exceptions.py", line 442, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/main_create.py", line 66, in execute
install(args, parser, 'create')
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/install.py", line 399, in install
execute_actions(actions, index, verbose=not args.quiet)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/plan.py", line 640, in execute_actions
inst.execute_instructions(plan, index, verbose)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/instructions.py", line 132, in execute_instructions
cmd(state, arg)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/instructions.py", line 77, in LINK_CMD
link(state['prefix'], dist, lt, index=state['index'])
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/install.py", line 1060, in link
with DirectoryLock(prefix), FileLock(source_dir):
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/lock.py", line 86, in __enter__
touch(self.lock_file_path)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/lock.py", line 48, in touch
with open(file_name, 'a'):
PermissionError: [Errno 13] Permission denied: '/usr/local/miniconda/pkgs/openssl-1.0.2h-1.pid34.conda_lock'
```
```
Current conda install:
platform : linux-64
conda version : 4.2.2
conda is private : False
conda-env version : 2.6.0
conda-build version : 1.21.11+0.g5b44ab3.dirty
python version : 3.5.2.final.0
requests version : 2.10.0
root environment : /usr/local/miniconda (read only)
default environment : /usr/local/miniconda
envs directories : /home/gitlab-ci/.conda/envs
/usr/local/miniconda/envs
package cache : /home/gitlab-ci/.conda/envs/.pkgs
/usr/local/miniconda/pkgs
channel URLs : defaults
config file : None
offline mode : False
```
/CC @kalefranz
[Regression] Conda create environment fails on lock if root environment is not under user control
This is "funny", but it seems that Conda managed to break this thing the second time in a month... #2681 was the previous one.
This time, I get the following error:
```
$ conda create -n _root --yes --use-index-cache python=3
...
Traceback (most recent call last):
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/exceptions.py", line 442, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/main_create.py", line 66, in execute
install(args, parser, 'create')
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/install.py", line 399, in install
execute_actions(actions, index, verbose=not args.quiet)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/plan.py", line 640, in execute_actions
inst.execute_instructions(plan, index, verbose)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/instructions.py", line 132, in execute_instructions
cmd(state, arg)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/instructions.py", line 77, in LINK_CMD
link(state['prefix'], dist, lt, index=state['index'])
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/install.py", line 1060, in link
with DirectoryLock(prefix), FileLock(source_dir):
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/lock.py", line 86, in __enter__
touch(self.lock_file_path)
File "/usr/local/miniconda/lib/python3.5/site-packages/conda/lock.py", line 48, in touch
with open(file_name, 'a'):
PermissionError: [Errno 13] Permission denied: '/usr/local/miniconda/pkgs/openssl-1.0.2h-1.pid34.conda_lock'
```
```
Current conda install:
platform : linux-64
conda version : 4.2.2
conda is private : False
conda-env version : 2.6.0
conda-build version : 1.21.11+0.g5b44ab3.dirty
python version : 3.5.2.final.0
requests version : 2.10.0
root environment : /usr/local/miniconda (read only)
default environment : /usr/local/miniconda
envs directories : /home/gitlab-ci/.conda/envs
/usr/local/miniconda/envs
package cache : /home/gitlab-ci/.conda/envs/.pkgs
/usr/local/miniconda/pkgs
channel URLs : defaults
config file : None
offline mode : False
```
/CC @kalefranz
| conda/lock.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
"""
Tools for working with locks
A lock is just an empty directory. We use directories because this lets us use
the race condition-proof os.makedirs.
For now, there is one global lock for all of conda, because some things happen
globally (such as downloading packages).
We don't raise an error if the lock is named with the current PID
"""
from __future__ import absolute_import, division, print_function
import logging
import os
import time
from glob import glob
from os.path import abspath, isdir, dirname, basename, join
from .compat import range
from .exceptions import LockError
LOCK_EXTENSION = 'conda_lock'
# Keep the string "LOCKERROR" in this string so that external
# programs can look for it.
LOCKSTR = """
LOCKERROR: It looks like conda is already doing something.
The lock {0} was found. Wait for it to finish before continuing.
If you are sure that conda is not running, remove it and try again.
You can also use: $ conda clean --lock
"""
stdoutlog = logging.getLogger('stdoutlog')
log = logging.getLogger(__name__)
def touch(file_name, times=None):
""" Touch function like touch in Unix shell
:param file_name: the name of file
:param times: the access and modified time
Examples:
touch("hello_world.py")
"""
with open(file_name, 'a'):
os.utime(file_name, times)
class FileLock(object):
"""Lock a path (file or directory) with the lock file sitting *beside* path.
:param path_to_lock: the path to be locked
:param retries: max number of retries
"""
def __init__(self, path_to_lock, retries=10):
"""
"""
self.path_to_lock = abspath(path_to_lock)
self.retries = retries
self.lock_file_path = "%s.pid{0}.%s" % (self.path_to_lock, LOCK_EXTENSION)
# e.g. if locking path `/conda`, lock file will be `/conda.pidXXXX.conda_lock`
self.lock_file_glob_str = "%s.pid*.%s" % (self.path_to_lock, LOCK_EXTENSION)
assert isdir(dirname(self.path_to_lock)), "{0} doesn't exist".format(self.path_to_lock)
assert "::" not in self.path_to_lock, self.path_to_lock
def __enter__(self):
sleep_time = 1
self.lock_file_path = self.lock_file_path.format(os.getpid())
last_glob_match = None
for _ in range(self.retries + 1):
# search, whether there is process already locked on this file
glob_result = glob(self.lock_file_glob_str)
if glob_result:
log.debug(LOCKSTR.format(glob_result))
log.debug("Sleeping for %s seconds\n" % sleep_time)
time.sleep(sleep_time / 10)
sleep_time *= 2
last_glob_match = glob_result
else:
touch(self.lock_file_path)
return self
stdoutlog.error("Exceeded max retries, giving up")
raise LockError(LOCKSTR.format(last_glob_match))
def __exit__(self, exc_type, exc_value, traceback):
from .install import rm_rf
rm_rf(self.lock_file_path)
class DirectoryLock(FileLock):
"""Lock a directory with the lock file sitting *within* the directory being locked.
Useful when, for example, locking the root prefix at ``/conda``, and ``/`` is not writable.
:param directory_path: the path to be locked
:param retries: max number of retries
"""
def __init__(self, directory_path, retries=10):
self.directory_path = abspath(directory_path)
directory_name = basename(self.directory_path)
self.retries = retries
lock_path_pre = join(self.directory_path, directory_name)
self.lock_file_path = "%s.pid{0}.%s" % (lock_path_pre, LOCK_EXTENSION)
# e.g. if locking directory `/conda`, lock file will be `/conda/conda.pidXXXX.conda_lock`
self.lock_file_glob_str = "%s.pid*.%s" % (lock_path_pre, LOCK_EXTENSION)
assert isdir(dirname(self.directory_path)), "{0} doesn't exist".format(self.directory_path)
if not isdir(self.directory_path):
os.makedirs(self.directory_path, exist_ok=True)
log.debug("forced to create %s", self.directory_path)
assert os.access(self.directory_path, os.W_OK), "%s not writable" % self.directory_path
Locked = DirectoryLock
<|code_end|>
| conda/lock.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
"""
Tools for working with locks
A lock is just an empty directory. We use directories because this lets us use
the race condition-proof os.makedirs.
For now, there is one global lock for all of conda, because some things happen
globally (such as downloading packages).
We don't raise an error if the lock is named with the current PID
"""
from __future__ import absolute_import, division, print_function
import logging
import os
import time
from glob import glob
from os.path import abspath, isdir, dirname, basename, join
from .compat import range
from .exceptions import LockError
LOCK_EXTENSION = 'conda_lock'
# Keep the string "LOCKERROR" in this string so that external
# programs can look for it.
LOCKSTR = """
LOCKERROR: It looks like conda is already doing something.
The lock {0} was found. Wait for it to finish before continuing.
If you are sure that conda is not running, remove it and try again.
You can also use: $ conda clean --lock
"""
stdoutlog = logging.getLogger('stdoutlog')
log = logging.getLogger(__name__)
def touch(file_name, times=None):
    """ Touch function like touch in Unix shell
    :param file_name:  the name of file
    :param times: the access and modified time

    Examples:
        touch("hello_world.py")
    """
    try:
        with open(file_name, 'a'):
            os.utime(file_name, times)
    except (OSError, IOError) as e:
        # Best effort: a read-only install must not make conda crash, so a
        # failed lock-file creation is only warned about (see conda#3120).
        # NOTE(review): `e` is unused and log.warn is the deprecated alias
        # of log.warning — confirm before changing.
        log.warn("Failed to create lock, do not run conda in parallel process\n")
class FileLock(object):
    """Lock a path (file or directory) with the lock file sitting *beside* path.

    :param path_to_lock: the path to be locked
    :param retries: max number of retries
    """
    def __init__(self, path_to_lock, retries=10):
        """Record the target path and precompute the lock-file name
        template (pid filled in at __enter__ time) and glob pattern."""
        self.path_to_lock = abspath(path_to_lock)
        self.retries = retries
        self.lock_file_path = "%s.pid{0}.%s" % (self.path_to_lock, LOCK_EXTENSION)
        # e.g. if locking path `/conda`, lock file will be `/conda.pidXXXX.conda_lock`
        self.lock_file_glob_str = "%s.pid*.%s" % (self.path_to_lock, LOCK_EXTENSION)
        assert isdir(dirname(self.path_to_lock)), "{0} doesn't exist".format(self.path_to_lock)
        assert "::" not in self.path_to_lock, self.path_to_lock

    def __enter__(self):
        sleep_time = 1
        self.lock_file_path = self.lock_file_path.format(os.getpid())
        last_glob_match = None

        for _ in range(self.retries + 1):
            # search, whether there is process already locked on this file
            glob_result = glob(self.lock_file_glob_str)
            if glob_result:
                log.debug(LOCKSTR.format(glob_result))
                log.debug("Sleeping for %s seconds\n" % sleep_time)
                # exponential backoff, in tenths of a second
                time.sleep(sleep_time / 10)
                sleep_time *= 2
                last_glob_match = glob_result
            else:
                # no competing lock file: claim the lock by creating ours
                touch(self.lock_file_path)
                return self

        stdoutlog.error("Exceeded max retries, giving up")
        raise LockError(LOCKSTR.format(last_glob_match))

    def __exit__(self, exc_type, exc_value, traceback):
        # local import to avoid a circular import at module load time
        from .install import rm_rf
        rm_rf(self.lock_file_path)
class DirectoryLock(FileLock):
"""Lock a directory with the lock file sitting *within* the directory being locked.
Useful when, for example, locking the root prefix at ``/conda``, and ``/`` is not writable.
:param directory_path: the path to be locked
:param retries: max number of retries
"""
def __init__(self, directory_path, retries=10):
self.directory_path = abspath(directory_path)
directory_name = basename(self.directory_path)
self.retries = retries
lock_path_pre = join(self.directory_path, directory_name)
self.lock_file_path = "%s.pid{0}.%s" % (lock_path_pre, LOCK_EXTENSION)
# e.g. if locking directory `/conda`, lock file will be `/conda/conda.pidXXXX.conda_lock`
self.lock_file_glob_str = "%s.pid*.%s" % (lock_path_pre, LOCK_EXTENSION)
# make sure '/' exists
assert isdir(dirname(self.directory_path)), "{0} doesn't exist".format(self.directory_path)
if not isdir(self.directory_path):
try:
os.makedirs(self.directory_path)
log.debug("forced to create %s", self.directory_path)
except (OSError, IOError) as e:
log.warn("Failed to create directory %s" % self.directory_path)
Locked = DirectoryLock
| conda/lock.py
--- a/conda/lock.py
+++ b/conda/lock.py
@@ -45,8 +45,11 @@ def touch(file_name, times=None):
Examples:
touch("hello_world.py")
"""
- with open(file_name, 'a'):
- os.utime(file_name, times)
+ try:
+ with open(file_name, 'a'):
+ os.utime(file_name, times)
+ except (OSError, IOError) as e:
+ log.warn("Failed to create lock, do not run conda in parallel process\n")
class FileLock(object):
@@ -111,11 +114,13 @@ def __init__(self, directory_path, retries=10):
self.lock_file_path = "%s.pid{0}.%s" % (lock_path_pre, LOCK_EXTENSION)
# e.g. if locking directory `/conda`, lock file will be `/conda/conda.pidXXXX.conda_lock`
self.lock_file_glob_str = "%s.pid*.%s" % (lock_path_pre, LOCK_EXTENSION)
+ # make sure '/' exists
assert isdir(dirname(self.directory_path)), "{0} doesn't exist".format(self.directory_path)
if not isdir(self.directory_path):
- os.makedirs(self.directory_path, exist_ok=True)
- log.debug("forced to create %s", self.directory_path)
- assert os.access(self.directory_path, os.W_OK), "%s not writable" % self.directory_path
-
+ try:
+ os.makedirs(self.directory_path)
+ log.debug("forced to create %s", self.directory_path)
+ except (OSError, IOError) as e:
+ log.warn("Failed to create directory %s" % self.directory_path)
Locked = DirectoryLock |
URLs with :: are OK and should not raise assertion errors
For conda-build's perl skeleton generator, we can end up with URLs like:
http://api.metacpan.org/v0/module/Test::More
Unfortunately, conda prevents us from actually using those URLs:
```
File "/Users/msarahan/miniconda2/lib/python2.7/site-packages/conda/fetch.py", line 354, in download
assert "::" not in str(url), url
AssertionError: http://api.metacpan.org/v0/module/Test::More
```
Please partially revert https://github.com/conda/conda/commit/39605e01ccd05b5af5ebeceeacaafe652f4b32e4
| conda/fetch.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import, unicode_literals
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import tempfile
import warnings
from functools import wraps
from logging import getLogger, DEBUG
from os.path import basename, dirname, join
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
from ._vendor.auxlib.logz import stringify
from .base.context import context
from .common.url import add_username_and_pass_to_url, url_to_path
from .compat import itervalues, input, iteritems
from .connection import CondaSession, RETRIES
from .models.channel import Channel, offline_keep
from .exceptions import (ProxyError, CondaRuntimeError, CondaSignatureError, CondaHTTPError,
MD5MismatchError)
from .install import add_cached_package, find_new_location, package_cache, dist2pair, rm_rf
from .lock import FileLock
from .utils import exp_backoff_fn, memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
cache_dir = join(context.pkgs_dirs[0], 'cache')
try:
os.makedirs(cache_dir)
except OSError:
pass
return cache_dir
def cache_fn_url(url):
md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
return '%s.json' % (md5[:8],)
def add_http_value_to_dict(resp, http_key, d, dict_key):
value = resp.headers.get(http_key)
if value:
d[dict_key] = value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
def __init__(self, msg):
self.msg = msg
def __call__(self, f):
@wraps(f)
def func(*args, **kwargs):
res = f(*args, **kwargs)
dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
return res
return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
if not offline_keep(url):
return {'packages': {}}
cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
try:
log.debug("Opening repodata cache for %s at %s", url, cache_path)
with open(cache_path) as f:
cache = json.load(f)
except (IOError, ValueError):
cache = {'packages': {}}
if use_cache:
return cache
if not context.ssl_verify:
warnings.simplefilter('ignore', InsecureRequestWarning)
session = session or CondaSession()
headers = {}
if "_etag" in cache:
headers["If-None-Match"] = cache["_etag"]
if "_mod" in cache:
headers["If-Modified-Since"] = cache["_mod"]
if 'repo.continuum.io' in url or url.startswith("file://"):
filename = 'repodata.json.bz2'
headers['Accept-Encoding'] = 'identity'
else:
headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
headers['Content-Type'] = 'application/json'
filename = 'repodata.json'
try:
resp = session.get(url + filename, headers=headers, proxies=session.proxies,
timeout=(3.05, 60))
if log.isEnabledFor(DEBUG):
log.debug(stringify(resp))
resp.raise_for_status()
if resp.status_code != 304:
def get_json_str(filename, resp_content):
if filename.endswith('.bz2'):
return bz2.decompress(resp_content).decode('utf-8')
else:
return resp_content.decode('utf-8')
if url.startswith('file://'):
file_path = url_to_path(url)
with FileLock(dirname(file_path)):
json_str = get_json_str(filename, resp.content)
else:
json_str = get_json_str(filename, resp.content)
cache = json.loads(json_str)
add_http_value_to_dict(resp, 'Etag', cache, '_etag')
add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
except ValueError as e:
raise CondaRuntimeError("Invalid index file: {0}{1}: {2}"
.format(url, filename, e))
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
if e.response.status_code == 404:
if url.endswith('/noarch/'): # noarch directory might not exist
return None
msg = 'Could not find URL: %s' % url
elif e.response.status_code == 403 and url.endswith('/noarch/'):
return None
elif e.response.status_code == 401 and context.channel_alias in url:
# Note, this will not trigger if the binstar configured url does
# not match the conda configured one.
msg = ("Warning: you may need to login to anaconda.org again with "
"'anaconda login' to access private packages(%s, %s)" %
(url, e))
stderrlog.info(msg)
return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
else:
msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
raise CondaHTTPError(msg)
except requests.exceptions.SSLError as e:
msg = "SSL Error: %s\n" % e
stderrlog.info("SSL verification error: %s\n" % e)
log.debug(msg)
except requests.exceptions.ConnectionError as e:
# requests isn't so nice here. For whatever reason, https gives this
# error and http gives the above error. Also, there is no status_code
# attribute here. We have to just check if it looks like 407. See
# https://github.com/kennethreitz/requests/issues/2061.
if "407" in str(e): # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
msg = "Connection error: %s: %s\n" % (e, url)
stderrlog.info('Could not connect to %s\n' % url)
log.debug(msg)
if fail_unknown_host:
raise CondaRuntimeError(msg)
raise CondaRuntimeError(msg)
cache['_url'] = url
try:
with open(cache_path, 'w') as fo:
json.dump(cache, fo, indent=2, sort_keys=True)
except IOError:
pass
return cache or None
def handle_proxy_407(url, session):
"""
Prompts the user for the proxy username and password and modifies the
proxy in the session object to include it.
"""
# We could also use HTTPProxyAuth, but this does not work with https
# proxies (see https://github.com/kennethreitz/requests/issues/2061).
scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
if scheme not in session.proxies:
raise ProxyError("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
username, passwd = get_proxy_username_and_pass(scheme)
session.proxies[scheme] = add_username_and_pass_to_url(
session.proxies[scheme], username, passwd)
@memoized
def get_proxy_username_and_pass(scheme):
username = input("\n%s proxy username: " % scheme)
passwd = getpass.getpass("Password:")
return username, passwd
def add_unknown(index, priorities):
priorities = {p[0]: p[1] for p in itervalues(priorities)}
maxp = max(itervalues(priorities)) + 1 if priorities else 1
for dist, info in iteritems(package_cache()):
schannel, dname = dist2pair(dist)
fname = dname + '.tar.bz2'
fkey = dist + '.tar.bz2'
if fkey in index or not info['dirs']:
continue
try:
with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
meta = json.load(fi)
except IOError:
continue
if info['urls']:
url = info['urls'][0]
elif meta.get('url'):
url = meta['url']
elif meta.get('channel'):
url = meta['channel'].rstrip('/') + '/' + fname
else:
url = '<unknown>/' + fname
if url.rsplit('/', 1)[-1] != fname:
continue
channel, schannel2 = Channel(url).url_channel_wtf
if schannel2 != schannel:
continue
priority = priorities.get(schannel, maxp)
if 'link' in meta:
del meta['link']
meta.update({'fn': fname, 'url': url, 'channel': channel,
'schannel': schannel, 'priority': priority})
meta.setdefault('depends', [])
log.debug("adding cached pkg to index: %s" % fkey)
index[fkey] = meta
def add_pip_dependency(index):
for info in itervalues(index):
if (info['name'] == 'python' and
info['version'].startswith(('2.', '3.'))):
info.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
log.debug('channel_urls=' + repr(channel_urls))
# pool = ThreadPool(5)
if index is None:
index = {}
stdoutlog.info("Fetching package metadata ...")
# if not isinstance(channel_urls, dict):
# channel_urls = prioritize_channels(channel_urls)
urls = tuple(filter(offline_keep, channel_urls))
try:
import concurrent.futures
executor = concurrent.futures.ThreadPoolExecutor(10)
except (ImportError, RuntimeError) as e:
# concurrent.futures is only available in Python >= 3.2 or if futures is installed
# RuntimeError is thrown if number of threads are limited by OS
log.debug(repr(e))
session = CondaSession()
repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
for url in urls]
else:
try:
futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
session=CondaSession()) for url in urls)
repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
except RuntimeError as e:
# Cannot start new thread, then give up parallel execution
log.debug(repr(e))
session = CondaSession()
repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
for url in urls]
finally:
executor.shutdown(wait=True)
for channel, repodata in repodatas:
if repodata is None:
continue
new_index = repodata['packages']
url_s, priority = channel_urls[channel]
channel = channel.rstrip('/')
for fn, info in iteritems(new_index):
info['fn'] = fn
info['schannel'] = url_s
info['channel'] = channel
info['priority'] = priority
info['url'] = channel + '/' + fn
key = url_s + '::' + fn if url_s != 'defaults' else fn
index[key] = info
stdoutlog.info('\n')
if unknown:
add_unknown(index, channel_urls)
if context.add_pip_as_python_dependency:
add_pip_dependency(index)
return index
def fetch_pkg(info, dst_dir=None, session=None):
'''
fetch a package given by `info` and store it into `dst_dir`
'''
session = session or CondaSession()
fn = info['fn']
url = info.get('url')
if url is None:
url = info['channel'] + '/' + fn
log.debug("url=%r" % url)
if dst_dir is None:
dst_dir = find_new_location(fn[:-8])[0]
path = join(dst_dir, fn)
download(url, path, session=session, md5=info['md5'], urlstxt=True)
if info.get('sig'):
from .signature import verify
fn2 = fn + '.sig'
url = (info['channel'] if info['sig'] == '.' else
info['sig'].rstrip('/')) + '/' + fn2
log.debug("signature url=%r" % url)
download(url, join(dst_dir, fn2), session=session)
try:
if verify(path):
return
except CondaSignatureError:
raise
raise CondaSignatureError("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None):
assert "::" not in str(url), url
assert "::" not in str(dst_path), str(dst_path)
if not offline_keep(url):
raise RuntimeError("Cannot download in offline mode: %s" % (url,))
pp = dst_path + '.part'
dst_dir = dirname(dst_path)
session = session or CondaSession()
if not context.ssl_verify:
try:
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
except ImportError:
pass
else:
warnings.simplefilter('ignore', InsecureRequestWarning)
if retries is None:
retries = RETRIES
with FileLock(dst_path):
rm_rf(dst_path)
try:
resp = session.get(url, stream=True, proxies=session.proxies, timeout=(3.05, 27))
resp.raise_for_status()
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries)
msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
raise CondaRuntimeError(msg)
except requests.exceptions.ConnectionError as e:
# requests isn't so nice here. For whatever reason, https gives
# this error and http gives the above error. Also, there is no
# status_code attribute here. We have to just check if it looks
# like 407.
# See: https://github.com/kennethreitz/requests/issues/2061.
if "407" in str(e): # Proxy Authentication Required
handle_proxy_407(url, session)
# try again
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries)
msg = "Connection error: %s: %s\n" % (e, url)
stderrlog.info('Could not connect to %s\n' % url)
log.debug(msg)
raise CondaRuntimeError(msg)
except IOError as e:
raise CondaRuntimeError("Could not open '%s': %s" % (url, e))
size = resp.headers.get('Content-Length')
if size:
size = int(size)
fn = basename(dst_path)
getLogger('fetch.start').info((fn[:14], size))
if md5:
h = hashlib.new('md5')
try:
with open(pp, 'wb') as fo:
index = 0
for chunk in resp.iter_content(2**14):
index += len(chunk)
try:
fo.write(chunk)
except IOError:
raise CondaRuntimeError("Failed to write to %r." % pp)
if md5:
h.update(chunk)
if size and 0 <= index <= size:
getLogger('fetch.update').info(index)
except IOError as e:
if e.errno == 104 and retries: # Connection reset by pee
# try again
log.debug("%s, trying again" % e)
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries - 1)
raise CondaRuntimeError("Could not open %r for writing (%s)." % (pp, e))
if size:
getLogger('fetch.stop').info(None)
if md5 and h.hexdigest() != md5:
if retries:
# try again
log.debug("MD5 sums mismatch for download: %s (%s != %s), "
"trying again" % (url, h.hexdigest(), md5))
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries - 1)
raise MD5MismatchError("MD5 sums mismatch for download: %s (%s != %s)"
% (url, h.hexdigest(), md5))
try:
exp_backoff_fn(os.rename, pp, dst_path)
except OSError as e:
raise CondaRuntimeError("Could not rename %r to %r: %r" %
(pp, dst_path, e))
if urlstxt:
add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
"""
Context manager to handle downloads to a tempfile
"""
def __init__(self, url, verbose=True):
self.url = url
self.verbose = verbose
def __enter__(self):
if '://' not in self.url:
# if we provide the file itself, no tmp dir is created
self.tmp_dir = None
return self.url
else:
if self.verbose:
from .console import setup_handlers
setup_handlers()
self.tmp_dir = tempfile.mkdtemp()
dst = join(self.tmp_dir, basename(self.url))
download(self.url, dst)
return dst
def __exit__(self, exc_type, exc_value, traceback):
if self.tmp_dir:
shutil.rmtree(self.tmp_dir)
<|code_end|>
| conda/fetch.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import, unicode_literals
import bz2
import getpass
import hashlib
import json
import os
import requests
import shutil
import tempfile
import warnings
from functools import wraps
from logging import getLogger, DEBUG
from os.path import basename, dirname, join
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
from ._vendor.auxlib.logz import stringify
from .base.context import context
from .common.url import add_username_and_pass_to_url, url_to_path
from .compat import itervalues, input, iteritems
from .connection import CondaSession, RETRIES
from .models.channel import Channel, offline_keep
from .exceptions import (ProxyError, CondaRuntimeError, CondaSignatureError, CondaHTTPError,
MD5MismatchError)
from .install import add_cached_package, find_new_location, package_cache, dist2pair, rm_rf
from .lock import FileLock
from .utils import exp_backoff_fn, memoized
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def create_cache_dir():
cache_dir = join(context.pkgs_dirs[0], 'cache')
try:
os.makedirs(cache_dir)
except OSError:
pass
return cache_dir
def cache_fn_url(url):
md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
return '%s.json' % (md5[:8],)
def add_http_value_to_dict(resp, http_key, d, dict_key):
value = resp.headers.get(http_key)
if value:
d[dict_key] = value
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
def __init__(self, msg):
self.msg = msg
def __call__(self, f):
@wraps(f)
def func(*args, **kwargs):
res = f(*args, **kwargs)
dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
return res
return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
if not offline_keep(url):
return {'packages': {}}
cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
try:
log.debug("Opening repodata cache for %s at %s", url, cache_path)
with open(cache_path) as f:
cache = json.load(f)
except (IOError, ValueError):
cache = {'packages': {}}
if use_cache:
return cache
if not context.ssl_verify:
warnings.simplefilter('ignore', InsecureRequestWarning)
session = session or CondaSession()
headers = {}
if "_etag" in cache:
headers["If-None-Match"] = cache["_etag"]
if "_mod" in cache:
headers["If-Modified-Since"] = cache["_mod"]
if 'repo.continuum.io' in url or url.startswith("file://"):
filename = 'repodata.json.bz2'
headers['Accept-Encoding'] = 'identity'
else:
headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
headers['Content-Type'] = 'application/json'
filename = 'repodata.json'
try:
resp = session.get(url + filename, headers=headers, proxies=session.proxies,
timeout=(3.05, 60))
if log.isEnabledFor(DEBUG):
log.debug(stringify(resp))
resp.raise_for_status()
if resp.status_code != 304:
def get_json_str(filename, resp_content):
if filename.endswith('.bz2'):
return bz2.decompress(resp_content).decode('utf-8')
else:
return resp_content.decode('utf-8')
if url.startswith('file://'):
file_path = url_to_path(url)
with FileLock(dirname(file_path)):
json_str = get_json_str(filename, resp.content)
else:
json_str = get_json_str(filename, resp.content)
cache = json.loads(json_str)
add_http_value_to_dict(resp, 'Etag', cache, '_etag')
add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
except ValueError as e:
raise CondaRuntimeError("Invalid index file: {0}{1}: {2}"
.format(url, filename, e))
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
if e.response.status_code == 404:
if url.endswith('/noarch/'): # noarch directory might not exist
return None
msg = 'Could not find URL: %s' % url
elif e.response.status_code == 403 and url.endswith('/noarch/'):
return None
elif e.response.status_code == 401 and context.channel_alias in url:
# Note, this will not trigger if the binstar configured url does
# not match the conda configured one.
msg = ("Warning: you may need to login to anaconda.org again with "
"'anaconda login' to access private packages(%s, %s)" %
(url, e))
stderrlog.info(msg)
return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
else:
msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
raise CondaHTTPError(msg)
except requests.exceptions.SSLError as e:
msg = "SSL Error: %s\n" % e
stderrlog.info("SSL verification error: %s\n" % e)
log.debug(msg)
except requests.exceptions.ConnectionError as e:
# requests isn't so nice here. For whatever reason, https gives this
# error and http gives the above error. Also, there is no status_code
# attribute here. We have to just check if it looks like 407. See
# https://github.com/kennethreitz/requests/issues/2061.
if "407" in str(e): # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session)
msg = "Connection error: %s: %s\n" % (e, url)
stderrlog.info('Could not connect to %s\n' % url)
log.debug(msg)
if fail_unknown_host:
raise CondaRuntimeError(msg)
raise CondaRuntimeError(msg)
cache['_url'] = url
try:
with open(cache_path, 'w') as fo:
json.dump(cache, fo, indent=2, sort_keys=True)
except IOError:
pass
return cache or None
def handle_proxy_407(url, session):
"""
Prompts the user for the proxy username and password and modifies the
proxy in the session object to include it.
"""
# We could also use HTTPProxyAuth, but this does not work with https
# proxies (see https://github.com/kennethreitz/requests/issues/2061).
scheme = requests.packages.urllib3.util.url.parse_url(url).scheme
if scheme not in session.proxies:
raise ProxyError("""Could not find a proxy for %r. See
http://conda.pydata.org/docs/html#configure-conda-for-use-behind-a-proxy-server
for more information on how to configure proxies.""" % scheme)
username, passwd = get_proxy_username_and_pass(scheme)
session.proxies[scheme] = add_username_and_pass_to_url(
session.proxies[scheme], username, passwd)
@memoized
def get_proxy_username_and_pass(scheme):
username = input("\n%s proxy username: " % scheme)
passwd = getpass.getpass("Password:")
return username, passwd
def add_unknown(index, priorities):
priorities = {p[0]: p[1] for p in itervalues(priorities)}
maxp = max(itervalues(priorities)) + 1 if priorities else 1
for dist, info in iteritems(package_cache()):
schannel, dname = dist2pair(dist)
fname = dname + '.tar.bz2'
fkey = dist + '.tar.bz2'
if fkey in index or not info['dirs']:
continue
try:
with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
meta = json.load(fi)
except IOError:
continue
if info['urls']:
url = info['urls'][0]
elif meta.get('url'):
url = meta['url']
elif meta.get('channel'):
url = meta['channel'].rstrip('/') + '/' + fname
else:
url = '<unknown>/' + fname
if url.rsplit('/', 1)[-1] != fname:
continue
channel, schannel2 = Channel(url).url_channel_wtf
if schannel2 != schannel:
continue
priority = priorities.get(schannel, maxp)
if 'link' in meta:
del meta['link']
meta.update({'fn': fname, 'url': url, 'channel': channel,
'schannel': schannel, 'priority': priority})
meta.setdefault('depends', [])
log.debug("adding cached pkg to index: %s" % fkey)
index[fkey] = meta
def add_pip_dependency(index):
for info in itervalues(index):
if (info['name'] == 'python' and
info['version'].startswith(('2.', '3.'))):
info.setdefault('depends', []).append('pip')
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
log.debug('channel_urls=' + repr(channel_urls))
# pool = ThreadPool(5)
if index is None:
index = {}
stdoutlog.info("Fetching package metadata ...")
# if not isinstance(channel_urls, dict):
# channel_urls = prioritize_channels(channel_urls)
urls = tuple(filter(offline_keep, channel_urls))
try:
import concurrent.futures
executor = concurrent.futures.ThreadPoolExecutor(10)
except (ImportError, RuntimeError) as e:
# concurrent.futures is only available in Python >= 3.2 or if futures is installed
# RuntimeError is thrown if number of threads are limited by OS
log.debug(repr(e))
session = CondaSession()
repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
for url in urls]
else:
try:
futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
session=CondaSession()) for url in urls)
repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
except RuntimeError as e:
# Cannot start new thread, then give up parallel execution
log.debug(repr(e))
session = CondaSession()
repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
for url in urls]
finally:
executor.shutdown(wait=True)
for channel, repodata in repodatas:
if repodata is None:
continue
new_index = repodata['packages']
url_s, priority = channel_urls[channel]
channel = channel.rstrip('/')
for fn, info in iteritems(new_index):
info['fn'] = fn
info['schannel'] = url_s
info['channel'] = channel
info['priority'] = priority
info['url'] = channel + '/' + fn
key = url_s + '::' + fn if url_s != 'defaults' else fn
index[key] = info
stdoutlog.info('\n')
if unknown:
add_unknown(index, channel_urls)
if context.add_pip_as_python_dependency:
add_pip_dependency(index)
return index
def fetch_pkg(info, dst_dir=None, session=None):
'''
fetch a package given by `info` and store it into `dst_dir`
'''
session = session or CondaSession()
fn = info['fn']
url = info.get('url')
if url is None:
url = info['channel'] + '/' + fn
log.debug("url=%r" % url)
if dst_dir is None:
dst_dir = find_new_location(fn[:-8])[0]
path = join(dst_dir, fn)
download(url, path, session=session, md5=info['md5'], urlstxt=True)
if info.get('sig'):
from .signature import verify
fn2 = fn + '.sig'
url = (info['channel'] if info['sig'] == '.' else
info['sig'].rstrip('/')) + '/' + fn2
log.debug("signature url=%r" % url)
download(url, join(dst_dir, fn2), session=session)
try:
if verify(path):
return
except CondaSignatureError:
raise
raise CondaSignatureError("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None):
assert "::" not in str(dst_path), str(dst_path)
if not offline_keep(url):
raise RuntimeError("Cannot download in offline mode: %s" % (url,))
pp = dst_path + '.part'
dst_dir = dirname(dst_path)
session = session or CondaSession()
if not context.ssl_verify:
try:
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
except ImportError:
pass
else:
warnings.simplefilter('ignore', InsecureRequestWarning)
if retries is None:
retries = RETRIES
with FileLock(dst_path):
rm_rf(dst_path)
try:
resp = session.get(url, stream=True, proxies=session.proxies, timeout=(3.05, 27))
resp.raise_for_status()
except requests.exceptions.HTTPError as e:
if e.response.status_code == 407: # Proxy Authentication Required
handle_proxy_407(url, session)
# Try again
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries)
msg = "HTTPError: %s: %s\n" % (e, url)
log.debug(msg)
raise CondaRuntimeError(msg)
except requests.exceptions.ConnectionError as e:
# requests isn't so nice here. For whatever reason, https gives
# this error and http gives the above error. Also, there is no
# status_code attribute here. We have to just check if it looks
# like 407.
# See: https://github.com/kennethreitz/requests/issues/2061.
if "407" in str(e): # Proxy Authentication Required
handle_proxy_407(url, session)
# try again
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries)
msg = "Connection error: %s: %s\n" % (e, url)
stderrlog.info('Could not connect to %s\n' % url)
log.debug(msg)
raise CondaRuntimeError(msg)
except IOError as e:
raise CondaRuntimeError("Could not open '%s': %s" % (url, e))
size = resp.headers.get('Content-Length')
if size:
size = int(size)
fn = basename(dst_path)
getLogger('fetch.start').info((fn[:14], size))
if md5:
h = hashlib.new('md5')
try:
with open(pp, 'wb') as fo:
index = 0
for chunk in resp.iter_content(2**14):
index += len(chunk)
try:
fo.write(chunk)
except IOError:
raise CondaRuntimeError("Failed to write to %r." % pp)
if md5:
h.update(chunk)
if size and 0 <= index <= size:
getLogger('fetch.update').info(index)
except IOError as e:
if e.errno == 104 and retries: # Connection reset by pee
# try again
log.debug("%s, trying again" % e)
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries - 1)
raise CondaRuntimeError("Could not open %r for writing (%s)." % (pp, e))
if size:
getLogger('fetch.stop').info(None)
if md5 and h.hexdigest() != md5:
if retries:
# try again
log.debug("MD5 sums mismatch for download: %s (%s != %s), "
"trying again" % (url, h.hexdigest(), md5))
return download(url, dst_path, session=session, md5=md5,
urlstxt=urlstxt, retries=retries - 1)
raise MD5MismatchError("MD5 sums mismatch for download: %s (%s != %s)"
% (url, h.hexdigest(), md5))
try:
exp_backoff_fn(os.rename, pp, dst_path)
except OSError as e:
raise CondaRuntimeError("Could not rename %r to %r: %r" %
(pp, dst_path, e))
if urlstxt:
add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
class TmpDownload(object):
    """Context manager that fetches a URL into a temporary directory.

    Entering yields a local path to the downloaded file; exiting removes
    the temporary directory (if one was created).  A plain local path
    (no ``://``) is returned as-is without downloading anything.
    """
    def __init__(self, url, verbose=True):
        self.url = url
        self.verbose = verbose
    def __enter__(self):
        # a bare filesystem path needs no download and no temp dir
        if '://' not in self.url:
            self.tmp_dir = None
            return self.url
        if self.verbose:
            from .console import setup_handlers
            setup_handlers()
        self.tmp_dir = tempfile.mkdtemp()
        local_path = join(self.tmp_dir, basename(self.url))
        download(self.url, local_path)
        return local_path
    def __exit__(self, exc_type, exc_value, traceback):
        if self.tmp_dir:
            shutil.rmtree(self.tmp_dir)
| conda/fetch.py
--- a/conda/fetch.py
+++ b/conda/fetch.py
@@ -351,7 +351,6 @@ def fetch_pkg(info, dst_dir=None, session=None):
def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None):
- assert "::" not in str(url), url
assert "::" not in str(dst_path), str(dst_path)
if not offline_keep(url):
raise RuntimeError("Cannot download in offline mode: %s" % (url,)) |
The CONDA_CHANNELS environment variable doesn't work.
Fixed with #3390.
| conda/common/configuration.py
<|code_start|>
# -*- coding: utf-8 -*-
"""
A generalized application configuration utility.
Features include:
- lazy eval
- merges configuration files
- parameter type validation, with custom validation
- parameter aliases
Easily extensible to other source formats, e.g. json and ini
Limitations:
- at the moment only supports a "flat" config structure; no nested data structures
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
from collections import Mapping, Set, defaultdict
from enum import Enum
from glob import glob
from itertools import chain
from logging import getLogger
from os import environ, stat
from os.path import join
from stat import S_IFDIR, S_IFMT, S_IFREG
try:
from cytoolz.dicttoolz import merge
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concat, concatv, unique
except ImportError:
from .._vendor.toolz.dicttoolz import merge
from .._vendor.toolz.functoolz import excepts
from .._vendor.toolz.itertoolz import concat, concatv, unique
try:
from ruamel_yaml.comments import CommentedSeq, CommentedMap
except ImportError: # pragma: no cover
from ruamel.yaml.comments import CommentedSeq, CommentedMap # pragma: no cover
from .. import CondaError, CondaMultiError
from .._vendor.auxlib.collection import first, frozendict, last, AttrDict
from .._vendor.auxlib.exceptions import ThisShouldNeverHappenError
from .._vendor.auxlib.path import expand
from .._vendor.auxlib.type_coercion import typify_data_structure, TypeCoercionError
from ..base.constants import EMPTY_MAP, NULL
from .compat import (isiterable, iteritems, odict, primitive_types, text_type,
with_metaclass, string_types, itervalues)
from .yaml import yaml_load
__all__ = ["Configuration", "PrimitiveParameter",
"SequenceParameter", "MapParameter"]
log = getLogger(__name__)
def pretty_list(iterable, padding='  '):  # TODO: move elsewhere in conda.common
    """Render *iterable* as indented, yaml-style '- item' lines, one per line."""
    items = iterable if isiterable(iterable) else [iterable]
    lines = ["%s- %s" % (padding, item) for item in items]
    return '\n'.join(lines)
def pretty_map(dictionary, padding='  '):
    """Render *dictionary* as indented 'key: value' lines, one per line."""
    lines = ("%s%s: %s" % (padding, k, v) for k, v in iteritems(dictionary))
    return '\n'.join(lines)
class ConfigurationError(CondaError):
    """Base class for all configuration-related errors in this module."""
    pass
class ValidationError(ConfigurationError):
    """Raised when a configuration parameter fails type or custom validation.

    Args:
        parameter_name (str): Name of the offending parameter.
        parameter_value: The invalid value as it was parsed.
        source (str): Where the value was declared (file path, 'envvars', ...).
        msg (str): Optional pre-formatted message passed to the base class.
    """
    def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs):
        self.parameter_name = parameter_name
        self.parameter_value = parameter_value
        self.source = source
        # BUG FIX: super() previously named ConfigurationError, which skips
        # ConfigurationError itself in the MRO.  Name this class so any future
        # ConfigurationError.__init__ is not silently bypassed.
        super(ValidationError, self).__init__(msg, **kwargs)
    def __str__(self):
        return ("Parameter %s = %r declared in %s is invalid."
                % (self.parameter_name, self.parameter_value, self.source))
class MultipleKeysError(ValidationError):
    """Raised when a single data source defines a parameter under several aliases."""
    def __init__(self, source, keys, preferred_key):
        self.source = source
        self.keys = keys
        # BUG FIX: pretty_list() emits no trailing newline, so the original
        # format string ran the last key and "Must declare..." together on
        # one line.  Add the missing '\n' separator.
        msg = ("Multiple aliased keys in file %s:\n"
               "%s\n"
               "Must declare only one. Prefer '%s'" % (source, pretty_list(keys), preferred_key))
        super(MultipleKeysError, self).__init__(preferred_key, None, source, msg=msg)
class InvalidTypeError(ValidationError):
    """Raised when a parameter value has a type outside the accepted set."""
    def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None):
        self.wrong_type = wrong_type
        self.valid_types = valid_types
        if msg is None:
            header = ("Parameter %s = %r declared in %s has type %s."
                      % (parameter_name, parameter_value, source, wrong_type))
            msg = "%s\nValid types: %s." % (header, pretty_list(valid_types))
        super(InvalidTypeError, self).__init__(parameter_name, parameter_value, source, msg=msg)
class InvalidElementTypeError(InvalidTypeError):
    """Raised when one element inside a sequence/map parameter has a bad type."""
    def __init__(self, parameter_name, parameter_value, source, wrong_type,
                 valid_types, index_or_key):
        # sequences are addressed by integer index, maps by key
        if isinstance(index_or_key, int):
            qualifier = "at index"
        else:
            qualifier = "for key"
        msg = ("Parameter %s declared in %s has invalid element %r %s %s.\n"
               "Valid element types:\n"
               "%s." % (parameter_name, source, parameter_value, qualifier,
                        index_or_key, pretty_list(valid_types)))
        super(InvalidElementTypeError, self).__init__(parameter_name, parameter_value, source,
                                                      wrong_type, valid_types, msg=msg)
class CustomValidationError(ValidationError):
    """Raised when a parameter's custom validation hook rejects a value."""
    def __init__(self, parameter_name, parameter_value, source, custom_message):
        preamble = ("Parameter %s = %r declared in %s is invalid.\n"
                    % (parameter_name, parameter_value, source))
        super(CustomValidationError, self).__init__(parameter_name, parameter_value, source,
                                                    msg=preamble + custom_message)
class MultiValidationError(CondaMultiError, ConfigurationError):
    """Aggregate of several validation errors, raised as a single exception."""
    def __init__(self, errors, *args, **kwargs):
        super(MultiValidationError, self).__init__(errors, *args, **kwargs)
def raise_errors(errors):
    """Raise accumulated validation errors, or return True when there are none.

    A single error is raised as-is; several errors are bundled into one
    MultiValidationError.
    """
    if not errors:
        return True
    if len(errors) == 1:
        raise errors[0]
    raise MultiValidationError(errors)
class ParameterFlag(Enum):
    """Markers attached to keys/values via '#!flag' comments in config sources."""
    final = 'final'
    top = 'top'
    bottom = 'bottom'
    def __str__(self):
        return "%s" % self.value
    @classmethod
    def from_name(cls, name):
        # lookup by member name, e.g. 'final' -> ParameterFlag.final
        return cls[name]
    @classmethod
    def from_value(cls, value):
        # lookup by member value, e.g. 'top' -> ParameterFlag.top
        return cls(value)
    @classmethod
    def from_string(cls, string):
        # tolerant parse: strip comment markers; None for anything unrecognized
        try:
            return cls.from_value(string.strip('!#'))
        except (ValueError, AttributeError):
            return None
# TODO: move elsewhere, probably auxlib
# TODO: need to add order to at least frozendict, and preferrably frozenset
def make_immutable(value):
    """Return an immutable counterpart of *value*; other values pass through."""
    # NOTE: order matters -- mappings and sets are themselves iterable, so they
    # must be tested before the generic isiterable() fallback.
    if isinstance(value, Mapping):
        return frozendict(value)
    if isinstance(value, Set):
        return frozenset(value)
    if isiterable(value):
        return tuple(value)
    return value
@with_metaclass(ABCMeta)
class RawParameter(object):
    """A single key/value read from one configuration source, before typing.

    Subclasses wrap a specific source (yaml file, environment variable,
    argparse namespace) and expose the value plus any ParameterFlag markers.
    """
    def __init__(self, source, key, raw_value):
        self.source = source
        self.key = key
        self._raw_value = raw_value
    def __repr__(self):
        return text_type(vars(self))
    @abstractmethod
    def value(self, parameter_type):
        """Return this parameter's value, shaped for the given Parameter class."""
        raise NotImplementedError()
    @abstractmethod
    def keyflag(self):
        """Return the ParameterFlag attached to the key itself, or None."""
        raise NotImplementedError()
    @abstractmethod
    def valueflags(self, parameter_type):
        """Return flags attached to the value (per-element for containers)."""
        raise NotImplementedError()
    @classmethod
    def make_raw_parameters(cls, source, from_map):
        # wrap every key of from_map in an instance of cls; EMPTY_MAP when falsy
        if from_map:
            return dict((key, cls(source, key, from_map[key])) for key in from_map)
        return EMPTY_MAP
class EnvRawParameter(RawParameter):
    """Raw parameter sourced from APPNAME_* environment variables."""
    source = 'envvars'
    def value(self, parameter_type):
        # BUG FIX (#3390): sequence parameters (e.g. CONDA_CHANNELS) were
        # returned as one unsplit string, so list-valued settings could never
        # be configured from the environment.  If the Parameter class declares
        # a 'string_delimiter' attribute, split the raw value on it.
        if hasattr(parameter_type, 'string_delimiter'):
            string_delimiter = getattr(parameter_type, 'string_delimiter')
            # TODO: also strip !top / !bottom markers from individual elements
            raw_value = self.__important_split_value[0]
            if string_delimiter in raw_value:
                value = raw_value.split(string_delimiter)
            else:
                value = [raw_value]
            return tuple(v.strip() for v in value)
        # scalar parameter: strip any trailing '!important' marker
        return self.__important_split_value[0].strip()
    def keyflag(self):
        # a '...!important' suffix in the env var marks the key as final
        return ParameterFlag.final if len(self.__important_split_value) >= 2 else None
    def valueflags(self, parameter_type):
        if hasattr(parameter_type, 'string_delimiter'):
            string_delimiter = getattr(parameter_type, 'string_delimiter')
            # one (absent) flag slot per element, matching value()'s length
            return tuple(None for _ in self.__important_split_value[0].split(string_delimiter))
        return None
    @property
    def __important_split_value(self):
        # [value] or [value, ''] when the raw string ends with '!important'
        return self._raw_value.split("!important")
    @classmethod
    def make_raw_parameters(cls, appname):
        # collect APPNAME_FOO=bar from the environment as {'foo': 'bar'}
        keystart = "{0}_".format(appname.upper())
        raw_env = dict((k.replace(keystart, '').lower(), v)
                       for k, v in iteritems(environ) if k.startswith(keystart))
        return super(EnvRawParameter, cls).make_raw_parameters(EnvRawParameter.source, raw_env)
class ArgParseRawParameter(RawParameter):
    """Raw parameter sourced from parsed command-line arguments."""
    source = 'cmd_line'
    def value(self, parameter_type):
        # argparse already yields typed python objects; just freeze them
        return make_immutable(self._raw_value)
    def keyflag(self):
        # command-line values carry no '#!flag' markers
        return None
    def valueflags(self, parameter_type):
        return None
    @classmethod
    def make_raw_parameters(cls, args_from_argparse):
        source = ArgParseRawParameter.source
        return super(ArgParseRawParameter, cls).make_raw_parameters(source, args_from_argparse)
class YamlRawParameter(RawParameter):
    """Raw parameter backed by a ruamel.yaml node, preserving '#!flag' comments."""
    # this class should encapsulate all direct use of ruamel.yaml in this module
    def __init__(self, source, key, raw_value, keycomment):
        self._keycomment = keycomment
        super(YamlRawParameter, self).__init__(source, key, raw_value)
    def value(self, parameter_type):
        self.__process(parameter_type)
        return self._value
    def keyflag(self):
        # flag parsed from the comment that trailed the key in the yaml file
        return ParameterFlag.from_string(self._keycomment)
    def valueflags(self, parameter_type):
        self.__process(parameter_type)
        return self._valueflags
    def __process(self, parameter_type):
        # lazily convert the ruamel node into (_value, _valueflags); memoized
        # via the presence of the _value attribute
        if hasattr(self, '_value'):
            return
        elif isinstance(self._raw_value, CommentedSeq):
            valuecomments = self._get_yaml_list_comments(self._raw_value)
            self._valueflags = tuple(ParameterFlag.from_string(s) for s in valuecomments)
            self._value = tuple(self._raw_value)
        elif isinstance(self._raw_value, CommentedMap):
            valuecomments = self._get_yaml_map_comments(self._raw_value)
            self._valueflags = dict((k, ParameterFlag.from_string(v))
                                    for k, v in iteritems(valuecomments) if v is not None)
            self._value = frozendict(self._raw_value)
        elif isinstance(self._raw_value, primitive_types):
            self._valueflags = None
            self._value = self._raw_value
        else:
            raise ThisShouldNeverHappenError()  # pragma: no cover
    @staticmethod
    def _get_yaml_key_comment(commented_dict, key):
        # reaches into ruamel's comment-attribute ('ca') internals; None if absent
        try:
            return commented_dict.ca.items[key][2].value.strip()
        except (AttributeError, KeyError):
            return None
    @staticmethod
    def _get_yaml_list_comments(value):
        # one entry per list element: its trailing comment text, or None
        items = value.ca.items
        raw_comment_lines = tuple(excepts((AttributeError, KeyError, TypeError),
                                          lambda q: items.get(q)[0].value.strip() or None,
                                          lambda _: None  # default value on exception
                                          )(q)
                                  for q in range(len(value)))
        return raw_comment_lines
    @staticmethod
    def _get_yaml_map_comments(rawvalue):
        # map of key -> trailing comment text, or None
        return dict((key, excepts(KeyError,
                                  lambda k: rawvalue.ca.items[k][2].value.strip() or None,
                                  lambda _: None  # default value on exception
                                  )(key))
                    for key in rawvalue)
    @classmethod
    def make_raw_parameters(cls, source, from_map):
        if from_map:
            return dict((key, cls(source, key, from_map[key],
                                  cls._get_yaml_key_comment(from_map, key)))
                        for key in from_map)
        return EMPTY_MAP
    @classmethod
    def make_raw_parameters_from_file(cls, filepath):
        """Parse the yaml file at *filepath* into a dict of YamlRawParameters."""
        with open(filepath, 'r') as fh:
            ruamel_yaml = yaml_load(fh)
        return cls.make_raw_parameters(filepath, ruamel_yaml) or EMPTY_MAP
def load_file_configs(search_path):
    """Load every config file reachable from *search_path*.

    Returns an ordered mapping of filepath -> {key: YamlRawParameter},
    preserving search-path order.  Paths that do not exist are skipped.
    """
    def _load_file(fullpath):
        assert fullpath.endswith(".yml") or fullpath.endswith("condarc"), fullpath
        yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath)
    def _load_dir(fullpath):
        for filepath in glob(join(fullpath, "*.yml")):
            yield filepath, YamlRawParameter.make_raw_parameters_from_file(filepath)
    # dispatch on the stat() file-type bits: regular file vs directory
    loaders = {
        S_IFREG: _load_file,
        S_IFDIR: _load_dir,
    }
    def _file_type(path):
        # None when the path does not exist (or cannot be stat'ed)
        try:
            return S_IFMT(stat(path).st_mode)
        except OSError:
            return None
    raw_data = odict()
    for path in (expand(p) for p in search_path):
        file_type = _file_type(path)
        if file_type is not None:
            raw_data.update(loaders[file_type](path))
    return raw_data
@with_metaclass(ABCMeta)
class Parameter(object):
    """Abstract descriptor for one configuration setting on a Configuration class.

    Subclasses implement `_merge` to combine the raw values found across all
    data sources (files, env vars, command line) into one final value.
    """
    _type = None           # expected python type of the merged value
    _element_type = None   # expected type of contained elements, if any
    def __init__(self, default, aliases=(), validation=None):
        self._name = None
        self._names = None
        self.default = default
        self.aliases = aliases
        self._validation = validation
    def _set_name(self, name):
        # this is an explicit method, and not a descriptor/setter
        # it's meant to be called by the Configuration metaclass
        self._name = name
        self._names = frozenset(x for x in chain(self.aliases, (name, )))
        return name
    @property
    def name(self):
        """Primary attribute name, as assigned by the Configuration metaclass."""
        if self._name is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._name
    @property
    def names(self):
        """Frozenset of the primary name plus all aliases."""
        if self._names is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._names
    def _raw_parameters_from_single_source(self, raw_parameters):
        """Return a (match, error) pair for this parameter within one source."""
        # while supporting parameter name aliases, we enforce that only one definition is given
        # per data source
        keys = self.names & frozenset(raw_parameters.keys())
        matches = {key: raw_parameters[key] for key in keys}
        numkeys = len(keys)
        if numkeys == 0:
            return None, None
        elif numkeys == 1:
            return next(itervalues(matches)), None
        elif self.name in keys:
            # several aliases present; prefer the primary name but still report
            return matches[self.name], MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                                         keys, self.name)
        else:
            return None, MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                           keys, self.name)
    def _get_all_matches(self, instance):
        # a match is a raw parameter instance
        matches = []
        multikey_exceptions = []
        for filepath, raw_parameters in iteritems(instance.raw_data):
            match, error = self._raw_parameters_from_single_source(raw_parameters)
            if match is not None:
                matches.append(match)
            if error:
                multikey_exceptions.append(error)
        return matches, multikey_exceptions
    @abstractmethod
    def _merge(self, matches):
        """Combine raw matches (ordered by source precedence) into one value."""
        raise NotImplementedError()
    def __get__(self, instance, instance_type):
        # strategy is "extract and merge," which is actually just map and reduce
        # extract matches from each source in SEARCH_PATH
        # then merge matches together
        if self.name in instance._cache:
            return instance._cache[self.name]
        matches, errors = self._get_all_matches(instance)
        try:
            result = typify_data_structure(self._merge(matches) if matches else self.default,
                                           self._element_type)
        except TypeCoercionError as e:
            errors.append(CustomValidationError(self.name, e.value, "<<merged>>", text_type(e)))
        else:
            errors.extend(self.collect_errors(instance, result))
        raise_errors(errors)
        instance._cache[self.name] = result
        return result
    def collect_errors(self, instance, value, source="<<merged>>"):
        """Validate a Parameter value.
        Args:
            instance (Configuration): The instance object to which the Parameter descriptor is
                attached.
            value: The value to be validated.
        """
        errors = []
        if not isinstance(value, self._type):
            errors.append(InvalidTypeError(self.name, value, source, type(value),
                                           self._type))
        elif self._validation is not None:
            # custom hook: False means invalid; a string is a custom error message
            result = self._validation(value)
            if result is False:
                errors.append(ValidationError(self.name, value, source))
            elif isinstance(result, string_types):
                errors.append(CustomValidationError(self.name, value, source, result))
        return errors
    def _match_key_is_important(self, raw_parameter):
        # a key flagged '#!final' cuts off lower-precedence matches
        return raw_parameter.keyflag() is ParameterFlag.final
    def _first_important_matches(self, matches):
        """Return matches up to and including the first '#!final' one."""
        idx = first(enumerate(matches), lambda x: self._match_key_is_important(x[1]),
                    apply=lambda x: x[0])
        return matches if idx is None else matches[:idx+1]
    @staticmethod
    def _str_format_flag(flag):
        # render a ParameterFlag back into its ' #!flag' comment form
        return " #!%s" % flag if flag is not None else ''
    @staticmethod
    def _str_format_value(value):
        if value is None:
            return 'None'
        return value
    @classmethod
    def repr_raw(cls, raw_parameter):
        """Render a raw parameter back into its source-file representation."""
        raise NotImplementedError()
class PrimitiveParameter(Parameter):
    """Parameter type for a Configuration class that holds a single python primitive value.

    The python primitive types are str, int, float, complex, bool, and NoneType.
    In addition, python 2 has long and unicode types.
    """
    def __init__(self, default, aliases=(), validation=None, parameter_type=None):
        """
        Args:
            default (Any): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
            parameter_type (type or Tuple[type]): Type-validation of parameter's value. If None,
                type(default) is used.
        """
        self._type = type(default) if parameter_type is None else parameter_type
        self._element_type = self._type
        super(PrimitiveParameter, self).__init__(default, aliases, validation)
    def _merge(self, matches):
        # the first match whose key is flagged '#!final' wins outright
        important_match = next((m for m in matches if self._match_key_is_important(m)), None)
        if important_match is not None:
            return important_match.value(self.__class__)
        # otherwise the last (highest-precedence) non-None match wins
        winner = next((m for m in reversed(matches) if m is not None), None)
        if winner is not None:
            return winner.value(self.__class__)
        raise ThisShouldNeverHappenError()  # pragma: no cover
    @classmethod
    def repr_raw(cls, raw_parameter):
        flag_suffix = cls._str_format_flag(raw_parameter.keyflag())
        return "%s: %s%s" % (raw_parameter.key,
                             cls._str_format_value(raw_parameter.value(cls)),
                             flag_suffix)
class SequenceParameter(Parameter):
    """Parameter type for a Configuration class that holds a sequence (i.e. list) of python
    primitive values.
    """
    _type = tuple
    def __init__(self, element_type, default=(), aliases=(), validation=None):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element in
                the sequence.
            default (Iterable[str]): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        super(SequenceParameter, self).__init__(default, aliases, validation)
    def collect_errors(self, instance, value, source="<<merged>>"):
        # run the base Parameter checks first, then type-check each element
        errors = super(SequenceParameter, self).collect_errors(instance, value)
        element_type = self._element_type
        for idx, element in enumerate(value):
            if not isinstance(element, element_type):
                errors.append(InvalidElementTypeError(self.name, element, source,
                                                      type(element), element_type, idx))
        return errors
    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)
        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker):
            # elements whose per-element flag equals `marker` (#!top / #!bottom)
            return tuple(line
                         for line, flag in zip(match.value(self.__class__),
                                               match.valueflags(self.__class__))
                         if flag is marker)
        top_lines = concat(get_marked_lines(m, ParameterFlag.top) for m in relevant_matches)
        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom) for m in
                              reversed(relevant_matches))
        # now, concat all lines, while reversing the matches
        # reverse because elements closer to the end of search path take precedence
        all_lines = concat(m.value(self.__class__) for m in reversed(relevant_matches))
        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))
        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        # for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))
        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))
    @classmethod
    def repr_raw(cls, raw_parameter):
        # render as a yaml list, echoing any per-line '#!flag' markers
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                cls._str_format_flag(raw_parameter.keyflag())))
        for q, value in enumerate(raw_parameter.value(cls)):
            valueflag = raw_parameter.valueflags(cls)[q]
            lines.append("  - %s%s" % (cls._str_format_value(value),
                                       cls._str_format_flag(valueflag)))
        return '\n'.join(lines)
class MapParameter(Parameter):
    """Parameter type for a Configuration class that holds a map (i.e. dict) of python
    primitive values.
    """
    _type = dict
    def __init__(self, element_type, default=None, aliases=(), validation=None):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element.
            default (Mapping): The parameter's default value. If None, will be an empty dict.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        super(MapParameter, self).__init__(default or dict(), aliases, validation)
    def collect_errors(self, instance, value, source="<<merged>>"):
        # base Parameter checks first, then type-check every value in the map
        errors = super(MapParameter, self).collect_errors(instance, value)
        element_type = self._element_type
        errors.extend(InvalidElementTypeError(self.name, val, source, type(val), element_type, key)
                      for key, val in iteritems(value) if not isinstance(val, element_type))
        return errors
    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)
        # mapkeys with important matches
        def key_is_important(match, key):
            # an entry flagged '#!final' must survive later overwrites
            return match.valueflags(self.__class__).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self.__class__))
                                    if key_is_important(match, k))
                               for match in relevant_matches)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self.__class__) for m in relevant_matches),
                             reversed(important_maps)))
    @classmethod
    def repr_raw(cls, raw_parameter):
        # render as a yaml map, echoing any per-entry '#!flag' markers
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                cls._str_format_flag(raw_parameter.keyflag())))
        for valuekey, value in iteritems(raw_parameter.value(cls)):
            valueflag = raw_parameter.valueflags(cls).get(valuekey)
            lines.append("  %s: %s%s" % (valuekey, cls._str_format_value(value),
                                         cls._str_format_flag(valueflag)))
        return '\n'.join(lines)
class ConfigurationType(type):
    """metaclass for Configuration"""
    def __init__(cls, name, bases, attr):
        super(ConfigurationType, cls).__init__(name, bases, attr)
        # call _set_name for each parameter
        # (descriptors don't otherwise know which attribute name they're bound
        #  to; note the generator's `name` shadows the class-name argument)
        cls.parameter_names = tuple(p._set_name(name) for name, p in iteritems(cls.__dict__)
                                    if isinstance(p, Parameter))
@with_metaclass(ConfigurationType)
class Configuration(object):
    """Base class an application subclasses with Parameter descriptors.

    Raw data from each source (files on the search path, environment
    variables, argparse) is kept per-source in `raw_data`; merged, typed
    values are computed lazily by the Parameter descriptors and memoized
    in `_cache`.
    """
    def __init__(self, search_path=(), app_name=None, argparse_args=None):
        self.raw_data = odict()
        self._cache = dict()
        self._validation_errors = defaultdict(list)
        if search_path:
            self._add_search_path(search_path)
        if app_name is not None:
            self._add_env_vars(app_name)
        if argparse_args is not None:
            self._add_argparse_args(argparse_args)
    def _add_search_path(self, search_path):
        # load every config file found along search_path, in order
        return self._add_raw_data(load_file_configs(search_path))
    def _add_env_vars(self, app_name):
        # APPNAME_* environment variables become one 'envvars' source
        self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name)
        self._cache = dict()
        return self
    def _add_argparse_args(self, argparse_args):
        # only keep argparse values that were actually supplied (not NULL)
        self._argparse_args = AttrDict((k, v) for k, v, in iteritems(vars(argparse_args))
                                       if v is not NULL)
        source = ArgParseRawParameter.source
        self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args)
        self._cache = dict()
        return self
    def _add_raw_data(self, raw_data):
        # adding any source invalidates previously merged values
        self.raw_data.update(raw_data)
        self._cache = dict()
        return self
    def check_source(self, source):
        """Type and validate one source; return (typed_values, validation_errors)."""
        # this method ends up duplicating much of the logic of Parameter.__get__
        # I haven't yet found a way to make it more DRY though
        typed_values = {}
        validation_errors = []
        raw_parameters = self.raw_data[source]
        for key in self.parameter_names:
            parameter = self.__class__.__dict__[key]
            match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters)
            if multikey_error:
                validation_errors.append(multikey_error)
            if match is not None:
                try:
                    typed_value = typify_data_structure(match.value(parameter.__class__),
                                                        parameter._element_type)
                except TypeCoercionError as e:
                    validation_errors.append(CustomValidationError(match.key, e.value,
                                                                   match.source, text_type(e)))
                else:
                    collected_errors = parameter.collect_errors(self, typed_value, match.source)
                    if collected_errors:
                        validation_errors.extend(collected_errors)
                    else:
                        typed_values[match.key] = typed_value  # parameter.repr_raw(match)
            else:
                # this situation will happen if there is a multikey_error and none of the
                # matched keys is the primary key
                pass
        return typed_values, validation_errors
    def validate_all(self):
        """Raise (possibly aggregated) errors for every invalid value in any source."""
        validation_errors = list(chain.from_iterable(self.check_source(source)[1]
                                                     for source in self.raw_data))
        raise_errors(validation_errors)
    def collect_all(self):
        """Return {source: typed_values} for all sources, raising on any error."""
        typed_values = odict()
        validation_errors = odict()
        for source in self.raw_data:
            typed_values[source], validation_errors[source] = self.check_source(source)
        raise_errors(tuple(chain.from_iterable(itervalues(validation_errors))))
        return odict((k, v) for k, v in iteritems(typed_values) if v)
<|code_end|>
| conda/common/configuration.py
# -*- coding: utf-8 -*-
"""
A generalized application configuration utility.
Features include:
- lazy eval
- merges configuration files
- parameter type validation, with custom validation
- parameter aliases
Easily extensible to other source formats, e.g. json and ini
Limitations:
- at the moment only supports a "flat" config structure; no nested data structures
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
from collections import Mapping, Set, defaultdict
from enum import Enum
from glob import glob
from itertools import chain
from logging import getLogger
from os import environ, stat
from os.path import join
from stat import S_IFDIR, S_IFMT, S_IFREG
try:
from cytoolz.dicttoolz import merge
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concat, concatv, unique
except ImportError:
from .._vendor.toolz.dicttoolz import merge
from .._vendor.toolz.functoolz import excepts
from .._vendor.toolz.itertoolz import concat, concatv, unique
try:
from ruamel_yaml.comments import CommentedSeq, CommentedMap
except ImportError: # pragma: no cover
from ruamel.yaml.comments import CommentedSeq, CommentedMap # pragma: no cover
from .. import CondaError, CondaMultiError
from .._vendor.auxlib.collection import first, frozendict, last, AttrDict
from .._vendor.auxlib.exceptions import ThisShouldNeverHappenError
from .._vendor.auxlib.path import expand
from .._vendor.auxlib.type_coercion import typify_data_structure, TypeCoercionError
from ..base.constants import EMPTY_MAP, NULL
from .compat import (isiterable, iteritems, odict, primitive_types, text_type,
with_metaclass, string_types, itervalues)
from .yaml import yaml_load
__all__ = ["Configuration", "PrimitiveParameter",
"SequenceParameter", "MapParameter"]
log = getLogger(__name__)
def pretty_list(iterable, padding=' '): # TODO: move elsewhere in conda.common
if not isiterable(iterable):
iterable = [iterable]
return '\n'.join("%s- %s" % (padding, item) for item in iterable)
def pretty_map(dictionary, padding=' '):
return '\n'.join("%s%s: %s" % (padding, key, value) for key, value in iteritems(dictionary))
class ConfigurationError(CondaError):
pass
class ValidationError(ConfigurationError):
def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs):
self.parameter_name = parameter_name
self.parameter_value = parameter_value
self.source = source
super(ConfigurationError, self).__init__(msg, **kwargs)
def __str__(self):
return ("Parameter %s = %r declared in %s is invalid."
% (self.parameter_name, self.parameter_value, self.source))
class MultipleKeysError(ValidationError):
def __init__(self, source, keys, preferred_key):
self.source = source
self.keys = keys
msg = ("Multiple aliased keys in file %s:\n"
"%s"
"Must declare only one. Prefer '%s'" % (source, pretty_list(keys), preferred_key))
super(MultipleKeysError, self).__init__(preferred_key, None, source, msg=msg)
class InvalidTypeError(ValidationError):
def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None):
self.wrong_type = wrong_type
self.valid_types = valid_types
if msg is None:
msg = ("Parameter %s = %r declared in %s has type %s.\n"
"Valid types: %s." % (parameter_name, parameter_value,
source, wrong_type, pretty_list(valid_types)))
super(InvalidTypeError, self).__init__(parameter_name, parameter_value, source, msg=msg)
class InvalidElementTypeError(InvalidTypeError):
def __init__(self, parameter_name, parameter_value, source, wrong_type,
valid_types, index_or_key):
qualifier = "at index" if isinstance(index_or_key, int) else "for key"
msg = ("Parameter %s declared in %s has invalid element %r %s %s.\n"
"Valid element types:\n"
"%s." % (parameter_name, source, parameter_value, qualifier,
index_or_key, pretty_list(valid_types)))
super(InvalidElementTypeError, self).__init__(parameter_name, parameter_value, source,
wrong_type, valid_types, msg=msg)
class CustomValidationError(ValidationError):
def __init__(self, parameter_name, parameter_value, source, custom_message):
msg = ("Parameter %s = %r declared in %s is invalid.\n"
"%s" % (parameter_name, parameter_value, source, custom_message))
super(CustomValidationError, self).__init__(parameter_name, parameter_value, source,
msg=msg)
class MultiValidationError(CondaMultiError, ConfigurationError):
def __init__(self, errors, *args, **kwargs):
super(MultiValidationError, self).__init__(errors, *args, **kwargs)
def raise_errors(errors):
if not errors:
return True
elif len(errors) == 1:
raise errors[0]
else:
raise MultiValidationError(errors)
class ParameterFlag(Enum):
final = 'final'
top = "top"
bottom = "bottom"
def __str__(self):
return "%s" % self.value
@classmethod
def from_name(cls, name):
return cls[name]
@classmethod
def from_value(cls, value):
return cls(value)
@classmethod
def from_string(cls, string):
try:
string = string.strip('!#')
return cls.from_value(string)
except (ValueError, AttributeError):
return None
# TODO: move elsewhere, probably auxlib
# TODO: need to add order to at least frozendict, and preferrably frozenset
def make_immutable(value):
if isinstance(value, Mapping):
return frozendict(value)
elif isinstance(value, Set):
return frozenset(value)
elif isiterable(value):
return tuple(value)
else:
return value
@with_metaclass(ABCMeta)
class RawParameter(object):
def __init__(self, source, key, raw_value):
self.source = source
self.key = key
self._raw_value = raw_value
def __repr__(self):
return text_type(vars(self))
@abstractmethod
def value(self, parameter_obj):
raise NotImplementedError()
@abstractmethod
def keyflag(self):
raise NotImplementedError()
@abstractmethod
def valueflags(self, parameter_obj):
raise NotImplementedError()
@classmethod
def make_raw_parameters(cls, source, from_map):
if from_map:
return dict((key, cls(source, key, from_map[key])) for key in from_map)
return EMPTY_MAP
class EnvRawParameter(RawParameter):
    """Raw parameter sourced from APPNAME_* environment variables."""
    source = 'envvars'
    def value(self, parameter_obj):
        if hasattr(parameter_obj, 'string_delimiter'):
            # sequence-style parameter: split the env var on its delimiter
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            raw_value = self._raw_value
            if string_delimiter in raw_value:
                value = raw_value.split(string_delimiter)
            else:
                value = [raw_value]
            return tuple(v.strip() for v in value)
        else:
            # scalar parameter: strip any trailing '!important' marker
            return self.__important_split_value[0].strip()
    def keyflag(self):
        # a '...!important' suffix in the env var marks the key as final
        return ParameterFlag.final if len(self.__important_split_value) >= 2 else None
    def valueflags(self, parameter_obj):
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            return tuple('' for _ in self._raw_value.split(string_delimiter))
        else:
            # NOTE(review): this branch returns the stripped *value* string rather
            # than a ParameterFlag or None (other RawParameter subclasses return
            # None here) -- looks unintentional; confirm against callers.
            return self.__important_split_value[0].strip()
    @property
    def __important_split_value(self):
        # [value] or [value, ''] when the raw string ends with '!important'
        return self._raw_value.split("!important")
    @classmethod
    def make_raw_parameters(cls, appname):
        # collect APPNAME_FOO=bar from the environment as {'foo': 'bar'}
        keystart = "{0}_".format(appname.upper())
        raw_env = dict((k.replace(keystart, '').lower(), v)
                       for k, v in iteritems(environ) if k.startswith(keystart))
        return super(EnvRawParameter, cls).make_raw_parameters(EnvRawParameter.source, raw_env)
class ArgParseRawParameter(RawParameter):
    """RawParameter backed by a value parsed from the command line."""
    source = 'cmd_line'

    def value(self, parameter_obj):
        # freeze the parsed value so merged results are safely shareable
        frozen = make_immutable(self._raw_value)
        return frozen

    def keyflag(self):
        # command-line values never carry a !final marker
        return None

    def valueflags(self, parameter_obj):
        # nor any per-element markers
        return None

    @classmethod
    def make_raw_parameters(cls, args_from_argparse):
        """Wrap each entry of an argparse-derived mapping as a raw parameter."""
        base = super(ArgParseRawParameter, cls)
        return base.make_raw_parameters(ArgParseRawParameter.source, args_from_argparse)
class YamlRawParameter(RawParameter):
    # this class should encapsulate all direct use of ruamel.yaml in this module

    def __init__(self, source, key, raw_value, keycomment):
        # keycomment: the raw yaml comment string attached to the key, or None
        self._keycomment = keycomment
        super(YamlRawParameter, self).__init__(source, key, raw_value)

    def value(self, parameter_obj):
        self.__process(parameter_obj)
        return self._value

    def keyflag(self):
        # parse the key's trailing comment (e.g. "#!final") into a ParameterFlag
        return ParameterFlag.from_string(self._keycomment)

    def valueflags(self, parameter_obj):
        self.__process(parameter_obj)
        return self._valueflags

    def __process(self, parameter_obj):
        """Lazily convert the ruamel round-trip node into a plain immutable value
        plus its per-element flags, caching both on the instance."""
        if hasattr(self, '_value'):
            # already processed; _value doubles as the memoization sentinel
            return
        elif isinstance(self._raw_value, CommentedSeq):
            # yaml list: flags come from each element's trailing comment
            valuecomments = self._get_yaml_list_comments(self._raw_value)
            self._valueflags = tuple(ParameterFlag.from_string(s) for s in valuecomments)
            self._value = tuple(self._raw_value)
        elif isinstance(self._raw_value, CommentedMap):
            # yaml map: flags come from each value's trailing comment, keyed by map key
            valuecomments = self._get_yaml_map_comments(self._raw_value)
            self._valueflags = dict((k, ParameterFlag.from_string(v))
                                    for k, v in iteritems(valuecomments) if v is not None)
            self._value = frozendict(self._raw_value)
        elif isinstance(self._raw_value, primitive_types):
            # scalar: no per-element flags possible
            self._valueflags = None
            self._value = self._raw_value
        else:
            raise ThisShouldNeverHappenError()  # pragma: no cover

    @staticmethod
    def _get_yaml_key_comment(commented_dict, key):
        # ruamel stores a key's comment at ca.items[key][2]; absent pieces raise
        try:
            return commented_dict.ca.items[key][2].value.strip()
        except (AttributeError, KeyError):
            return None

    @staticmethod
    def _get_yaml_list_comments(value):
        # one (possibly None) raw comment string per list index
        items = value.ca.items
        raw_comment_lines = tuple(excepts((AttributeError, KeyError, TypeError),
                                          lambda q: items.get(q)[0].value.strip() or None,
                                          lambda _: None  # default value on exception
                                          )(q)
                                  for q in range(len(value)))
        return raw_comment_lines

    @staticmethod
    def _get_yaml_map_comments(rawvalue):
        # map of key -> (possibly None) raw comment string for that entry
        return dict((key, excepts(KeyError,
                                  lambda k: rawvalue.ca.items[k][2].value.strip() or None,
                                  lambda _: None  # default value on exception
                                  )(key))
                    for key in rawvalue)

    @classmethod
    def make_raw_parameters(cls, source, from_map):
        """Wrap every top-level yaml entry, capturing each key's comment too."""
        if from_map:
            return dict((key, cls(source, key, from_map[key],
                                  cls._get_yaml_key_comment(from_map, key)))
                        for key in from_map)
        return EMPTY_MAP

    @classmethod
    def make_raw_parameters_from_file(cls, filepath):
        """Load *filepath* as round-trip yaml and wrap its top-level entries."""
        with open(filepath, 'r') as fh:
            ruamel_yaml = yaml_load(fh)
        return cls.make_raw_parameters(filepath, ruamel_yaml) or EMPTY_MAP
def load_file_configs(search_path):
    """Collect raw parameters from every config file reachable via *search_path*.

    Returns an ordered mapping of filepath -> dict of RawParameter objects, in
    search-path order.  Paths that do not exist on disk are silently skipped.
    """
    def _load_single(path):
        # a regular file must be a *.yml file or a condarc file
        assert path.endswith((".yml", "condarc")), path
        yield path, YamlRawParameter.make_raw_parameters_from_file(path)

    def _load_directory(path):
        # a directory contributes every *.yml file directly inside it
        for member in glob(join(path, "*.yml")):
            yield member, YamlRawParameter.make_raw_parameters_from_file(member)

    # dispatch on the stat file-type bits
    loaders = {
        S_IFREG: _load_single,
        S_IFDIR: _load_directory,
    }

    def _file_type(path):
        # file-type bits of the stat mode, or None if the path doesn't exist
        try:
            return S_IFMT(stat(path).st_mode)
        except OSError:
            return None

    result = odict()
    for path in search_path:
        full_path = expand(path)
        file_type = _file_type(full_path)
        if file_type is not None:
            result.update(loaders[file_type](full_path))
    return result
@with_metaclass(ABCMeta)
class Parameter(object):
    """Abstract data descriptor declared on Configuration subclasses.

    On attribute access it gathers matching raw parameters from every source,
    merges them (subclass-specific), typifies, validates, and caches the result
    on the owning Configuration instance.
    """
    # _type: the container/scalar type the merged value must be
    _type = None
    # _element_type: the type of each contained element (== _type for scalars)
    _element_type = None

    def __init__(self, default, aliases=(), validation=None):
        self._name = None
        self._names = None
        self.default = default
        self.aliases = aliases
        self._validation = validation

    def _set_name(self, name):
        # this is an explicit method, and not a descriptor/setter
        # it's meant to be called by the Configuration metaclass
        self._name = name
        self._names = frozenset(x for x in chain(self.aliases, (name, )))
        return name

    @property
    def name(self):
        # primary name; only valid after the metaclass has called _set_name
        if self._name is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._name

    @property
    def names(self):
        # primary name plus all aliases, as a frozenset
        if self._names is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._names

    def _raw_parameters_from_single_source(self, raw_parameters):
        """Return (match, error) for this parameter within one source's raw map."""
        # while supporting parameter name aliases, we enforce that only one definition is given
        # per data source
        keys = self.names & frozenset(raw_parameters.keys())
        matches = {key: raw_parameters[key] for key in keys}
        numkeys = len(keys)
        if numkeys == 0:
            return None, None
        elif numkeys == 1:
            return next(itervalues(matches)), None
        elif self.name in keys:
            # multiple aliases given; prefer the primary name but still flag the error
            return matches[self.name], MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                                         keys, self.name)
        else:
            return None, MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                           keys, self.name)

    def _get_all_matches(self, instance):
        """Collect matches across all sources, in raw_data (precedence) order."""
        # a match is a raw parameter instance
        matches = []
        multikey_exceptions = []
        for filepath, raw_parameters in iteritems(instance.raw_data):
            match, error = self._raw_parameters_from_single_source(raw_parameters)
            if match is not None:
                matches.append(match)
            if error:
                multikey_exceptions.append(error)
        return matches, multikey_exceptions

    @abstractmethod
    def _merge(self, matches):
        """Reduce a list of matches to a single merged value (subclass policy)."""
        raise NotImplementedError()

    def __get__(self, instance, instance_type):
        # strategy is "extract and merge," which is actually just map and reduce
        # extract matches from each source in SEARCH_PATH
        # then merge matches together
        # NOTE(review): class-level access (instance is None) is not handled here
        # and would raise AttributeError on instance._cache — confirm callers
        # only access through instances.
        if self.name in instance._cache:
            return instance._cache[self.name]
        matches, errors = self._get_all_matches(instance)
        try:
            # fall back to the declared default when no source supplies a value
            result = typify_data_structure(self._merge(matches) if matches else self.default,
                                           self._element_type)
        except TypeCoercionError as e:
            errors.append(CustomValidationError(self.name, e.value, "<<merged>>", text_type(e)))
        else:
            errors.extend(self.collect_errors(instance, result))
        # raise_errors presumably raises when `errors` is non-empty; otherwise the
        # coercion-failure path would hit an UnboundLocalError on `result` below
        raise_errors(errors)
        instance._cache[self.name] = result
        return result

    def collect_errors(self, instance, value, source="<<merged>>"):
        """Validate a Parameter value.

        Args:
            instance (Configuration): The instance object to which the Parameter descriptor is
                attached.
            value: The value to be validated.
            source (str): Label identifying where the value came from, used in
                error messages.

        Returns:
            list: validation error objects (empty when the value is valid).
        """
        errors = []
        if not isinstance(value, self._type):
            errors.append(InvalidTypeError(self.name, value, source, type(value),
                                           self._type))
        elif self._validation is not None:
            # custom validator: False -> generic error, str -> custom message
            result = self._validation(value)
            if result is False:
                errors.append(ValidationError(self.name, value, source))
            elif isinstance(result, string_types):
                errors.append(CustomValidationError(self.name, value, source, result))
        return errors

    def _match_key_is_important(self, raw_parameter):
        # a key marked "!final" stops lower-precedence sources from contributing
        return raw_parameter.keyflag() is ParameterFlag.final

    def _first_important_matches(self, matches):
        # truncate matches just after the first "!final" one (keep all if none)
        idx = first(enumerate(matches), lambda x: self._match_key_is_important(x[1]),
                    apply=lambda x: x[0])
        return matches if idx is None else matches[:idx+1]

    @staticmethod
    def _str_format_flag(flag):
        # render a flag as its " #!flag" comment form, or nothing
        return " #!%s" % flag if flag is not None else ''

    @staticmethod
    def _str_format_value(value):
        # render None explicitly so it survives round-tripping as text
        if value is None:
            return 'None'
        return value

    @classmethod
    def repr_raw(cls, raw_parameter):
        """Render *raw_parameter* as yaml-ish text (subclass responsibility)."""
        raise NotImplementedError()
class PrimitiveParameter(Parameter):
    """Parameter type for a Configuration class holding exactly one scalar value.

    Scalars here are the python primitives: str, int, float, complex, bool and
    NoneType (plus long and unicode on python 2).
    """

    def __init__(self, default, aliases=(), validation=None, parameter_type=None):
        """
        Args:
            default (Any): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean
                indicating validity, or alternately return a string describing an
                invalid value.
            parameter_type (type or Tuple[type]): Type-validation of parameter's value.
                If None, type(default) is used.
        """
        self._type = parameter_type if parameter_type is not None else type(default)
        self._element_type = self._type
        super(PrimitiveParameter, self).__init__(default, aliases, validation)

    def _merge(self, matches):
        # a match flagged !final wins outright; otherwise the last source wins
        important = first(matches, self._match_key_is_important, default=None)
        if important is not None:
            return important.value(self)
        winner = last(matches, lambda m: m is not None, default=None)
        if winner is not None:
            return winner.value(self)
        raise ThisShouldNeverHappenError()  # pragma: no cover

    def repr_raw(self, raw_parameter):
        # "key: value" with the key's flag rendered as a trailing comment
        value_str = self._str_format_value(raw_parameter.value(self))
        flag_str = self._str_format_flag(raw_parameter.keyflag())
        return "%s: %s%s" % (raw_parameter.key, value_str, flag_str)
class SequenceParameter(Parameter):
    """Parameter type for a Configuration class that holds a sequence (i.e. list) of python
    primitive values.
    """
    _type = tuple

    def __init__(self, element_type, default=(), aliases=(), validation=None,
                 string_delimiter=','):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element in
                the sequence.
            default (Iterable[str]): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
            string_delimiter (str): Used to split a single-string raw value (e.g. an
                environment variable) into individual elements.
        """
        self._element_type = element_type
        self.string_delimiter = string_delimiter
        super(SequenceParameter, self).__init__(default, aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        # BUGFIX: forward `source` to the base class so container-level type errors
        # are attributed to the actual source instead of always "<<merged>>"
        errors = super(SequenceParameter, self).collect_errors(instance, value, source)
        # additionally validate the type of each individual element
        element_type = self._element_type
        for idx, element in enumerate(value):
            if not isinstance(element, element_type):
                errors.append(InvalidElementTypeError(self.name, element, source,
                                                      type(element), element_type, idx))
        return errors

    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker, parameter_obj):
            return tuple(line
                         for line, flag in zip(match.value(parameter_obj),
                                               match.valueflags(parameter_obj))
                         if flag is marker)
        top_lines = concat(get_marked_lines(m, ParameterFlag.top, self) for m in relevant_matches)

        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom, self) for m in
                              reversed(relevant_matches))

        # now, concat all lines, while reversing the matches
        # reverse because elements closer to the end of search path take precedence
        all_lines = concat(m.value(self) for m in reversed(relevant_matches))

        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))

        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        #       for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))
        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))

    def repr_raw(self, raw_parameter):
        # "key:" header line followed by one "  - value" line per element,
        # each with its flag rendered as a trailing comment
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for q, value in enumerate(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self)[q]
            lines.append("  - %s%s" % (self._str_format_value(value),
                                       self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class MapParameter(Parameter):
    """Parameter type for a Configuration class that holds a map (i.e. dict) of python
    primitive values.
    """
    _type = dict

    def __init__(self, element_type, default=None, aliases=(), validation=None):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element.
            default (Mapping): The parameter's default value. If None, will be an empty dict.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        super(MapParameter, self).__init__(default or dict(), aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        # BUGFIX: forward `source` to the base class so container-level type errors
        # are attributed to the actual source instead of always "<<merged>>"
        errors = super(MapParameter, self).collect_errors(instance, value, source)
        # additionally validate the type of each mapped value
        element_type = self._element_type
        errors.extend(InvalidElementTypeError(self.name, val, source, type(val), element_type, key)
                      for key, val in iteritems(value) if not isinstance(val, element_type))
        return errors

    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self))
                                    if key_is_important(match, k))
                               for match in relevant_matches)

        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self) for m in relevant_matches),
                             reversed(important_maps)))

    def repr_raw(self, raw_parameter):
        # "key:" header line followed by one "  subkey: value" line per entry,
        # each with its flag rendered as a trailing comment
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for valuekey, value in iteritems(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self).get(valuekey)
            lines.append("  %s: %s%s" % (valuekey, self._str_format_value(value),
                                         self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class ConfigurationType(type):
    """metaclass for Configuration"""

    def __init__(cls, name, bases, attr):
        super(ConfigurationType, cls).__init__(name, bases, attr)
        # call _set_name for each parameter
        # NOTE(review): only cls.__dict__ is scanned, so Parameter descriptors
        # inherited from base classes are never named and never appear in
        # parameter_names — confirm subclassing of Configuration subclasses
        # isn't expected to inherit parameters.
        cls.parameter_names = tuple(p._set_name(name) for name, p in iteritems(cls.__dict__)
                                    if isinstance(p, Parameter))
@with_metaclass(ConfigurationType)
class Configuration(object):
    """Aggregates raw parameter values from multiple sources (config files,
    environment variables, argparse) and exposes merged, validated values
    through Parameter descriptors declared on subclasses.
    """

    def __init__(self, search_path=(), app_name=None, argparse_args=None):
        # ordered map of source -> {key: RawParameter}; insertion order sets precedence
        self.raw_data = odict()
        # per-parameter cache of merged results; cleared whenever sources change
        self._cache = dict()
        self._validation_errors = defaultdict(list)
        if search_path:
            self._add_search_path(search_path)
        if app_name is not None:
            self._add_env_vars(app_name)
        if argparse_args is not None:
            self._add_argparse_args(argparse_args)

    def _add_search_path(self, search_path):
        # load config files found along search_path into raw_data
        return self._add_raw_data(load_file_configs(search_path))

    def _add_env_vars(self, app_name):
        # pull APPNAME_* environment variables in as a source
        self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name)
        self._cache = dict()
        return self

    def _add_argparse_args(self, argparse_args):
        # keep only argparse values that were actually provided (drop NULL sentinel)
        self._argparse_args = AttrDict((k, v) for k, v, in iteritems(vars(argparse_args))
                                       if v is not NULL)
        source = ArgParseRawParameter.source
        self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args)
        self._cache = dict()
        return self

    def _add_raw_data(self, raw_data):
        self.raw_data.update(raw_data)
        self._cache = dict()
        return self

    def check_source(self, source):
        """Typify and validate every parameter defined within a single source.

        Args:
            source: a key of ``self.raw_data``.

        Returns:
            tuple: (dict of matched key -> typed value, list of validation errors)
        """
        # this method ends up duplicating much of the logic of Parameter.__get__
        # I haven't yet found a way to make it more DRY though
        typed_values = {}
        validation_errors = []
        raw_parameters = self.raw_data[source]
        for key in self.parameter_names:
            parameter = self.__class__.__dict__[key]
            match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters)
            if multikey_error:
                validation_errors.append(multikey_error)
            if match is not None:
                try:
                    typed_value = typify_data_structure(match.value(parameter),
                                                        parameter._element_type)
                except TypeCoercionError as e:
                    validation_errors.append(CustomValidationError(match.key, e.value,
                                                                   match.source, text_type(e)))
                else:
                    collected_errors = parameter.collect_errors(self, typed_value, match.source)
                    if collected_errors:
                        validation_errors.extend(collected_errors)
                    else:
                        typed_values[match.key] = typed_value  # parameter.repr_raw(match)
            else:
                # this situation will happen if there is a multikey_error and none of the
                # matched keys is the primary key
                pass
        return typed_values, validation_errors

    def validate_all(self):
        """Validate every source, raising if any parameter value is invalid."""
        validation_errors = list(chain.from_iterable(self.check_source(source)[1]
                                                     for source in self.raw_data))
        raise_errors(validation_errors)

    def collect_all(self):
        """Return per-source typed values (sources with no values omitted),
        raising first if any source holds an invalid value."""
        typed_values = odict()
        validation_errors = odict()
        for source in self.raw_data:
            typed_values[source], validation_errors[source] = self.check_source(source)
        raise_errors(tuple(chain.from_iterable(itervalues(validation_errors))))
        return odict((k, v) for k, v in iteritems(typed_values) if v)
| conda/common/configuration.py
--- a/conda/common/configuration.py
+++ b/conda/common/configuration.py
@@ -188,7 +188,7 @@ def __repr__(self):
return text_type(vars(self))
@abstractmethod
- def value(self, parameter_type):
+ def value(self, parameter_obj):
raise NotImplementedError()
@abstractmethod
@@ -196,7 +196,7 @@ def keyflag(self):
raise NotImplementedError()
@abstractmethod
- def valueflags(self, parameter_type):
+ def valueflags(self, parameter_obj):
raise NotImplementedError()
@classmethod
@@ -209,14 +209,29 @@ def make_raw_parameters(cls, source, from_map):
class EnvRawParameter(RawParameter):
source = 'envvars'
- def value(self, parameter_type):
- return self.__important_split_value[0].strip()
+ def value(self, parameter_obj):
+ if hasattr(parameter_obj, 'string_delimiter'):
+ string_delimiter = getattr(parameter_obj, 'string_delimiter')
+ # TODO: add stripping of !important, !top, and !bottom
+ raw_value = self._raw_value
+ if string_delimiter in raw_value:
+ value = raw_value.split(string_delimiter)
+ else:
+ value = [raw_value]
+ return tuple(v.strip() for v in value)
+ else:
+ return self.__important_split_value[0].strip()
def keyflag(self):
return ParameterFlag.final if len(self.__important_split_value) >= 2 else None
- def valueflags(self, parameter_type):
- return None
+ def valueflags(self, parameter_obj):
+ if hasattr(parameter_obj, 'string_delimiter'):
+ string_delimiter = getattr(parameter_obj, 'string_delimiter')
+ # TODO: add stripping of !important, !top, and !bottom
+ return tuple('' for _ in self._raw_value.split(string_delimiter))
+ else:
+ return self.__important_split_value[0].strip()
@property
def __important_split_value(self):
@@ -233,13 +248,13 @@ def make_raw_parameters(cls, appname):
class ArgParseRawParameter(RawParameter):
source = 'cmd_line'
- def value(self, parameter_type):
+ def value(self, parameter_obj):
return make_immutable(self._raw_value)
def keyflag(self):
return None
- def valueflags(self, parameter_type):
+ def valueflags(self, parameter_obj):
return None
@classmethod
@@ -255,18 +270,18 @@ def __init__(self, source, key, raw_value, keycomment):
self._keycomment = keycomment
super(YamlRawParameter, self).__init__(source, key, raw_value)
- def value(self, parameter_type):
- self.__process(parameter_type)
+ def value(self, parameter_obj):
+ self.__process(parameter_obj)
return self._value
def keyflag(self):
return ParameterFlag.from_string(self._keycomment)
- def valueflags(self, parameter_type):
- self.__process(parameter_type)
+ def valueflags(self, parameter_obj):
+ self.__process(parameter_obj)
return self._valueflags
- def __process(self, parameter_type):
+ def __process(self, parameter_obj):
if hasattr(self, '_value'):
return
elif isinstance(self._raw_value, CommentedSeq):
@@ -511,18 +526,17 @@ def __init__(self, default, aliases=(), validation=None, parameter_type=None):
def _merge(self, matches):
important_match = first(matches, self._match_key_is_important, default=None)
if important_match is not None:
- return important_match.value(self.__class__)
+ return important_match.value(self)
last_match = last(matches, lambda x: x is not None, default=None)
if last_match is not None:
- return last_match.value(self.__class__)
+ return last_match.value(self)
raise ThisShouldNeverHappenError() # pragma: no cover
- @classmethod
- def repr_raw(cls, raw_parameter):
+ def repr_raw(self, raw_parameter):
return "%s: %s%s" % (raw_parameter.key,
- cls._str_format_value(raw_parameter.value(cls)),
- cls._str_format_flag(raw_parameter.keyflag()))
+ self._str_format_value(raw_parameter.value(self)),
+ self._str_format_flag(raw_parameter.keyflag()))
class SequenceParameter(Parameter):
@@ -531,7 +545,8 @@ class SequenceParameter(Parameter):
"""
_type = tuple
- def __init__(self, element_type, default=(), aliases=(), validation=None):
+ def __init__(self, element_type, default=(), aliases=(), validation=None,
+ string_delimiter=','):
"""
Args:
element_type (type or Iterable[type]): The generic type of each element in
@@ -543,6 +558,7 @@ def __init__(self, element_type, default=(), aliases=(), validation=None):
"""
self._element_type = element_type
+ self.string_delimiter = string_delimiter
super(SequenceParameter, self).__init__(default, aliases, validation)
def collect_errors(self, instance, value, source="<<merged>>"):
@@ -562,21 +578,21 @@ def _merge(self, matches):
# get individual lines from important_matches that were marked important
# these will be prepended to the final result
- def get_marked_lines(match, marker):
+ def get_marked_lines(match, marker, parameter_obj):
return tuple(line
- for line, flag in zip(match.value(self.__class__),
- match.valueflags(self.__class__))
+ for line, flag in zip(match.value(parameter_obj),
+ match.valueflags(parameter_obj))
if flag is marker)
- top_lines = concat(get_marked_lines(m, ParameterFlag.top) for m in relevant_matches)
+ top_lines = concat(get_marked_lines(m, ParameterFlag.top, self) for m in relevant_matches)
# also get lines that were marked as bottom, but reverse the match order so that lines
# coming earlier will ultimately be last
- bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom) for m in
+ bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom, self) for m in
reversed(relevant_matches))
# now, concat all lines, while reversing the matches
# reverse because elements closer to the end of search path take precedence
- all_lines = concat(m.value(self.__class__) for m in reversed(relevant_matches))
+ all_lines = concat(m.value(self) for m in reversed(relevant_matches))
# stack top_lines + all_lines, then de-dupe
top_deduped = tuple(unique(concatv(top_lines, all_lines)))
@@ -586,19 +602,17 @@ def get_marked_lines(match, marker):
# NOTE: for a line value marked both top and bottom, the bottom marker will win out
# for the top marker to win out, we'd need one additional de-dupe step
bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))
-
# just reverse, and we're good to go
return tuple(reversed(tuple(bottom_deduped)))
- @classmethod
- def repr_raw(cls, raw_parameter):
+ def repr_raw(self, raw_parameter):
lines = list()
lines.append("%s:%s" % (raw_parameter.key,
- cls._str_format_flag(raw_parameter.keyflag())))
- for q, value in enumerate(raw_parameter.value(cls)):
- valueflag = raw_parameter.valueflags(cls)[q]
- lines.append(" - %s%s" % (cls._str_format_value(value),
- cls._str_format_flag(valueflag)))
+ self._str_format_flag(raw_parameter.keyflag())))
+ for q, value in enumerate(raw_parameter.value(self)):
+ valueflag = raw_parameter.valueflags(self)[q]
+ lines.append(" - %s%s" % (self._str_format_value(value),
+ self._str_format_flag(valueflag)))
return '\n'.join(lines)
@@ -635,25 +649,24 @@ def _merge(self, matches):
# mapkeys with important matches
def key_is_important(match, key):
- return match.valueflags(self.__class__).get(key) is ParameterFlag.final
+ return match.valueflags(self).get(key) is ParameterFlag.final
important_maps = tuple(dict((k, v)
- for k, v in iteritems(match.value(self.__class__))
+ for k, v in iteritems(match.value(self))
if key_is_important(match, k))
for match in relevant_matches)
# dump all matches in a dict
# then overwrite with important matches
- return merge(concatv((m.value(self.__class__) for m in relevant_matches),
+ return merge(concatv((m.value(self) for m in relevant_matches),
reversed(important_maps)))
- @classmethod
- def repr_raw(cls, raw_parameter):
+ def repr_raw(self, raw_parameter):
lines = list()
lines.append("%s:%s" % (raw_parameter.key,
- cls._str_format_flag(raw_parameter.keyflag())))
- for valuekey, value in iteritems(raw_parameter.value(cls)):
- valueflag = raw_parameter.valueflags(cls).get(valuekey)
- lines.append(" %s: %s%s" % (valuekey, cls._str_format_value(value),
- cls._str_format_flag(valueflag)))
+ self._str_format_flag(raw_parameter.keyflag())))
+ for valuekey, value in iteritems(raw_parameter.value(self)):
+ valueflag = raw_parameter.valueflags(self).get(valuekey)
+ lines.append(" %s: %s%s" % (valuekey, self._str_format_value(value),
+ self._str_format_flag(valueflag)))
return '\n'.join(lines)
@@ -717,7 +730,7 @@ def check_source(self, source):
if match is not None:
try:
- typed_value = typify_data_structure(match.value(parameter.__class__),
+ typed_value = typify_data_structure(match.value(parameter),
parameter._element_type)
except TypeCoercionError as e:
validation_errors.append(CustomValidationError(match.key, e.value, |
Conda 4.2 branch doesn't support CIO_TEST.
In config.py in `conda 4.1.x` I see:
```
def get_channel_urls(platform=None):
if os.getenv('CIO_TEST'):
import cio_test
base_urls = cio_test.base_urls
```
`cio_test` is not imported anywhere in the 4.2 branch, so setting the `CIO_TEST` environment variable no longer has any effect on the channel URLs.
| conda/base/context.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
import sys
from conda._vendor.auxlib.path import expand
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.ish import dals
from .._vendor.toolz.itertoolz import concatv
from ..common.configuration import (Configuration, MapParameter, PrimitiveParameter,
SequenceParameter)
from ..common.url import urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
# module-level logger
log = getLogger(__name__)

# "major.minor" of the running interpreter, e.g. '3.5'
default_python = '%d.%d' % sys.version_info[:2]

# ----- operating system and architecture -----

# sys.platform value -> conda platform name
_platform_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
}
# platform.machine() values that are linux but not x86
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
# pointer width in bits -> conventional architecture name
_arch_names = {
    32: 'x86',
    64: 'x86_64',
}
class Context(Configuration):
    """Application-wide conda configuration, merged from condarc files,
    CONDA_* environment variables, and command-line arguments."""

    # shadows (and reads) the module-level default_python constant
    default_python = property(lambda self: default_python)

    add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
    add_pip_as_python_dependency = PrimitiveParameter(True)
    allow_softlinks = PrimitiveParameter(True)
    auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
    changeps1 = PrimitiveParameter(True)
    create_default_packages = SequenceParameter(string_types)
    disallow = SequenceParameter(string_types)
    force_32bit = PrimitiveParameter(False)
    # may be a bool, or a string path to a CA bundle file
    ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
    track_features = SequenceParameter(string_types)
    use_pip = PrimitiveParameter(True)
    proxy_servers = MapParameter(string_types)
    _root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))

    # channels
    channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS)
    channels = SequenceParameter(string_types, default=('defaults',))
    default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS)

    # command line
    always_copy = PrimitiveParameter(False, aliases=('copy',))
    always_yes = PrimitiveParameter(False, aliases=('yes',))
    channel_priority = PrimitiveParameter(True)
    debug = PrimitiveParameter(False)
    json = PrimitiveParameter(False)
    offline = PrimitiveParameter(False)
    quiet = PrimitiveParameter(False)
    shortcuts = PrimitiveParameter(True)
    # tri-state: None means "let conda decide"
    show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
    update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
    verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)

    # conda_build
    bld_path = PrimitiveParameter('')
    binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
                                        parameter_type=(bool, NoneType))

    @property
    def arch_name(self):
        """Architecture name, e.g. 'x86_64', or the raw machine name on
        non-x86 linux (armv6l/armv7l/ppc64le)."""
        m = machine()
        if self.platform == 'linux' and m in non_x86_linux_machines:
            return m
        else:
            return _arch_names[self.bits]

    @property
    def platform(self):
        """Conda platform name ('linux', 'osx', 'win'), or 'unknown'."""
        return _platform_map.get(sys.platform, 'unknown')

    @property
    def subdir(self):
        """Repodata subdir, e.g. 'linux-64', 'osx-64', 'linux-armv7l'."""
        m = machine()
        if m in non_x86_linux_machines:
            return 'linux-%s' % m
        else:
            return '%s-%d' % (self.platform, self.bits)

    @property
    def bits(self):
        """Pointer width of the target, honoring the force_32bit override."""
        if self.force_32bit:
            return 32
        else:
            # tuple.__itemsize__ is the native pointer size in bytes
            return 8 * tuple.__itemsize__

    @property
    def local_build_root(self):
        """Directory where conda-build writes its output packages."""
        if self.bld_path:
            return expand(self.bld_path)
        elif self.root_writable:
            return join(self.root_dir, 'conda-bld')
        else:
            return expand('~/conda-bld')

    @property
    def root_dir(self):
        # root_dir is an alias for root_prefix, we prefer the name "root_prefix"
        # because it is more consistent with other names
        return abspath(expanduser(self._root_dir))

    @property
    def root_writable(self):
        """Whether the current user can write into the root prefix."""
        from conda.common.disk import try_write
        return try_write(self.root_dir)

    _envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs',))

    @property
    def envs_dirs(self):
        """Configured envs directories plus the defaults; when root isn't
        writable, ~/.conda/envs is preferred over root's envs dir."""
        return tuple(abspath(expanduser(p))
                     for p in concatv(self._envs_dirs,
                                      (join(self.root_dir, 'envs'), )
                                      if self.root_writable
                                      else ('~/.conda/envs', join(self.root_dir, 'envs'))))

    @property
    def pkgs_dirs(self):
        """One package cache directory per envs directory."""
        return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]

    @property
    def default_prefix(self):
        """Prefix implied by CONDA_DEFAULT_ENV (root when unset or 'root')."""
        _default_env = os.getenv('CONDA_DEFAULT_ENV')
        if _default_env in (None, ROOT_ENV_NAME):
            return self.root_dir
        elif os.sep in _default_env:
            # value is a path rather than a name
            # NOTE(review): on Windows a '/'-separated path wouldn't match os.sep
            # and would fall through to name lookup — confirm intended.
            return abspath(_default_env)
        else:
            for envs_dir in self.envs_dirs:
                default_prefix = join(envs_dir, _default_env)
                if isdir(default_prefix):
                    return default_prefix
            return join(self.envs_dirs[0], _default_env)

    @property
    def prefix(self):
        # requires _add_argparse_args to have been called (sets _argparse_args)
        return get_prefix(self, self._argparse_args, False)

    @property
    def prefix_w_legacy_search(self):
        # like prefix, but searches envs_dirs for an existing env by name
        return get_prefix(self, self._argparse_args, True)

    @property
    def clone_src(self):
        """Prefix of the environment named by the --clone argument."""
        assert self._argparse_args.clone is not None
        return locate_prefix_by_name(self, self._argparse_args.clone)

    @property
    def conda_in_root(self):
        return not conda_in_private_env()

    @property
    def conda_private(self):
        return conda_in_private_env()

    @property
    def root_prefix(self):
        # when conda lives in <root>/envs/_conda, root is two levels up
        return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix

    @property
    def conda_prefix(self):
        return sys.prefix

    @property
    def binstar_hosts(self):
        """Hostnames treated as anaconda.org-style token hosts."""
        return (urlparse(self.channel_alias).hostname,
                'anaconda.org',
                'binstar.org')
def conda_in_private_env():
    """Return True when conda is installed in its own private environment,
    i.e. the interpreter prefix is some ``.../envs/_conda`` directory."""
    prefix = sys.prefix
    parent = dirname(prefix)
    return basename(prefix) == '_conda' and basename(parent) == 'envs'
# the singleton Context instance used throughout conda
context = Context(SEARCH_PATH, conda, None)


def reset_context(search_path=SEARCH_PATH, argparse_args=None):
    """Re-initialize the global context in place (same object identity, so
    existing references stay valid) and reset dependent cached state."""
    context.__init__(search_path, conda, argparse_args)
    # local import to avoid a circular import at module load time
    from ..models.channel import Channel
    Channel._reset_state()
    return context
def pkgs_dir_from_envs_dir(envs_dir):
    """Map an envs directory to its package cache directory.

    The root envs dir shares the root prefix's global cache ('pkgs', or
    'pkgs32' when force_32bit is set); any other envs dir gets a private
    '.pkgs' cache inside itself.
    """
    root_envs = abspath(join(context.root_dir, 'envs'))
    if abspath(envs_dir) != root_envs:
        return join(envs_dir, '.pkgs')
    cache_name = 'pkgs32' if context.force_32bit else 'pkgs'
    return join(context.root_dir, cache_name)
def get_help_dict():
    """Return help text for each supported .condarc configuration key.

    NOTE(review): most entries are still empty placeholders.
    """
    # this is a function so that most of the time it's not evaluated and loaded into memory
    return {
        'add_pip_as_python_dependency': dals("""
            """),
        'always_yes': dals("""
            """),
        'always_copy': dals("""
            """),
        'changeps1': dals("""
            """),
        'use_pip': dals("""
            Use pip when listing packages with conda list. Note that this does not affect any
            conda command or functionality other than the output of the command conda list.
            """),
        'binstar_upload': dals("""
            """),
        'allow_softlinks': dals("""
            """),
        'self_update': dals("""
            """),
        'show_channel_urls': dals("""
            # show channel URLs when displaying what is going to be downloaded
            # None means letting conda decide
            """),
        'update_dependencies': dals("""
            """),
        'channel_priority': dals("""
            """),
        'ssl_verify': dals("""
            # ssl_verify can be a boolean value or a filename string
            """),
        'track_features': dals("""
            """),
        'channels': dals("""
            """),
        'disallow': dals("""
            # set packages disallowed to be installed
            """),
        'create_default_packages': dals("""
            # packages which are added to a newly created environment by default
            """),
        'envs_dirs': dals("""
            """),
        'default_channels': dals("""
            """),
        'proxy_servers': dals("""
            """),
        'force_32bit': dals("""
            CONDA_FORCE_32BIT should only be used when running conda-build (in order
            to build 32-bit packages on a 64-bit system). We don't want to mention it
            in the documentation, because it can mess up a lot of things.
            """)
    }
def get_prefix(ctx, args, search=True):
    """Get the prefix to operate in.

    Args:
        ctx: the context of conda
        args: the argparse args from the command line
        search: whether to search envs dirs for a named prefix

    Returns: the prefix

    Raises: CondaEnvironmentNotFoundError if the prefix is invalid
    """
    if args.name:
        if '/' in args.name:
            raise CondaValueError("'/' not allowed in environment name: %s" %
                                  args.name, getattr(args, 'json', False))
        if args.name == ROOT_ENV_NAME:
            return ctx.root_dir
        if search:
            return locate_prefix_by_name(ctx, args.name)
        else:
            # new environments are created in the first envs dir
            return join(ctx.envs_dirs[0], args.name)
    elif args.prefix:
        return abspath(expanduser(args.prefix))
    else:
        # neither --name nor --prefix given: fall back to the active env
        return ctx.default_prefix
def locate_prefix_by_name(ctx, name):
    """Find the location of a prefix given a conda env name.

    Args:
        ctx (Context): the context object
        name (str): the name of the prefix to find

    Returns:
        str: the location of the prefix found

    Raises:
        CondaEnvironmentNotFoundError: when no prefix is found
    """
    if name == ROOT_ENV_NAME:
        return ctx.root_dir

    # look for a directory named `name` in all envs_dirs AND in CWD
    # (the original wrapped this tuple in itertools.chain(), a no-op)
    for envs_dir in ctx.envs_dirs + (os.getcwd(),):
        prefix = join(envs_dir, name)
        if isdir(prefix):
            return prefix

    raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
    """Abort `command` with a helpful message when `prefix` lives inside a
    read-only root installation."""
    if inroot_notwritable(prefix):
        # imported lazily to avoid importing CLI help machinery on every call
        from conda.cli.help import root_read_only
        root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
    """Return True if the prefix is under root and root is not writable.

    A plain startswith() wrongly matches sibling directories that merely
    share the root path as a string prefix (e.g. /opt/conda2 would match
    root /opt/conda), so compare for equality or a separator-delimited
    prefix instead.
    """
    if context.root_writable:
        return False
    root = abspath(context.root_dir)
    path = abspath(prefix)
    return path == root or path.startswith(root + os.sep)
<|code_end|>
conda/config.py
<|code_start|>
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from os.path import abspath, expanduser, isfile, join
from conda.base.context import context, non_x86_linux_machines # NOQA
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'auto_update_conda',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
'shortcuts',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
root_dir = context.root_prefix
root_writable = context.root_writable
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
get_rc_urls = lambda: context.channels
def get_local_urls():
    """Return conda-build's local channel URLs, or an empty list."""
    # imported lazily to avoid a circular import at module load time
    from conda.models.channel import get_conda_build_local_url
    urls = get_conda_build_local_url()
    if urls:
        return urls
    return []
class RC(object):
    """Legacy shim mapping old ``rc.get(key)`` lookups onto the global
    ``context`` object (kept for backwards compatibility)."""
    def get(self, key, default=None):
        # delegate to the context attribute of the same name
        return getattr(context, key, default)

# module-level instance used by legacy callers
rc = RC()
envs_dirs = context.envs_dirs
def get_rc_path():
    """Locate the .condarc file to use, or return None.

    $CONDARC wins when set; the single-space value ' ' acts as a sentinel
    that disables any rc file (NOTE(review): presumably intentional —
    confirm against conda docs). Otherwise fall back to the user rc file,
    then the system rc file.
    """
    env_path = os.getenv('CONDARC')
    if env_path == ' ':
        return None
    if env_path:
        return env_path
    for candidate in (user_rc_path, sys_rc_path):
        if isfile(candidate):
            return candidate
    return None
rc_path = get_rc_path()
pkgs_dirs = context.pkgs_dirs
default_prefix = context.default_prefix
subdir = context.subdir
arch_name = context.arch_name
bits = context.bits
platform = context.platform
# put back because of conda build
default_python = context. default_python
binstar_upload = context.binstar_upload
<|code_end|>
| conda/base/context.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
import sys
from conda._vendor.auxlib.path import expand
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.ish import dals
from .._vendor.toolz.itertoolz import concatv
from ..common.configuration import (Configuration, MapParameter, PrimitiveParameter,
SequenceParameter)
from ..common.url import urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
log = getLogger(__name__)
try:
import cio_test # NOQA
except ImportError:
log.info("No cio_test package found.")
_platform_map = {
'linux2': 'linux',
'linux': 'linux',
'darwin': 'osx',
'win32': 'win',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
_arch_names = {
32: 'x86',
64: 'x86_64',
}
class Context(Configuration):
add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
add_pip_as_python_dependency = PrimitiveParameter(True)
allow_softlinks = PrimitiveParameter(True)
auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
changeps1 = PrimitiveParameter(True)
create_default_packages = SequenceParameter(string_types)
disallow = SequenceParameter(string_types)
force_32bit = PrimitiveParameter(False)
ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
track_features = SequenceParameter(string_types)
use_pip = PrimitiveParameter(True)
proxy_servers = MapParameter(string_types)
_root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))
# channels
channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS)
channels = SequenceParameter(string_types, default=('defaults',))
default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS)
# command line
always_copy = PrimitiveParameter(False, aliases=('copy',))
always_yes = PrimitiveParameter(False, aliases=('yes',))
channel_priority = PrimitiveParameter(True)
debug = PrimitiveParameter(False)
json = PrimitiveParameter(False)
offline = PrimitiveParameter(False)
quiet = PrimitiveParameter(False)
shortcuts = PrimitiveParameter(True)
show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)
# conda_build
bld_path = PrimitiveParameter('')
binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
parameter_type=(bool, NoneType))
@property
def default_python(self):
ver = sys.version_info
return '%d.%d' % (ver.major, ver.minor)
@property
def arch_name(self):
m = machine()
if self.platform == 'linux' and m in non_x86_linux_machines:
return m
else:
return _arch_names[self.bits]
@property
def platform(self):
return _platform_map.get(sys.platform, 'unknown')
@property
def subdir(self):
m = machine()
if m in non_x86_linux_machines:
return 'linux-%s' % m
else:
return '%s-%d' % (self.platform, self.bits)
@property
def bits(self):
if self.force_32bit:
return 32
else:
return 8 * tuple.__itemsize__
@property
def local_build_root(self):
if self.bld_path:
return expand(self.bld_path)
elif self.root_writable:
return join(self.root_dir, 'conda-bld')
else:
return expand('~/conda-bld')
@property
def root_dir(self):
# root_dir is an alias for root_prefix, we prefer the name "root_prefix"
# because it is more consistent with other names
return abspath(expanduser(self._root_dir))
@property
def root_writable(self):
from ..utils import try_write
return try_write(self.root_dir)
_envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs',))
@property
def envs_dirs(self):
return tuple(abspath(expanduser(p))
for p in concatv(self._envs_dirs,
(join(self.root_dir, 'envs'), )
if self.root_writable
else ('~/.conda/envs', join(self.root_dir, 'envs'))))
@property
def pkgs_dirs(self):
return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]
@property
def default_prefix(self):
_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, ROOT_ENV_NAME):
return self.root_dir
elif os.sep in _default_env:
return abspath(_default_env)
else:
for envs_dir in self.envs_dirs:
default_prefix = join(envs_dir, _default_env)
if isdir(default_prefix):
return default_prefix
return join(self.envs_dirs[0], _default_env)
@property
def prefix(self):
return get_prefix(self, self._argparse_args, False)
@property
def prefix_w_legacy_search(self):
return get_prefix(self, self._argparse_args, True)
@property
def clone_src(self):
assert self._argparse_args.clone is not None
return locate_prefix_by_name(self, self._argparse_args.clone)
@property
def conda_in_root(self):
return not conda_in_private_env()
@property
def conda_private(self):
return conda_in_private_env()
@property
def root_prefix(self):
return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix
@property
def conda_prefix(self):
return sys.prefix
@property
def binstar_hosts(self):
return (urlparse(self.channel_alias).hostname,
'anaconda.org',
'binstar.org')
def conda_in_private_env():
    """True iff conda is installed in its own private environment.

    Such an environment is always named '_conda' and sits directly under
    an 'envs' directory.
    """
    prefix = sys.prefix
    return (basename(prefix), basename(dirname(prefix))) == ('_conda', 'envs')
context = Context(SEARCH_PATH, conda, None)
def reset_context(search_path=SEARCH_PATH, argparse_args=None):
context.__init__(search_path, conda, argparse_args)
from ..models.channel import Channel
Channel._reset_state()
return context
def pkgs_dir_from_envs_dir(envs_dir):
if abspath(envs_dir) == abspath(join(context.root_dir, 'envs')):
return join(context.root_dir, 'pkgs32' if context.force_32bit else 'pkgs')
else:
return join(envs_dir, '.pkgs')
def get_help_dict():
# this is a function so that most of the time it's not evaluated and loaded into memory
return {
'add_pip_as_python_dependency': dals("""
"""),
'always_yes': dals("""
"""),
'always_copy': dals("""
"""),
'changeps1': dals("""
"""),
'use_pip': dals("""
Use pip when listing packages with conda list. Note that this does not affect any
conda command or functionality other than the output of the command conda list.
"""),
'binstar_upload': dals("""
"""),
'allow_softlinks': dals("""
"""),
'self_update': dals("""
"""),
'show_channel_urls': dals("""
# show channel URLs when displaying what is going to be downloaded
# None means letting conda decide
"""),
'update_dependencies': dals("""
"""),
'channel_priority': dals("""
"""),
'ssl_verify': dals("""
# ssl_verify can be a boolean value or a filename string
"""),
'track_features': dals("""
"""),
'channels': dals("""
"""),
'disallow': dals("""
# set packages disallowed to be installed
"""),
'create_default_packages': dals("""
# packages which are added to a newly created environment by default
"""),
'envs_dirs': dals("""
"""),
'default_channels': dals("""
"""),
'proxy_servers': dals("""
"""),
'force_32bit': dals("""
CONDA_FORCE_32BIT should only be used when running conda-build (in order
to build 32-bit packages on a 64-bit system). We don't want to mention it
in the documentation, because it can mess up a lot of things.
""")
}
def get_prefix(ctx, args, search=True):
"""Get the prefix to operate in
Args:
ctx: the context of conda
args: the argparse args from the command line
search: whether search for prefix
Returns: the prefix
Raises: CondaEnvironmentNotFoundError if the prefix is invalid
"""
if args.name:
if '/' in args.name:
raise CondaValueError("'/' not allowed in environment name: %s" %
args.name, getattr(args, 'json', False))
if args.name == ROOT_ENV_NAME:
return ctx.root_dir
if search:
return locate_prefix_by_name(ctx, args.name)
else:
return join(ctx.envs_dirs[0], args.name)
elif args.prefix:
return abspath(expanduser(args.prefix))
else:
return ctx.default_prefix
def locate_prefix_by_name(ctx, name):
    """Find the location of a prefix given a conda env name.

    Args:
        ctx (Context): the context object
        name (str): the name of the prefix to find

    Returns:
        str: the location of the prefix found

    Raises:
        CondaEnvironmentNotFoundError: when no prefix is found
    """
    if name == ROOT_ENV_NAME:
        return ctx.root_dir

    # look for a directory named `name` in all envs_dirs AND in CWD
    # (the original wrapped this tuple in itertools.chain(), a no-op)
    for envs_dir in ctx.envs_dirs + (os.getcwd(),):
        prefix = join(envs_dir, name)
        if isdir(prefix):
            return prefix

    raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
if inroot_notwritable(prefix):
from conda.cli.help import root_read_only
root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
    """Return True if the prefix is under root and root is not writable.

    A plain startswith() wrongly matches sibling directories that merely
    share the root path as a string prefix (e.g. /opt/conda2 would match
    root /opt/conda), so compare for equality or a separator-delimited
    prefix instead.
    """
    if context.root_writable:
        return False
    root = abspath(context.root_dir)
    path = abspath(prefix)
    return path == root or path.startswith(root + os.sep)
conda/config.py
# (c) 2012-2015 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
import os
import sys
from os.path import abspath, expanduser, isfile, join
from conda.base.context import context, non_x86_linux_machines # NOQA
# ----- rc file -----
# This is used by conda config to check which keys are allowed in the config
# file. Be sure to update it when new keys are added.
#################################################################
# Also update the example condarc file when you add a key here! #
#################################################################
rc_list_keys = [
'channels',
'disallow',
'create_default_packages',
'track_features',
'envs_dirs',
'default_channels',
]
rc_bool_keys = [
'add_binstar_token',
'add_anaconda_token',
'add_pip_as_python_dependency',
'always_yes',
'always_copy',
'allow_softlinks',
'auto_update_conda',
'changeps1',
'use_pip',
'offline',
'binstar_upload',
'anaconda_upload',
'show_channel_urls',
'allow_other_channels',
'update_dependencies',
'channel_priority',
'shortcuts',
]
rc_string_keys = [
'ssl_verify',
'channel_alias',
]
# Not supported by conda config yet
rc_other = [
'proxy_servers',
]
root_dir = context.root_prefix
root_writable = context.root_writable
user_rc_path = abspath(expanduser('~/.condarc'))
sys_rc_path = join(sys.prefix, '.condarc')
get_rc_urls = lambda: context.channels
def get_local_urls():
from conda.models.channel import get_conda_build_local_url
return get_conda_build_local_url() or []
class RC(object):
def get(self, key, default=None):
return getattr(context, key, default)
rc = RC()
envs_dirs = context.envs_dirs
def get_rc_path():
path = os.getenv('CONDARC')
if path == ' ':
return None
if path:
return path
for path in user_rc_path, sys_rc_path:
if isfile(path):
return path
return None
rc_path = get_rc_path()
pkgs_dirs = context.pkgs_dirs
default_prefix = context.default_prefix
subdir = context.subdir
arch_name = context.arch_name
bits = context.bits
platform = context.platform
# put back because of conda build
default_python = context.default_python
binstar_upload = context.binstar_upload
| conda/base/context.py
--- a/conda/base/context.py
+++ b/conda/base/context.py
@@ -20,10 +20,10 @@
log = getLogger(__name__)
-
-default_python = '%d.%d' % sys.version_info[:2]
-
-# ----- operating system and architecture -----
+try:
+ import cio_test # NOQA
+except ImportError:
+ log.info("No cio_test package found.")
_platform_map = {
'linux2': 'linux',
@@ -40,8 +40,6 @@
class Context(Configuration):
- default_python = property(lambda self: default_python)
-
add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
add_pip_as_python_dependency = PrimitiveParameter(True)
allow_softlinks = PrimitiveParameter(True)
@@ -79,6 +77,11 @@ class Context(Configuration):
binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
parameter_type=(bool, NoneType))
+ @property
+ def default_python(self):
+ ver = sys.version_info
+ return '%d.%d' % (ver.major, ver.minor)
+
@property
def arch_name(self):
m = machine()
conda/config.py
--- a/conda/config.py
+++ b/conda/config.py
@@ -106,5 +106,5 @@ def get_rc_path():
# put back because of conda build
-default_python = context. default_python
+default_python = context.default_python
binstar_upload = context.binstar_upload |
unsatisfiability crashes conda canary 4.2.5, rather than giving error output
I have a Python 3 environment with the latest conda from conda-canary, which is
```
Current conda install:
platform : linux-64
conda version : 4.2.5
conda is private : False
conda-env version : 2.6.0
conda-build version : 1.19.2
python version : 2.7.12.final.0
requests version : 2.10.0
```
If I try to install something that won't fit, for instance
```
conda install MySQL-python
```
then I get
```
Traceback (most recent call last):
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/exceptions.py", line 471, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/main_install.py", line 80, in execute
install(args, parser, 'install')
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/install.py", line 370, in install
raise CondaError('UnsatisfiableSpecifications', e)
TypeError: __init__() takes exactly 2 arguments (3 given)
```
unsatisfiability crashes conda canary 4.2.5, rather than giving error output
I have a Python 3 environment with the latest conda from conda-canary, which is
```
Current conda install:
platform : linux-64
conda version : 4.2.5
conda is private : False
conda-env version : 2.6.0
conda-build version : 1.19.2
python version : 2.7.12.final.0
requests version : 2.10.0
```
If I try to install something that won't fit, for instance
```
conda install MySQL-python
```
then I get
```
Traceback (most recent call last):
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/exceptions.py", line 471, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/main_install.py", line 80, in execute
install(args, parser, 'install')
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/install.py", line 370, in install
raise CondaError('UnsatisfiableSpecifications', e)
TypeError: __init__() takes exactly 2 arguments (3 given)
```
| conda/cli/install.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import errno
import logging
import os
import shutil
import tarfile
import tempfile
from difflib import get_close_matches
from os.path import abspath, basename, exists, isdir, join
from .. import CondaError, text_type
from .._vendor.auxlib.ish import dals
from ..api import get_index
from ..base.constants import ROOT_ENV_NAME
from ..base.context import check_write, context
from ..cli import common
from ..cli.find_commands import find_executable
from ..exceptions import (CondaAssertionError, CondaEnvironmentNotFoundError,
CondaFileNotFoundError, CondaIOError, CondaImportError, CondaOSError,
CondaRuntimeError, CondaSystemExit, CondaValueError,
DirectoryNotFoundError, DryRunExit, LockError, NoPackagesFoundError,
PackageNotFoundError, TooManyArgumentsError, UnsatisfiableError)
from ..install import is_linked, linked as install_linked, name_dist
from ..misc import append_env, clone_env, explicit, touch_nonadmin
from ..plan import (add_defaults_to_specs, display_actions, execute_actions, get_pinned_specs,
install_actions, is_root_prefix, nothing_to_do, revert_actions)
from ..resolve import Resolve
from ..utils import on_win
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Install every conda package (.tar.bz2) bundled inside a plain tar file.

    Args:
        prefix: target environment prefix
        tar_path: path to a .tar archive containing conda packages
        verbose: passed through to ``explicit``

    Raises:
        CondaFileNotFoundError: when ``tar_path`` does not exist
    """
    if not exists(tar_path):
        raise CondaFileNotFoundError(tar_path)

    tmp_dir = tempfile.mkdtemp()
    try:
        # context manager ensures the tarfile is closed even if
        # extraction fails (the original leaked the handle on error)
        with tarfile.open(tar_path, 'r') as t:
            t.extractall(path=tmp_dir)
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            for fn in files:
                if fn.endswith('.tar.bz2'):
                    paths.append(join(root, fn))
        explicit(paths, prefix, verbose=verbose)
    finally:
        # always remove the scratch directory, even when installation fails
        shutil.rmtree(tmp_dir)
def check_prefix(prefix, json=False):
    """Validate a prospective environment prefix.

    Raises CondaValueError for an illegal name or an already-occupied
    location; an existing but empty directory is acceptable and returns
    None early.
    """
    name = basename(prefix)
    error = None
    if name.startswith('.'):
        error = "environment name cannot start with '.': %s" % name
    if name == ROOT_ENV_NAME:
        error = "'%s' is a reserved environment name" % name
    if exists(prefix):
        # an existing empty directory may be reused as a new environment
        empty_dir = isdir(prefix) and not os.listdir(prefix)
        if empty_dir:
            return None
        error = "prefix already exists: %s" % prefix
    if error is not None:
        raise CondaValueError(error, json)
def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None):
    """Clone an existing environment into ``dst_prefix``.

    ``src_arg`` is treated as a path when it contains a path separator,
    otherwise as an environment name resolved via the context.
    """
    if os.sep in src_arg:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            raise DirectoryNotFoundError(src_arg, 'no such directory: %s' % src_arg, json)
    else:
        # resolve the --clone name through the context
        src_prefix = context.clone_src

    if not json:
        print("Source:      %s" % src_prefix)
        print("Destination: %s" % dst_prefix)

    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet,
                                             index_args=index_args)

    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """Return the post-create help text telling the user how to
    activate and deactivate the environment named/located by ``arg``."""
    win_msg = dals("""
    #
    # To activate this environment, use:
    # > activate %s
    #
    # To deactivate this environment, use:
    # > deactivate %s
    #
    # * for power-users using bash, you must source
    #
    """)
    unix_msg = dals("""
    #
    # To activate this environment, use:
    # > source activate %s
    #
    # To deactivate this environment, use:
    # > source deactivate %s
    #
    """)
    template = win_msg if on_win else unix_msg
    return template % (arg, arg)
def get_revision(arg, json=False):
    """Parse a revision argument into an int.

    Raises:
        CondaValueError: when ``arg`` is not a valid integer revision.
    """
    try:
        return int(arg)
    except ValueError:
        # the original constructed this exception without `raise`, so bad
        # input silently returned None instead of failing
        raise CondaValueError("expected revision number, not: '%s'" % arg, json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Args:
        args: parsed argparse namespace for the subcommand
        parser: the argparse parser (unused directly; kept for interface)
        command: one of 'install', 'update', or 'create'
    """
    context.validate_all()
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = context.prefix if newenv else context.prefix_w_legacy_search
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and is_root_prefix(prefix):
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in root env")
    if isupdate and not (args.file or args.all or args.packages):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    linked = install_linked(prefix)
    lnames = {name_dist(d) for d in linked}
    if isupdate and not args.all:
        # validate each requested package and require it to be installed
        for name in args.packages:
            common.arg2spec(name, json=context.json, update=True)
            if name not in lnames:
                raise PackageNotFoundError(name, "Package '%s' is not installed in %s" %
                                           (name, prefix))

    if newenv and not args.no_default_packages:
        default_packages = context.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in context.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': args.channel or (),
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=context.json))
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    elif getattr(args, 'all', False):
        if not linked:
            raise PackageNotFoundError('', "There are no packages installed in the "
                                       "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=context.json))

    if isinstall and args.revision:
        # validate the revision early so bad input fails before any work
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args.packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            explicit(args.packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not context.quiet)
            return

    if newenv and args.clone:
        package_diff = set(args.packages) - set(default_packages)
        if package_diff:
            raise TooManyArgumentsError(0, len(package_diff), list(package_diff),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args)
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
        return

    index = get_index(channel_urls=index_args['channel_urls'], prepend=index_args['prepend'],
                      platform=None, use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'], unknown=index_args['unknown'],
                      prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name]
            channel_inst = [m['channel'] for m in installed_metadata if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
                assert len(channel_inst) == 1, name
            except AssertionError as e:
                raise CondaAssertionError(text_type(e))
            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if all([latest.version == vers_inst[0],
                    latest.build_number == build_inst[0],
                    latest.channel == channel_inst[0]]):
                args.packages.remove(name)
        if not args.packages:
            from .main_list import print_packages
            if not context.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                raise CondaOSError("Error: could not create directory: %s" % prefix)
        else:
            raise CondaEnvironmentNotFoundError(prefix)

    try:
        if isinstall and args.revision:
            actions = revert_actions(prefix, get_revision(args.revision), index)
        else:
            with common.json_progress_bars(json=context.json and not context.quiet):
                actions = install_actions(prefix, index, specs,
                                          force=args.force,
                                          only_names=only_names,
                                          pinned=args.pinned,
                                          always_copy=context.always_copy,
                                          minimal_hint=args.alt_hint,
                                          update_deps=context.update_dependencies)
    except NoPackagesFoundError as e:
        error_message = [e.args[0]]

        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not context.json:
                # print the message itself, not the repr of the list
                print("Warning: %s, skipping" % error_message[0])
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message.append("\n\nClose matches found; did you mean one of these?\n")
                error_message.append("\n %s: %s" % (pkg, ', '.join(close)))
                nfound += 1
            # NOTE(review): `pkg` below is the loop variable from above and
            # would be unbound if e.pkgs were empty — confirm upstream
            error_message.append('\n\nYou can search for packages on anaconda.org with')
            error_message.append('\n\n anaconda search -t conda %s' % pkg)
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message.append('\n\n(and similarly for the other packages)')

            if not find_executable('anaconda', include_others=False):
                error_message.append('\n\nYou may need to install the anaconda-client')
                error_message.append(' command line client with')
                error_message.append('\n\n conda install anaconda-client')

            pinned_specs = get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message.append("\n\nNote that you have pinned specs in %s:" % path)
                error_message.append("\n\n %r" % pinned_specs)

            error_message = ''.join(error_message)
            raise PackageNotFoundError('', error_message)
    except (UnsatisfiableError, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        if e.args and 'could not import' in e.args[0]:
            raise CondaImportError(text_type(e))
        # CondaError takes a single message argument; passing two positional
        # arguments crashed with "TypeError: __init__() takes exactly 2
        # arguments (3 given)" instead of reporting the real problem
        raise CondaError('UnsatisfiableSpecifications: %s' % text_type(e))

    if nothing_to_do(actions) and not newenv:
        from .main_list import print_packages
        if not context.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    elif newenv:
        # needed in the case of creating an empty env
        from ..instructions import LINK, UNLINK, SYMLINK_CONDA
        if not actions[LINK] and not actions[UNLINK]:
            actions[SYMLINK_CONDA] = [context.root_dir]

    if not context.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        display_actions(actions, index, show_channel_urls=context.show_channel_urls)

    if command in {'install', 'update'}:
        check_write(command, prefix)

    if not context.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        raise DryRunExit

    with common.json_progress_bars(json=context.json and not context.quiet):
        try:
            execute_actions(actions, index, verbose=not context.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise CondaIOError("Can't write the history file", e)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                raise LockError('Already locked: %s' % text_type(e))
            else:
                raise CondaRuntimeError('RuntimeError: %s' % e)
        except SystemExit as e:
            raise CondaSystemExit('Exiting', e)

    if newenv:
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))

    if context.json:
        common.stdout_json_success(actions=actions)
<|code_end|>
| conda/cli/install.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import errno
import logging
import os
import shutil
import tarfile
import tempfile
from difflib import get_close_matches
from os.path import abspath, basename, exists, isdir, join
from .. import text_type
from .._vendor.auxlib.ish import dals
from ..api import get_index
from ..base.constants import ROOT_ENV_NAME
from ..base.context import check_write, context
from ..cli import common
from ..cli.find_commands import find_executable
from ..exceptions import (CondaAssertionError, CondaEnvironmentNotFoundError,
CondaFileNotFoundError, CondaIOError, CondaImportError, CondaOSError,
CondaRuntimeError, CondaSystemExit, CondaValueError,
DirectoryNotFoundError, DryRunExit, LockError, NoPackagesFoundError,
PackageNotFoundError, TooManyArgumentsError, UnsatisfiableError)
from ..install import is_linked, linked as install_linked, name_dist
from ..misc import append_env, clone_env, explicit, touch_nonadmin
from ..plan import (add_defaults_to_specs, display_actions, execute_actions, get_pinned_specs,
install_actions, is_root_prefix, nothing_to_do, revert_actions)
from ..resolve import Resolve
from ..utils import on_win
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Install every conda package found inside the tar archive *tar_path*
    into the environment at *prefix*.

    Raises CondaFileNotFoundError when the archive does not exist.
    """
    if not exists(tar_path):
        raise CondaFileNotFoundError(tar_path)
    extract_dir = tempfile.mkdtemp()
    with tarfile.open(tar_path, 'r') as archive:
        archive.extractall(path=extract_dir)
    # Collect every .tar.bz2 conda package unpacked from the archive.
    pkg_paths = [join(root, name)
                 for root, _, names in os.walk(extract_dir)
                 for name in names
                 if name.endswith('.tar.bz2')]
    explicit(pkg_paths, prefix, verbose=verbose)
    shutil.rmtree(extract_dir)
def check_prefix(prefix, json=False):
    """Validate a prospective environment *prefix*, raising CondaValueError
    when the name is illegal or the location is already occupied.
    """
    env_name = basename(prefix)
    problem = None
    if env_name.startswith('.'):
        problem = "environment name cannot start with '.': %s" % env_name
    if env_name == ROOT_ENV_NAME:
        problem = "'%s' is a reserved environment name" % env_name
    if exists(prefix):
        if isdir(prefix) and not os.listdir(prefix):
            # An existing but empty directory is acceptable; note this early
            # return bypasses any name problem recorded above.
            return None
        problem = "prefix already exists: %s" % prefix
    if problem:
        raise CondaValueError(problem, json)
def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None):
    """Clone an environment into *dst_prefix*.

    *src_arg* is either a filesystem path (contains os.sep) or a value
    resolved through context.clone_src.
    """
    if os.sep in src_arg:
        # Path-like source: must be an existing environment directory.
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            raise DirectoryNotFoundError(src_arg, 'no such directory: %s' % src_arg, json)
    else:
        src_prefix = context.clone_src
    if not json:
        print("Source: %s" % src_prefix)
        print("Destination: %s" % dst_prefix)
    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet,
                                             index_args=index_args)
    if json:
        common.stdout_json_success(actions=actions,
                                   untracked_files=list(untracked_files),
                                   src_prefix=src_prefix,
                                   dst_prefix=dst_prefix)
def print_activate(arg):
    """Return the help text telling the user how to activate/deactivate
    the environment named/located at *arg* (platform-specific wording).
    """
    if on_win:
        template = dals("""
        #
        # To activate this environment, use:
        # > activate %s
        #
        # To deactivate this environment, use:
        # > deactivate %s
        #
        # * for power-users using bash, you must source
        #
        """)
    else:
        template = dals("""
        #
        # To activate this environment, use:
        # > source activate %s
        #
        # To deactivate this environment, use:
        # > source deactivate %s
        #
        """)
    # Both templates interpolate the environment twice.
    return template % (arg, arg)
def get_revision(arg, json=False):
    """Parse *arg* as an integer revision number.

    Raises CondaValueError when *arg* is not parseable as an int.
    (Bug fix: the exception was previously constructed but never
    raised, so bad input silently returned None.)
    """
    try:
        return int(arg)
    except ValueError:
        raise CondaValueError("expected revision number, not: '%s'" % arg, json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create
    """
    context.validate_all()
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = context.prefix if newenv else context.prefix_w_legacy_search
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and is_root_prefix(prefix):
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in root env")
    if isupdate and not (args.file or args.all or args.packages):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)
    linked = install_linked(prefix)
    lnames = {name_dist(d) for d in linked}
    if isupdate and not args.all:
        # 'conda update <pkg>' only makes sense for packages already linked.
        for name in args.packages:
            common.arg2spec(name, json=context.json, update=True)
            if name not in lnames:
                raise PackageNotFoundError(name, "Package '%s' is not installed in %s" %
                                           (name, prefix))
    if newenv and not args.no_default_packages:
        default_packages = context.create_default_packages[:]
        # Override defaults if they are specified at the command line
        for default_pkg in context.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []
    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': args.channel or (),
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=context.json))
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    elif getattr(args, 'all', False):
        if not linked:
            raise PackageNotFoundError('', "There are no packages installed in the "
                                       "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=context.json))
    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args.packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            explicit(args.packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")
    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not context.quiet)
            return
    if newenv and args.clone:
        package_diff = set(args.packages) - set(default_packages)
        if package_diff:
            raise TooManyArgumentsError(0, len(package_diff), list(package_diff),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args)
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
        return
    index = get_index(channel_urls=index_args['channel_urls'], prepend=index_args['prepend'],
                      platform=None, use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'], unknown=index_args['unknown'],
                      prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    add_defaults_to_specs(r, linked, specs, update=isupdate)
    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name]
            channel_inst = [m['channel'] for m in installed_metadata if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
                assert len(channel_inst) == 1, name
            except AssertionError as e:
                raise CondaAssertionError(text_type(e))
            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if all([latest.version == vers_inst[0],
                    latest.build_number == build_inst[0],
                    latest.channel == channel_inst[0]]):
                args.packages.remove(name)
        if not args.packages:
            from .main_list import print_packages
            if not context.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return
    if args.force:
        args.no_deps = True
    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None
    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                raise CondaOSError("Error: could not create directory: %s" % prefix)
        else:
            raise CondaEnvironmentNotFoundError(prefix)
    try:
        if isinstall and args.revision:
            actions = revert_actions(prefix, get_revision(args.revision), index)
        else:
            with common.json_progress_bars(json=context.json and not context.quiet):
                actions = install_actions(prefix, index, specs,
                                          force=args.force,
                                          only_names=only_names,
                                          pinned=args.pinned,
                                          always_copy=context.always_copy,
                                          minimal_hint=args.alt_hint,
                                          update_deps=context.update_dependencies)
    except NoPackagesFoundError as e:
        error_message = [e.args[0]]
        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not context.json:
                # BUG FIX: the original formatted the *list* error_message,
                # printing "Warning: ['...'], skipping"; use its single element.
                print("Warning: %s, skipping" % error_message[0])
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)
            return install(args, parser, command=command)
        else:
            # Build a helpful "did you mean ...?" message for missing specs.
            packages = {index[fn]['name'] for fn in index}
            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message.append("\n\nClose matches found; did you mean one of these?\n")
                error_message.append("\n %s: %s" % (pkg, ', '.join(close)))
                nfound += 1
            error_message.append('\n\nYou can search for packages on anaconda.org with')
            error_message.append('\n\n anaconda search -t conda %s' % pkg)
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message.append('\n\n(and similarly for the other packages)')
            if not find_executable('anaconda', include_others=False):
                error_message.append('\n\nYou may need to install the anaconda-client')
                error_message.append(' command line client with')
                error_message.append('\n\n conda install anaconda-client')
            pinned_specs = get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message.append("\n\nNote that you have pinned specs in %s:" % path)
                error_message.append("\n\n %r" % pinned_specs)
            error_message = ''.join(error_message)
            raise PackageNotFoundError('', error_message)
    except (UnsatisfiableError, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        if e.args and 'could not import' in e.args[0]:
            raise CondaImportError(text_type(e))
        raise
    if nothing_to_do(actions) and not newenv:
        from .main_list import print_packages
        if not context.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    elif newenv:
        # needed in the case of creating an empty env
        from ..instructions import LINK, UNLINK, SYMLINK_CONDA
        if not actions[LINK] and not actions[UNLINK]:
            actions[SYMLINK_CONDA] = [context.root_dir]
    if not context.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        display_actions(actions, index, show_channel_urls=context.show_channel_urls)
    if command in {'install', 'update'}:
        check_write(command, prefix)
    if not context.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        raise DryRunExit
    with common.json_progress_bars(json=context.json and not context.quiet):
        try:
            execute_actions(actions, index, verbose=not context.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise CondaIOError("Can't write the history file", e)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                raise LockError('Already locked: %s' % text_type(e))
            else:
                raise CondaRuntimeError('RuntimeError: %s' % e)
        except SystemExit as e:
            raise CondaSystemExit('Exiting', e)
    if newenv:
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
    if context.json:
        common.stdout_json_success(actions=actions)
| conda/cli/install.py
--- a/conda/cli/install.py
+++ b/conda/cli/install.py
@@ -15,7 +15,7 @@
from difflib import get_close_matches
from os.path import abspath, basename, exists, isdir, join
-from .. import CondaError, text_type
+from .. import text_type
from .._vendor.auxlib.ish import dals
from ..api import get_index
from ..base.constants import ROOT_ENV_NAME
@@ -367,7 +367,7 @@ def install(args, parser, command='install'):
# Unsatisfiable package specifications/no such revision/import error
if e.args and 'could not import' in e.args[0]:
raise CondaImportError(text_type(e))
- raise CondaError('UnsatisfiableSpecifications', e)
+ raise
if nothing_to_do(actions) and not newenv:
from .main_list import print_packages |
unexpected file locking warnings and errors - nuisance messages
I am getting errors and warnings related to locking when I activate an environment as a user. The only other activity that is going on is that the account used to manage the central install is running conda build to build a package - but it is doing so from an environment called 'manage' that is distinct from the environment the user account is trying to activate.
I would call these nuisance messages since I am able to activate the environment.
Here is conda info
```
(ana-1.0.0) psanaphi101: ~/mlearn $ conda info
Current conda install:
platform : linux-64
conda version : 4.2.5
conda is private : False
conda-env version : 2.6.0
conda-build version : 1.19.2
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7 (read only)
default environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0
envs directories : /reg/neh/home/davidsch/.conda/envs
/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs
package cache : /reg/neh/home/davidsch/.conda/envs/.pkgs
/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs
channel URLs : file:///reg/g/psdm/sw/conda/channels/system-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/system-rhel7/noarch/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/noarch/
https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/noarch/
https://conda.anaconda.org/scikit-beam/linux-64/
https://conda.anaconda.org/scikit-beam/noarch/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/noarch/
config file : /reg/neh/home/davidsch/.condarc
offline mode : False
```
and here is the output when activating the environment
```
psanaphi101: ~ $ source activate ana-1.0.0
-bash: /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/conda: /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/python: bad interpreter: No such file or directory
psanaphi101: ~ $ source activate ana-1.0.0
WARNING conda.common.disk:rm_rf(195): Failed to remove /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/conda.
ERROR conda.common.disk:rm_rf(215): rm_rf failed for /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/conda
WARNING conda.common.disk:rm_rf(195): Failed to remove /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/activate.
ERROR conda.common.disk:rm_rf(215): rm_rf failed for /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/activate
WARNING conda.common.disk:rm_rf(195): Failed to remove /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/deactivate.
ERROR conda.common.disk:rm_rf(215): rm_rf failed for /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/deactivate
```
unexpected file locking warnings and errors - nuisance messages
I am getting errors and warnings related to locking when I activate an environment as a user. The only other activity that is going on is that the account used to manage the central install is running conda build to build a package - but it is doing so from an environment called 'manage' that is distinct from the environment the user account is trying to activate.
I would call these nuisance messages since I am able to activate the environment.
Here is conda info
```
(ana-1.0.0) psanaphi101: ~/mlearn $ conda info
Current conda install:
platform : linux-64
conda version : 4.2.5
conda is private : False
conda-env version : 2.6.0
conda-build version : 1.19.2
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7 (read only)
default environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0
envs directories : /reg/neh/home/davidsch/.conda/envs
/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs
package cache : /reg/neh/home/davidsch/.conda/envs/.pkgs
/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs
channel URLs : file:///reg/g/psdm/sw/conda/channels/system-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/system-rhel7/noarch/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/noarch/
https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/noarch/
https://conda.anaconda.org/scikit-beam/linux-64/
https://conda.anaconda.org/scikit-beam/noarch/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/noarch/
config file : /reg/neh/home/davidsch/.condarc
offline mode : False
```
and here is the output when activating the environment
```
psanaphi101: ~ $ source activate ana-1.0.0
-bash: /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/conda: /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/python: bad interpreter: No such file or directory
psanaphi101: ~ $ source activate ana-1.0.0
WARNING conda.common.disk:rm_rf(195): Failed to remove /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/conda.
ERROR conda.common.disk:rm_rf(215): rm_rf failed for /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/conda
WARNING conda.common.disk:rm_rf(195): Failed to remove /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/activate.
ERROR conda.common.disk:rm_rf(215): rm_rf failed for /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/activate
WARNING conda.common.disk:rm_rf(195): Failed to remove /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/deactivate.
ERROR conda.common.disk:rm_rf(215): rm_rf failed for /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/ana-1.0.0/bin/deactivate
```
| conda/common/disk.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from errno import EACCES, EEXIST, ENOENT, EPERM
from itertools import chain
from logging import getLogger
from os import W_OK, access, chmod, getpid, makedirs, rename, stat, unlink, walk, listdir
from os.path import basename, dirname, exists, isdir, isfile, islink, join, abspath
from shutil import rmtree
from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISLNK, S_ISREG, S_IWRITE
from time import sleep
from uuid import uuid4
from ..compat import text_type
from ..utils import on_win
__all__ = ["rm_rf", "exp_backoff_fn", "try_write"]
log = getLogger(__name__)
def try_write(dir_path, heavy=False):
    """Test write access to a directory.
    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access
    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if not (on_win or heavy):
        return access(dir_path, W_OK)
    # try to create a file to see if `dir_path` is writable, see #2151
    probe_path = join(dir_path, '.conda-try-write-%d' % getpid())
    try:
        with open(probe_path, mode='wb') as probe:
            probe.write(b'This is a test file.\n')
        backoff_unlink(probe_path)
        return True
    except (IOError, OSError):
        return False
    finally:
        # no-op when the unlink above already succeeded
        backoff_unlink(probe_path)
def backoff_unlink(file_or_symlink_path):
    """Unlink a file or symlink via exp_backoff_fn; a missing path is not an error."""
    def _remove(target):
        make_writable(target)
        unlink(target)
    try:
        exp_backoff_fn(lambda target: exists(target) and _remove(target),
                       file_or_symlink_path)
    except (IOError, OSError) as e:
        # errno.ENOENT File not found error / No such file or directory
        if e.errno != ENOENT:
            raise
def backoff_rmdir(dirpath):
    """Recursively delete *dirpath*, forcing write permission and retrying."""
    if not isdir(dirpath):
        return
    # shutil.rmtree:
    #   if onerror is set, it is called to handle the error with arguments
    #   (func, path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    #   path is the argument to that function that caused it to fail; and
    #   exc_info is a tuple returned by sys.exc_info() ==> (type, value, traceback).
    def retry(func, path, exc_info):
        if getattr(exc_info[1], 'errno', None) == ENOENT:
            return
        recursive_make_writable(dirname(path))
        func(path)
    def _rmdir(path):
        try:
            recursive_make_writable(path)
            exp_backoff_fn(rmtree, path, onerror=retry)
        except (IOError, OSError) as e:
            if e.errno != ENOENT:
                raise
            log.debug("no such file or directory: %s", path)
    # Remove files first, then directories bottom-up, then the root itself.
    for root, subdirs, filenames in walk(dirpath, topdown=False):
        for filename in filenames:
            backoff_unlink(join(root, filename))
        for subdir in subdirs:
            _rmdir(join(root, subdir))
    _rmdir(dirpath)
def make_writable(path):
    """Add the owner-write bit to *path* (plus exec for directories)."""
    try:
        mode = stat(path).st_mode
        if S_ISDIR(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE | S_IEXEC)
        elif S_ISREG(mode) or S_ISLNK(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE)
        else:
            log.debug("path cannot be made writable: %s", path)
    except Exception as e:
        errno_value = getattr(e, 'errno', None)
        if errno_value == ENOENT:
            raise
        if errno_value in (EACCES, EPERM):
            # permission problems are expected sometimes; just note them
            log.debug("tried make writable but failed: %s\n%r", path, e)
        else:
            log.error("Error making path writable: %s\n%r", path, e)
            raise
def recursive_make_writable(path):
    """Make *path* writable; when it is a directory, every entry under it too.

    The need for this function was pointed out at
    https://github.com/conda/conda/issues/3266#issuecomment-239241915
    Especially on windows, file removal will often fail because it is marked read-only
    """
    if not isdir(path):
        exp_backoff_fn(make_writable, path)
        return
    for root, dirs, files in walk(path):
        # FIX: the original reused the name `path` for the loop variable,
        # shadowing the parameter; renamed to `entry` (no behavior change).
        for entry in chain.from_iterable((files, dirs)):
            try:
                exp_backoff_fn(make_writable, join(root, entry))
            except (IOError, OSError) as e:
                if e.errno == ENOENT:
                    log.debug("no such file or directory: %s", entry)
                else:
                    raise
def exp_backoff_fn(fn, *args, **kwargs):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    if not on_win:
        return fn(*args, **kwargs)
    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    max_tries = 7
    for attempt in range(max_tries):
        try:
            return fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (EPERM, EACCES):
                if attempt == max_tries - 1:
                    raise
                delay = ((2 ** attempt) + random.random()) * 0.1
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          delay)
                sleep(delay)
            elif e.errno in (ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.error("Uncaught backoff with errno %d", e.errno)
                raise
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is 5. This only applies
    to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    try:
        path = abspath(path)
        log.debug("rm_rf %s", path)
        if islink(path) or isfile(path):
            # Note that we have to check if the destination is a link because
            # exists('/path/to/dead-link') will return False, although
            # islink('/path/to/dead-link') is True.
            try:
                backoff_unlink(path)
                return True
            except (OSError, IOError) as e:
                log.debug("%r errno %d\nCannot unlink %s.", e, e.errno, path)
                if trash:
                    move_result = move_path_to_trash(path)
                    if move_result:
                        return True
                # FIX: demoted from log.warn. In shared read-only installs this
                # fires for every user who (harmlessly) cannot remove these
                # files, producing nuisance warnings during 'source activate'
                # (see the locking-warnings issue report).
                log.info("Failed to remove %s.", path)
        elif isdir(path):
            try:
                # On Windows, always move to trash first.
                if trash and on_win:
                    move_result = move_path_to_trash(path, preclean=False)
                    if move_result:
                        return True
                backoff_rmdir(path)
            finally:
                # If path was removed, ensure it's not in linked_data_
                if not isdir(path):
                    from conda.install import delete_linked_data_any
                    delete_linked_data_any(path)
        else:
            log.debug("rm_rf failed. Not a link, file, or directory: %s", path)
        return True
    finally:
        if exists(path):
            log.error("rm_rf failed for %s", path)
            return False
def delete_trash(prefix=None):
    """Empty the .trash directory under every known package cache.

    *prefix* is accepted for backward compatibility but unused here.
    """
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not exists(trash_dir):
            log.debug("Trash directory %s doesn't exist. Moving on.", trash_dir)
            continue
        log.debug("removing trash for %s", trash_dir)
        for p in listdir(trash_dir):
            path = join(trash_dir, p)
            try:
                if isdir(path):
                    backoff_rmdir(path)
                else:
                    backoff_unlink(path)
            except (IOError, OSError) as e:
                log.info("Could not delete path in trash dir %s\n%r", path, e)
        if listdir(trash_dir):
            # FIX: log.warn is a deprecated alias (since Python 3.3);
            # use the canonical log.warning.
            log.warning("Unable to clean trash directory %s", trash_dir)
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash
    """
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            makedirs(trash_dir)
        except (IOError, OSError) as mkdir_err:
            # an already-existing trash dir is fine; anything else,
            # try the next package cache
            if mkdir_err.errno != EEXIST:
                continue
        destination = join(trash_dir, text_type(uuid4()))
        try:
            rename(path, destination)
        except (IOError, OSError) as rename_err:
            log.debug("Could not move %s to %s.\n%r", path, destination, rename_err)
            continue
        log.debug("Moved to trash: %s", path)
        from ..install import delete_linked_data_any
        delete_linked_data_any(path)
        return True
    return False
<|code_end|>
| conda/common/disk.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from errno import EACCES, EEXIST, ENOENT, EPERM
from itertools import chain
from logging import getLogger
from os import W_OK, access, chmod, getpid, makedirs, rename, stat, unlink, walk, listdir
from os.path import basename, dirname, exists, isdir, isfile, islink, join, abspath
from shutil import rmtree
from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISLNK, S_ISREG, S_IWRITE
from time import sleep
from uuid import uuid4
from ..compat import text_type
from ..utils import on_win
__all__ = ["rm_rf", "exp_backoff_fn", "try_write"]
log = getLogger(__name__)
def try_write(dir_path, heavy=False):
    """Test write access to a directory.
    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access
    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if not (on_win or heavy):
        return access(dir_path, W_OK)
    # try to create a file to see if `dir_path` is writable, see #2151
    probe_path = join(dir_path, '.conda-try-write-%d' % getpid())
    try:
        with open(probe_path, mode='wb') as probe:
            probe.write(b'This is a test file.\n')
        backoff_unlink(probe_path)
        return True
    except (IOError, OSError):
        return False
    finally:
        # no-op when the unlink above already succeeded
        backoff_unlink(probe_path)
def backoff_unlink(file_or_symlink_path):
    """Unlink a file or symlink via exp_backoff_fn; a missing path is not an error."""
    def _remove(target):
        make_writable(target)
        unlink(target)
    try:
        exp_backoff_fn(lambda target: exists(target) and _remove(target),
                       file_or_symlink_path)
    except (IOError, OSError) as e:
        # errno.ENOENT File not found error / No such file or directory
        if e.errno != ENOENT:
            raise
def backoff_rmdir(dirpath):
    """Recursively delete *dirpath*, forcing write permission and retrying."""
    if not isdir(dirpath):
        return
    # shutil.rmtree:
    #   if onerror is set, it is called to handle the error with arguments
    #   (func, path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    #   path is the argument to that function that caused it to fail; and
    #   exc_info is a tuple returned by sys.exc_info() ==> (type, value, traceback).
    def retry(func, path, exc_info):
        if getattr(exc_info[1], 'errno', None) == ENOENT:
            return
        recursive_make_writable(dirname(path))
        func(path)
    def _rmdir(path):
        try:
            recursive_make_writable(path)
            exp_backoff_fn(rmtree, path, onerror=retry)
        except (IOError, OSError) as e:
            if e.errno != ENOENT:
                raise
            log.debug("no such file or directory: %s", path)
    # Remove files first, then directories bottom-up, then the root itself.
    for root, subdirs, filenames in walk(dirpath, topdown=False):
        for filename in filenames:
            backoff_unlink(join(root, filename))
        for subdir in subdirs:
            _rmdir(join(root, subdir))
    _rmdir(dirpath)
def make_writable(path):
    """Add the owner-write bit to *path* (plus exec for directories)."""
    try:
        mode = stat(path).st_mode
        if S_ISDIR(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE | S_IEXEC)
        elif S_ISREG(mode) or S_ISLNK(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE)
        else:
            log.debug("path cannot be made writable: %s", path)
    except Exception as e:
        errno_value = getattr(e, 'errno', None)
        if errno_value == ENOENT:
            raise
        if errno_value in (EACCES, EPERM):
            # permission problems are expected sometimes; just note them
            log.debug("tried make writable but failed: %s\n%r", path, e)
        else:
            log.error("Error making path writable: %s\n%r", path, e)
            raise
def recursive_make_writable(path):
    """Make *path* writable; when it is a directory, every entry under it too.

    The need for this function was pointed out at
    https://github.com/conda/conda/issues/3266#issuecomment-239241915
    Especially on windows, file removal will often fail because it is marked read-only
    """
    if not isdir(path):
        exp_backoff_fn(make_writable, path)
        return
    for root, dirs, files in walk(path):
        # FIX: the original reused the name `path` for the loop variable,
        # shadowing the parameter; renamed to `entry` (no behavior change).
        for entry in chain.from_iterable((files, dirs)):
            try:
                exp_backoff_fn(make_writable, join(root, entry))
            except (IOError, OSError) as e:
                if e.errno == ENOENT:
                    log.debug("no such file or directory: %s", entry)
                else:
                    raise
def exp_backoff_fn(fn, *args, **kwargs):
    """Mostly for retrying file operations that fail on Windows due to virus scanners

    Calls ``fn(*args, **kwargs)``; on EPERM/EACCES it sleeps with exponential
    backoff and retries (up to ``max_tries``).  ENOENT and any other errno are
    re-raised immediately.  On non-Windows platforms the call passes straight
    through with no retry loop.
    """
    if not on_win:
        # only Windows needs the retry dance
        return fn(*args, **kwargs)

    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    max_tries = 7
    for n in range(max_tries):
        try:
            result = fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (EPERM, EACCES):
                # likely a transient hold on the file (e.g. AV scanner);
                # on the final attempt give up and propagate
                if n == max_tries-1:
                    raise
                # jittered exponential backoff: ~0.1s, ~0.2s, ~0.4s, ...
                sleep_time = ((2 ** n) + random.random()) * 0.1
                # identify the *caller* in the log line for easier debugging
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          sleep_time)
                sleep(sleep_time)
            elif e.errno in (ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.error("Uncaught backoff with errno %d", e.errno)
                raise
        else:
            # success: stop retrying
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is 5. This only applies
    to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.

    Returns True when the path is gone afterwards, False otherwise.
    """
    try:
        path = abspath(path)
        log.debug("rm_rf %s", path)
        if islink(path) or isfile(path):
            # Note that we have to check if the destination is a link because
            # exists('/path/to/dead-link') will return False, although
            # islink('/path/to/dead-link') is True.
            try:
                backoff_unlink(path)
                return True
            except (OSError, IOError) as e:
                log.debug("%r errno %d\nCannot unlink %s.", e, e.errno, path)
                if trash:
                    # fall back to moving the file into a .trash directory
                    move_result = move_path_to_trash(path)
                    if move_result:
                        return True
                log.info("Failed to remove %s.", path)
        elif isdir(path):
            try:
                # On Windows, always move to trash first.
                if trash and on_win:
                    move_result = move_path_to_trash(path, preclean=False)
                    if move_result:
                        return True
                backoff_rmdir(path)
            finally:
                # If path was removed, ensure it's not in linked_data_
                if not isdir(path):
                    from conda.install import delete_linked_data_any
                    delete_linked_data_any(path)
        else:
            log.debug("rm_rf failed. Not a link, file, or directory: %s", path)
        return True
    finally:
        if exists(path):
            # NOTE(review): a `return` inside `finally` also swallows any
            # in-flight exception from the try block, and overrides the
            # `return True` above when the path still exists -- confirm this
            # best-effort behavior is intended before refactoring.
            log.info("rm_rf failed for %s", path)
            return False
def delete_trash(prefix=None):
    """Best-effort removal of everything inside each package cache's .trash.

    Failures to delete individual entries are logged and skipped; a warning
    is emitted when a trash directory could not be fully emptied.
    """
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not exists(trash_dir):
            log.debug("Trash directory %s doesn't exist. Moving on.", trash_dir)
            continue
        log.debug("removing trash for %s", trash_dir)
        for entry in listdir(trash_dir):
            entry_path = join(trash_dir, entry)
            # pick the appropriate remover for this entry type
            remover = backoff_rmdir if isdir(entry_path) else backoff_unlink
            try:
                remover(entry_path)
            except (IOError, OSError) as e:
                log.info("Could not delete path in trash dir %s\n%r", entry_path, e)
        if listdir(trash_dir):
            log.warn("Unable to clean trash directory %s", trash_dir)
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    if f:
        full_path = join(prefix, f)
    else:
        full_path = prefix
    return move_path_to_trash(full_path)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash

    Tries each package cache's .trash directory in turn; returns True on the
    first successful rename, False when every attempt failed.
    """
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            makedirs(trash_dir)
        except (IOError, OSError) as e1:
            if e1.errno != EEXIST:
                # cannot create this trash dir; try the next package cache
                continue
        # a uuid filename avoids collisions with earlier trashed copies
        trash_file = join(trash_dir, text_type(uuid4()))
        try:
            rename(path, trash_file)
        except (IOError, OSError) as e:
            log.debug("Could not move %s to %s.\n%r", path, trash_file, e)
            continue
        log.debug("Moved to trash: %s", path)
        from ..install import delete_linked_data_any
        delete_linked_data_any(path)
        return True
    return False
| conda/common/disk.py
--- a/conda/common/disk.py
+++ b/conda/common/disk.py
@@ -192,7 +192,7 @@ def rm_rf(path, max_retries=5, trash=True):
move_result = move_path_to_trash(path)
if move_result:
return True
- log.warn("Failed to remove %s.", path)
+ log.info("Failed to remove %s.", path)
elif isdir(path):
try:
@@ -212,7 +212,7 @@ def rm_rf(path, max_retries=5, trash=True):
return True
finally:
if exists(path):
- log.error("rm_rf failed for %s", path)
+ log.info("rm_rf failed for %s", path)
return False
|
conda build - exception - padding error
I just updated to the latest conda-build in defaults, and the latest conda and conda-env in conda canary. When I run conda build I am getting a 'padding error'. Here is my conda environment:
```
platform : linux-64
conda version : 4.2.5
conda is private : False
conda-env version : 2.6.0
conda-build version : 2.0.1
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7 (writable)
default environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7
envs directories : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs
package cache : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs
channel URLs : file:///reg/g/psdm/sw/conda/channels/system-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/system-rhel7/noarch/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/noarch/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/noarch/
https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://conda.anaconda.org/scikit-beam/linux-64/
https://conda.anaconda.org/scikit-beam/noarch/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/noarch/
config file : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/.condarc
offline mode : False
```
and here is the build output
```
SIT_ARCH=x86_64-rhel7-gcc48-opt conda build -c file:///reg/g/psdm/sw/conda/channels/system-rhel7 -c file:///reg/g/psdm/sw/conda/channels/psana-rhel7 -c file:///reg/g/psdm/sw/conda/channels/external-rhel7 --quiet recipes/psana/psana-conda-opt 2>&1 | tee -a /reg/g/psdm/sw/conda/logs/conda_build_psana_psana-conda-1.0.2-py27_1_rhel7_linux-64.log
/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/environ.py:303: UserWarning: The environment variable 'SIT_ROOT' is undefined.
UserWarning
BUILD START: psana-conda-1.0.2-py27_1
(actual version deferred until further download or env creation)
The following NEW packages will be INSTALLED:
boost: 1.57.0-4
cairo: 1.12.18-6
cycler: 0.10.0-py27_0
cython: 0.24.1-py27_0
fontconfig: 2.11.1-6
freetype: 2.5.5-1
h5py: 2.5.0-py27_hdf518_mpi4py2_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
hdf5: 1.8.17-openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
icu: 54.1-0
libgfortran: 3.0.0-1
libpng: 1.6.22-0
libsodium: 1.0.10-0
libxml2: 2.9.2-0
matplotlib: 1.5.1-np111py27_0
mkl: 11.3.3-0
mpi4py: 2.0.0-py27_openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
mysql: 5.5.24-0
ndarray: 1.1.5-0 file:///reg/g/psdm/sw/conda/channels/psana-rhel7
numexpr: 2.6.1-np111py27_0
numpy: 1.11.1-py27_0
openmpi: 1.10.3-lsf_verbs_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
openssl: 1.0.2h-1
pip: 8.1.2-py27_0
pixman: 0.32.6-0
pycairo: 1.10.0-py27_0
pyparsing: 2.1.4-py27_0
pyqt: 4.11.4-py27_4
python: 2.7.12-1
python-dateutil: 2.5.3-py27_0
pytz: 2016.6.1-py27_0
pyzmq: 15.4.0-py27_0
qt: 4.8.5-0
readline: 6.2-2
scipy: 0.18.0-np111py27_0
scons: 2.3.0-py27_0
setuptools: 26.1.1-py27_0
sip: 4.18-py27_0
six: 1.10.0-py27_0
sqlite: 3.13.0-0
szip: 2.1-100 file:///reg/g/psdm/sw/conda/channels/external-rhel7
tables: 3.2.3.1-py27_hdf18_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
tk: 8.5.18-0
wheel: 0.29.0-py27_0
zeromq: 4.1.4-0
zlib: 1.2.8-3
The following NEW packages will be INSTALLED:
boost: 1.57.0-4
cairo: 1.12.18-6
cycler: 0.10.0-py27_0
cython: 0.24.1-py27_0
fontconfig: 2.11.1-6
freetype: 2.5.5-1
h5py: 2.5.0-py27_hdf518_mpi4py2_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
hdf5: 1.8.17-openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
icu: 54.1-0
libgfortran: 3.0.0-1
libpng: 1.6.22-0
libsodium: 1.0.10-0
libxml2: 2.9.2-0
matplotlib: 1.5.1-np111py27_0
mkl: 11.3.3-0
mpi4py: 2.0.0-py27_openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
mysql: 5.5.24-0
ndarray: 1.1.5-0 file:///reg/g/psdm/sw/conda/channels/psana-rhel7
numexpr: 2.6.1-np111py27_0
numpy: 1.11.1-py27_0
openmpi: 1.10.3-lsf_verbs_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
openssl: 1.0.2h-1
pip: 8.1.2-py27_0
pixman: 0.32.6-0
pycairo: 1.10.0-py27_0
pyparsing: 2.1.4-py27_0
pyqt: 4.11.4-py27_4
python: 2.7.12-1
python-dateutil: 2.5.3-py27_0
pytz: 2016.6.1-py27_0
pyzmq: 15.4.0-py27_0
qt: 4.8.5-0
readline: 6.2-2
scipy: 0.18.0-np111py27_0
scons: 2.3.0-py27_0
setuptools: 26.1.1-py27_0
sip: 4.18-py27_0
six: 1.10.0-py27_0
sqlite: 3.13.0-0
szip: 2.1-100 file:///reg/g/psdm/sw/conda/channels/external-rhel7
tables: 3.2.3.1-py27_hdf18_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
tk: 8.5.18-0
wheel: 0.29.0-py27_0
zeromq: 4.1.4-0
zlib: 1.2.8-3
Traceback (most recent call last):
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/conda-build", line 6, in <module>
sys.exit(conda_build.cli.main_build.main())
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/cli/main_build.py", line 239, in main
execute(sys.argv[1:])
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/cli/main_build.py", line 231, in execute
already_built=None, config=config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/api.py", line 83, in build
need_source_download=need_source_download, config=config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 998, in build_tree
config=recipe_config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 558, in build
create_env(config.build_prefix, specs, config=config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 451, in create_env
clear_cache=clear_cache)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 427, in create_env
plan.execute_actions(actions, index, verbose=config.debug)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/plan.py", line 643, in execute_actions
inst.execute_instructions(plan, index, verbose)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/instructions.py", line 132, in execute_instructions
cmd(state, arg)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/instructions.py", line 77, in LINK_CMD
link(state['prefix'], dist, lt, index=state['index'])
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/install.py", line 973, in link
(placeholder, dist))
conda.exceptions.PaddingError: Padding error:
finished running conda build. logfile: /reg/g/psdm/sw/conda/logs/conda_build_psana_psana-conda-1.0.2-py27_1_rhel7_linux-64.log
sh: -c: line 0: unexpected EOF while looking for matching `''
sh: -c: line 1: syntax error: unexpected end of file
```
An older version of the recipe can be found here
https://github.com/davidslac/manage-lcls-conda-build-system/tree/master/recipes/psana/psana-conda-opt
After backing conda-build back down to 1.19, things look like they are working again, at least conda made the build environment and is running my build script
conda build - exception - padding error
I just updated to the latest conda-build in defaults, and the latest conda and conda-env in conda canary. When I run conda build I am getting a 'padding error'. Here is my conda environment:
```
platform : linux-64
conda version : 4.2.5
conda is private : False
conda-env version : 2.6.0
conda-build version : 2.0.1
python version : 2.7.12.final.0
requests version : 2.10.0
root environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7 (writable)
default environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7
envs directories : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs
package cache : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs
channel URLs : file:///reg/g/psdm/sw/conda/channels/system-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/system-rhel7/noarch/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/psana-rhel7/noarch/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/external-rhel7/noarch/
https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://conda.anaconda.org/scikit-beam/linux-64/
https://conda.anaconda.org/scikit-beam/noarch/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/linux-64/
file:///reg/g/psdm/sw/conda/channels/testing-rhel7/noarch/
config file : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/.condarc
offline mode : False
```
and here is the build output
```
SIT_ARCH=x86_64-rhel7-gcc48-opt conda build -c file:///reg/g/psdm/sw/conda/channels/system-rhel7 -c file:///reg/g/psdm/sw/conda/channels/psana-rhel7 -c file:///reg/g/psdm/sw/conda/channels/external-rhel7 --quiet recipes/psana/psana-conda-opt 2>&1 | tee -a /reg/g/psdm/sw/conda/logs/conda_build_psana_psana-conda-1.0.2-py27_1_rhel7_linux-64.log
/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/environ.py:303: UserWarning: The environment variable 'SIT_ROOT' is undefined.
UserWarning
BUILD START: psana-conda-1.0.2-py27_1
(actual version deferred until further download or env creation)
The following NEW packages will be INSTALLED:
boost: 1.57.0-4
cairo: 1.12.18-6
cycler: 0.10.0-py27_0
cython: 0.24.1-py27_0
fontconfig: 2.11.1-6
freetype: 2.5.5-1
h5py: 2.5.0-py27_hdf518_mpi4py2_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
hdf5: 1.8.17-openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
icu: 54.1-0
libgfortran: 3.0.0-1
libpng: 1.6.22-0
libsodium: 1.0.10-0
libxml2: 2.9.2-0
matplotlib: 1.5.1-np111py27_0
mkl: 11.3.3-0
mpi4py: 2.0.0-py27_openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
mysql: 5.5.24-0
ndarray: 1.1.5-0 file:///reg/g/psdm/sw/conda/channels/psana-rhel7
numexpr: 2.6.1-np111py27_0
numpy: 1.11.1-py27_0
openmpi: 1.10.3-lsf_verbs_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
openssl: 1.0.2h-1
pip: 8.1.2-py27_0
pixman: 0.32.6-0
pycairo: 1.10.0-py27_0
pyparsing: 2.1.4-py27_0
pyqt: 4.11.4-py27_4
python: 2.7.12-1
python-dateutil: 2.5.3-py27_0
pytz: 2016.6.1-py27_0
pyzmq: 15.4.0-py27_0
qt: 4.8.5-0
readline: 6.2-2
scipy: 0.18.0-np111py27_0
scons: 2.3.0-py27_0
setuptools: 26.1.1-py27_0
sip: 4.18-py27_0
six: 1.10.0-py27_0
sqlite: 3.13.0-0
szip: 2.1-100 file:///reg/g/psdm/sw/conda/channels/external-rhel7
tables: 3.2.3.1-py27_hdf18_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
tk: 8.5.18-0
wheel: 0.29.0-py27_0
zeromq: 4.1.4-0
zlib: 1.2.8-3
The following NEW packages will be INSTALLED:
boost: 1.57.0-4
cairo: 1.12.18-6
cycler: 0.10.0-py27_0
cython: 0.24.1-py27_0
fontconfig: 2.11.1-6
freetype: 2.5.5-1
h5py: 2.5.0-py27_hdf518_mpi4py2_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
hdf5: 1.8.17-openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
icu: 54.1-0
libgfortran: 3.0.0-1
libpng: 1.6.22-0
libsodium: 1.0.10-0
libxml2: 2.9.2-0
matplotlib: 1.5.1-np111py27_0
mkl: 11.3.3-0
mpi4py: 2.0.0-py27_openmpi_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
mysql: 5.5.24-0
ndarray: 1.1.5-0 file:///reg/g/psdm/sw/conda/channels/psana-rhel7
numexpr: 2.6.1-np111py27_0
numpy: 1.11.1-py27_0
openmpi: 1.10.3-lsf_verbs_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
openssl: 1.0.2h-1
pip: 8.1.2-py27_0
pixman: 0.32.6-0
pycairo: 1.10.0-py27_0
pyparsing: 2.1.4-py27_0
pyqt: 4.11.4-py27_4
python: 2.7.12-1
python-dateutil: 2.5.3-py27_0
pytz: 2016.6.1-py27_0
pyzmq: 15.4.0-py27_0
qt: 4.8.5-0
readline: 6.2-2
scipy: 0.18.0-np111py27_0
scons: 2.3.0-py27_0
setuptools: 26.1.1-py27_0
sip: 4.18-py27_0
six: 1.10.0-py27_0
sqlite: 3.13.0-0
szip: 2.1-100 file:///reg/g/psdm/sw/conda/channels/external-rhel7
tables: 3.2.3.1-py27_hdf18_100 file:///reg/g/psdm/sw/conda/channels/system-rhel7
tk: 8.5.18-0
wheel: 0.29.0-py27_0
zeromq: 4.1.4-0
zlib: 1.2.8-3
Traceback (most recent call last):
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/bin/conda-build", line 6, in <module>
sys.exit(conda_build.cli.main_build.main())
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/cli/main_build.py", line 239, in main
execute(sys.argv[1:])
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/cli/main_build.py", line 231, in execute
already_built=None, config=config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/api.py", line 83, in build
need_source_download=need_source_download, config=config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 998, in build_tree
config=recipe_config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 558, in build
create_env(config.build_prefix, specs, config=config)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 451, in create_env
clear_cache=clear_cache)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda_build/build.py", line 427, in create_env
plan.execute_actions(actions, index, verbose=config.debug)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/plan.py", line 643, in execute_actions
inst.execute_instructions(plan, index, verbose)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/instructions.py", line 132, in execute_instructions
cmd(state, arg)
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/instructions.py", line 77, in LINK_CMD
link(state['prefix'], dist, lt, index=state['index'])
File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/install.py", line 973, in link
(placeholder, dist))
conda.exceptions.PaddingError: Padding error:
finished running conda build. logfile: /reg/g/psdm/sw/conda/logs/conda_build_psana_psana-conda-1.0.2-py27_1_rhel7_linux-64.log
sh: -c: line 0: unexpected EOF while looking for matching `''
sh: -c: line 1: syntax error: unexpected end of file
```
An older version of the recipe can be found here
https://github.com/davidslac/manage-lcls-conda-build-system/tree/master/recipes/psana/psana-conda-opt
After backing conda-build back down to 1.19, things look like they are working again, at least conda made the build environment and is running my build script
| conda/exceptions.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
import sys
from logging import getLogger
from traceback import format_exc
from . import CondaError, text_type
from .compat import iteritems, iterkeys
log = logging.getLogger(__name__)
class LockError(CondaError, RuntimeError):
    """Raised when a conda lock file cannot be acquired or released."""
    def __init__(self, message):
        msg = "Lock error: %s" % message
        super(LockError, self).__init__(msg)


class ArgumentError(CondaError):
    """Base class for problems with command-line arguments."""
    def __init__(self, message, **kwargs):
        super(ArgumentError, self).__init__(message, **kwargs)


class CommandArgumentError(ArgumentError):
    """ArgumentError that also records the full command line being run."""
    def __init__(self, message, **kwargs):
        command = ' '.join(sys.argv)
        super(CommandArgumentError, self).__init__(message, command=command, **kwargs)


class ArgumentNotFoundError(ArgumentError):
    """Raised when an expected argument is absent."""
    def __init__(self, argument, *args):
        self.argument = argument
        # NOTE(review): self.args is read before super().__init__ runs; this
        # works because BaseException.__new__ already stored the constructor
        # arguments (CPython behavior) -- confirm if porting.
        msg = 'Argument not found: %s. %s' \
              % (argument, ' '.join(text_type(arg) for arg in self.args))
        super(ArgumentNotFoundError, self).__init__(msg)


class TooManyArgumentsError(ArgumentError):
    """Raised when more arguments are received than expected."""
    def __init__(self, expected, received, offending_arguments, optional_message='',
                 *args):
        self.expected = expected
        self.received = received
        self.offending_arguments = offending_arguments
        self.optional_message = optional_message
        # pluralize "argument" only when the surplus exceeds one
        suffix = 's' if received - expected > 1 else ''
        msg = ('Too many arguments: %s. Got %s argument%s (%s) and expected %s.' %
               (optional_message, received, suffix, ', '.join(offending_arguments), expected))
        super(TooManyArgumentsError, self).__init__(msg, *args)


class TooFewArgumentsError(ArgumentError):
    """Raised when fewer arguments are received than expected."""
    def __init__(self, expected, received, optional_message='', *args):
        self.expected = expected
        self.received = received
        self.optional_message = optional_message
        msg = 'Too few arguments: %s. Got %s arguments and expected %s.' %\
              (optional_message, received, expected)
        super(TooFewArgumentsError, self).__init__(msg, *args)


class CommandError(CondaError):
    """Base class for errors tied to a specific conda command."""
    def __init__(self, command, message):
        self.command = command
        # NOTE(review): self.args already holds (command, message) here via
        # BaseException.__new__, so extra_info repeats both in the rendered
        # message -- confirm this duplication is intended.
        extra_info = ' '.join(text_type(arg) for arg in self.args)
        msg = "Command Error: error with command '%s'. %s %s" % (command, message, extra_info)
        super(CommandError, self).__init__(msg)


class CommandNotFoundError(CommandError):
    """Raised when the requested conda command does not exist."""
    def __init__(self, command, message):
        self.command = command
        msg = "Command not found: '%s'. %s" % (command, message)
        super(CommandNotFoundError, self).__init__(command, msg)


class CondaFileNotFoundError(CondaError, OSError):
    """Raised when a required file is missing."""
    def __init__(self, filename, *args):
        self.filename = filename
        msg = "File not found: '%s'." % filename
        super(CondaFileNotFoundError, self).__init__(msg, *args)


class DirectoryNotFoundError(CondaError):
    """Raised when a required directory is missing."""
    def __init__(self, directory, message, *args):
        self.directory = directory
        # NOTE(review): the `message` and `*args` parameters are accepted but
        # never used in the rendered message.
        msg = 'Directory not found: %s' % directory
        super(DirectoryNotFoundError, self).__init__(msg)
class CondaEnvironmentNotFoundError(CondaError, EnvironmentError):
    """ Raised when a requested environment cannot be found.

    args:
        environment_name_or_prefix (str): either the name or location of an environment
    """
    def __init__(self, environment_name_or_prefix, *args, **kwargs):
        msg = ("Could not find environment: %s .\n"
               "You can list all discoverable environments with `conda info --envs`."
               % environment_name_or_prefix)
        self.environment_name_or_prefix = environment_name_or_prefix
        super(CondaEnvironmentNotFoundError, self).__init__(msg, *args, **kwargs)


class CondaEnvironmentError(CondaError, EnvironmentError):
    """General error concerning a conda environment."""
    def __init__(self, message, *args):
        msg = 'Environment error: %s' % message
        super(CondaEnvironmentError, self).__init__(msg, *args)


class DryRunExit(CondaError):
    """Raised to end execution early when a dry run is in effect."""
    def __init__(self, message):
        msg = 'Dry run exiting: %s' % message
        super(DryRunExit, self).__init__(msg)


class CondaSystemExit(CondaError, SystemExit):
    """SystemExit variant that is also a CondaError."""
    def __init__(self, *args):
        # NOTE(review): self.args already holds the constructor arguments
        # (stored by BaseException.__new__), so this join sees them.
        msg = ' '.join(text_type(arg) for arg in self.args)
        super(CondaSystemExit, self).__init__(msg)


class SubprocessExit(CondaError):
    """Pass-through CondaError subclass (no message prefix added)."""
    def __init__(self, *args, **kwargs):
        super(SubprocessExit, self).__init__(*args, **kwargs)


class PaddingError(CondaError):
    # presumably raised while linking a package when an embedded prefix
    # placeholder cannot be rewritten for the target path (conda/install.py
    # `link` raises it with (placeholder, dist)) -- TODO confirm
    def __init__(self, *args):
        # self.args already holds the constructor arguments via
        # BaseException.__new__ (see CondaSystemExit note)
        msg = 'Padding error: %s' % ' '.join(text_type(arg) for arg in self.args)
        super(PaddingError, self).__init__(msg)
class LinkError(CondaError):
    """Raised when linking a package into an environment fails."""
    def __init__(self, message):
        # note: the rendered message carries a trailing space after %s
        msg = 'Link error: %s ' % message
        super(LinkError, self).__init__(msg)


class CondaOSError(CondaError, OSError):
    """OSError variant that is also a CondaError."""
    def __init__(self, message):
        msg = 'OS error: %s' % message
        super(CondaOSError, self).__init__(msg)


class ProxyError(CondaError):
    """Raised for proxy-related failures."""
    def __init__(self, message):
        msg = 'Proxy error: %s' % message
        super(ProxyError, self).__init__(msg)


class CondaIOError(CondaError, IOError):
    """IOError variant that is also a CondaError."""
    def __init__(self, message, *args):
        # NOTE(review): *args is accepted but not forwarded to super()
        msg = 'IO error: %s' % message
        super(CondaIOError, self).__init__(msg)


class CondaFileIOError(CondaIOError):
    """Raised when a specific file cannot be read or written."""
    def __init__(self, filepath, message, *args):
        self.filepath = filepath
        msg = "Couldn't read or write to file. '%s'. %s" % (filepath, message)
        super(CondaFileIOError, self).__init__(msg, *args)


class CondaKeyError(CondaError, KeyError):
    """KeyError variant that is also a CondaError."""
    def __init__(self, key, message, *args):
        self.key = key
        # also kept on the instance for programmatic access
        self.msg = "Error with key '%s': %s" % (key, message)
        super(CondaKeyError, self).__init__(self.msg, *args)


class ChannelError(CondaError):
    """Base class for channel-related errors."""
    def __init__(self, message, *args):
        msg = 'Channel Error: %s' % message
        super(ChannelError, self).__init__(msg)


class ChannelNotAllowed(ChannelError):
    """Raised when use of a channel is not allowed."""
    def __init__(self, message, *args):
        msg = 'Channel not allowed: %s' % message
        super(ChannelNotAllowed, self).__init__(msg, *args)


class CondaImportError(CondaError, ImportError):
    """ImportError variant that is also a CondaError."""
    def __init__(self, message):
        msg = 'Import error: %s' % message
        super(CondaImportError, self).__init__(msg)


class ParseError(CondaError):
    """Raised when input text cannot be parsed."""
    def __init__(self, message):
        msg = 'Parse error: %s' % message
        super(ParseError, self).__init__(msg)


class CouldntParseError(ParseError):
    """ParseError carrying the reason parsing failed."""
    def __init__(self, reason):
        self.reason = reason
        # self.args[0] is `reason`, stored by BaseException.__new__
        super(CouldntParseError, self).__init__(self.args[0])


class MD5MismatchError(CondaError):
    """Raised when an MD5 digest does not match the expected value."""
    def __init__(self, message):
        msg = 'MD5MismatchError: %s' % message
        super(MD5MismatchError, self).__init__(msg)
class PackageNotFoundError(CondaError):
    """Raised when a named package cannot be found."""
    def __init__(self, package_name, message, *args):
        self.package_name = package_name
        msg = "Package not found: '%s' %s" % (package_name, message)
        super(PackageNotFoundError, self).__init__(msg)


class CondaHTTPError(CondaError):
    """Raised for HTTP-level failures."""
    def __init__(self, message):
        msg = 'HTTP Error: %s' % message
        super(CondaHTTPError, self).__init__(msg)


class CondaRevisionError(CondaError):
    """Raised for revision-related errors."""
    def __init__(self, message):
        # NOTE(review): the colon placement in 'Revision Error :%s' looks
        # like a typo in the user-facing message.
        msg = 'Revision Error :%s' % message
        super(CondaRevisionError, self).__init__(msg)


class AuthenticationError(CondaError):
    """Raised when authentication fails."""
    pass
class NoPackagesFoundError(CondaError, RuntimeError):
    '''An exception to report that requested packages are missing.

    Args:
        bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
            chains, from top level to bottom.

    Returns:
        Raises an exception with a formatted message detailing the
        missing packages and/or dependencies.
    '''
    def __init__(self, bad_deps):
        from .resolve import dashlist
        from .base.context import context
        # the last spec in each chain is the one that could not be found
        deps = set(q[-1].spec for q in bad_deps)
        if all(len(q) > 1 for q in bad_deps):
            # every chain has a parent spec => these are dependencies
            what = "Dependencies" if len(bad_deps) > 1 else "Dependency"
        elif all(len(q) == 1 for q in bad_deps):
            # every chain is a single spec => directly requested packages
            what = "Packages" if len(bad_deps) > 1 else "Package"
        else:
            what = "Packages/dependencies"
        bad_deps = dashlist(' -> '.join(map(str, q)) for q in bad_deps)
        msg = '%s missing in current %s channels: %s' % (what, context.subdir, bad_deps)
        super(NoPackagesFoundError, self).__init__(msg)
        # retained so callers (e.g. the resolver) can inspect the missing specs
        self.pkgs = deps
class UnsatisfiableError(CondaError, RuntimeError):
    '''An exception to report unsatisfiable dependencies.

    Args:
        bad_deps: a list of tuples of objects (likely MatchSpecs).
        chains: (optional) if True, the tuples are interpreted as chains
            of dependencies, from top level to bottom. If False, the tuples
            are interpreted as simple lists of conflicting specs.

    Returns:
        Raises an exception with a formatted message detailing the
        unsatisfiable specifications.
    '''
    def __init__(self, bad_deps, chains=True):
        from .resolve import dashlist, MatchSpec
        # reduce each dependency object to its spec string
        bad_deps = [list(map(lambda x: x.spec, dep)) for dep in bad_deps]
        if chains:
            # merge chains that share a prefix of package names, collecting
            # the set of version constraints seen at each chain position
            chains = {}
            for dep in sorted(bad_deps, key=len, reverse=True):
                # split each spec (beyond the head) into (name, ' ', constraint)
                dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
                key = (dep[0],) + tuple(v[0] for v in dep1)
                vals = ('',) + tuple(v[2] for v in dep1)
                found = False
                for key2, csets in iteritems(chains):
                    if key2[:len(key)] == key:
                        # an existing (longer, since sorted desc) chain
                        # subsumes this one; fold our constraints into it
                        for cset, val in zip(csets, vals):
                            cset.add(val)
                        found = True
                if not found:
                    chains[key] = [{val} for val in vals]
            bad_deps = []
            for key, csets in iteritems(chains):
                deps = []
                for name, cset in zip(key, csets):
                    if '' not in cset:
                        # every occurrence was constrained; keep all of them
                        pass
                    elif len(cset) == 1:
                        # only the unconstrained form was seen; show bare name
                        cset.clear()
                    else:
                        # both constrained and unconstrained forms were seen
                        cset.remove('')
                        cset.add('*')
                    if name[0] == '@':
                        # '@foo' is the internal encoding for a feature
                        name = 'feature:' + name[1:]
                    deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
                chains[key] = ' -> '.join(deps)
            bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
            msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
        else:
            # simple conflicting-spec lists: sort for stable output
            bad_deps = [sorted(dep) for dep in bad_deps]
            bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
            msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
        msg = msg % dashlist(bad_deps)
        super(UnsatisfiableError, self).__init__(msg)
class InstallError(CondaError):
    """Raised when package installation fails."""
    def __init__(self, message):
        msg = 'Install error: %s' % message
        super(InstallError, self).__init__(msg)


class RemoveError(CondaError):
    """Raised when package removal fails."""
    def __init__(self, message):
        msg = 'Remove Error: %s' % message
        super(RemoveError, self).__init__(msg)


class CondaIndexError(CondaError, IndexError):
    """IndexError variant that is also a CondaError."""
    def __init__(self, message):
        msg = 'Index error: %s' % message
        super(CondaIndexError, self).__init__(msg)


class CondaRuntimeError(CondaError, RuntimeError):
    """RuntimeError variant that is also a CondaError.

    Note: conda_exception_handler catches this before CondaError and reports
    it as an *unexpected* error.
    """
    def __init__(self, message):
        msg = 'Runtime error: %s' % message
        super(CondaRuntimeError, self).__init__(msg)


class CondaValueError(CondaError, ValueError):
    """ValueError variant that is also a CondaError."""
    def __init__(self, message, *args):
        # NOTE(review): *args is accepted but not forwarded to super()
        msg = 'Value error: %s' % message
        super(CondaValueError, self).__init__(msg)
class CondaTypeError(CondaError, TypeError):
    """TypeError variant that is also a CondaError.

    Fix: the message template was never %-interpolated, so the rendered
    message showed the raw "expected type '%s'..." placeholders instead of
    the actual type names.
    """
    def __init__(self, expected_type, received_type, optional_message):
        msg = ("Type error: expected type '%s' and got type '%s'. %s"
               % (expected_type, received_type, optional_message))
        super(CondaTypeError, self).__init__(msg)
class CondaAssertionError(CondaError, AssertionError):
    """AssertionError variant that is also a CondaError."""
    def __init__(self, message):
        msg = 'Assertion error: %s' % message
        super(CondaAssertionError, self).__init__(msg)


class CondaHistoryError(CondaError):
    """Raised for history-related errors."""
    def __init__(self, message):
        msg = 'History error: %s' % message
        super(CondaHistoryError, self).__init__(msg)


class CondaSignatureError(CondaError):
    """Raised for signature-related errors."""
    def __init__(self, message):
        msg = 'Signature error: %s' % message
        super(CondaSignatureError, self).__init__(msg)
def print_conda_exception(exception):
    """Render a handled CondaError for the user.

    In json mode the exception's dump_map() is emitted as JSON on the
    'stdout' logger; otherwise repr(exception) goes to the 'stderr' logger.
    """
    from conda.base.context import context
    stdoutlogger = getLogger('stdout')
    stderrlogger = getLogger('stderr')
    if not context.json:
        stderrlogger.info(repr(exception))
        return
    import json
    stdoutlogger.info(json.dumps(exception.dump_map(), indent=2, sort_keys=True))
def get_info():
    """Run the `conda info` subcommand in-process and capture its output.

    Returns:
        (stdout, stderr) text produced by the command.
    """
    from shlex import split
    from conda.cli import conda_argparse
    from conda.cli.main_info import configure_parser
    from conda.common.io import captured

    parser = conda_argparse.ArgumentParser()
    subparsers = parser.add_subparsers(metavar='command', dest='cmd')
    configure_parser(subparsers)
    parsed = parser.parse_args(split("info"))
    with captured() as c:
        parsed.func(parsed, parser)
    return c.stdout, c.stderr
def print_unexpected_error_message(e):
    """Print a bug-report style message for an unexpected exception.

    Emits the traceback as JSON in json mode; otherwise prints a pointer to
    the issue tracker, `conda info` output (unless already running an info
    command), the failing command line, and the indented traceback.
    """
    # bomb = "\U0001F4A3 "
    # explosion = "\U0001F4A5 "
    # fire = "\U0001F525 "
    # print("%s %s %s" % (3*bomb, 3*explosion, 3*fire))
    traceback = format_exc()
    stderrlogger = getLogger('stderr')
    from conda.base.context import context
    if context.json:
        from conda.cli.common import stdout_json
        stdout_json(dict(error=traceback))
    else:
        message = """\
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
"""
        stderrlogger.info(message)
        command = ' '.join(sys.argv)
        if ' info' not in command:
            # get and print `conda info`
            info_stdout, info_stderr = get_info()
            stderrlogger.info(info_stdout if info_stdout else info_stderr)
        stderrlogger.info("`$ {0}`".format(command))
        stderrlogger.info('\n')
        stderrlogger.info('\n'.join(' ' + line for line in traceback.splitlines()))
def delete_lock(extra_path=None):
    """
    Delete lock on exception accoding to pid
    log warning when delete fails

    Args:
        extra_path : The extra path that you want to search and
        delete locks
    """
    from .cli.main_clean import find_lock
    from .lock import LOCK_EXTENSION
    from .install import rm_rf

    # locks created by this process end in "<pid>.<ext>"
    file_end = "%s.%s" % (os.getpid(), LOCK_EXTENSION)
    locks = list(find_lock(file_ending=file_end, extra_path=extra_path))
    failed_delete = []
    for path in locks:
        try:
            rm_rf(path)
        except (OSError, IOError) as e:
            failed_delete.append(path)
            log.warn("%r Cannot unlink %s.", e, path)

    if failed_delete:
        # fixed garbled message (was "Unable to remove all for this processlocks.")
        log.warn("Unable to remove all locks for this process.\n"
                 "Please run `conda clean --lock`.")
def conda_exception_handler(func, *args, **kwargs):
    """Invoke ``func(*args, **kwargs)``, translating exceptions into an exit
    code of 1 after printing a message and deleting this process's locks.

    Returns ``func``'s result when it is an int.
    NOTE(review): a non-int, non-raising return value falls through and
    yields None — confirm callers treat that as success.
    """
    try:
        return_value = func(*args, **kwargs)
        if isinstance(return_value, int):
            return return_value
    except CondaRuntimeError as e:
        # handled before the generic CondaError branch so runtime errors
        # always get the full unexpected-error report
        print_unexpected_error_message(e)
        delete_lock()
        return 1
    except CondaError as e:
        from conda.base.context import context
        if context.debug:
            print_unexpected_error_message(e)
        else:
            print_conda_exception(e)
        delete_lock()
        return 1
    except Exception as e:
        print_unexpected_error_message(e)
        delete_lock()
        return 1
<|code_end|>
conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import struct
import subprocess
import sys
import tarfile
import traceback
from collections import namedtuple
from enum import Enum
from itertools import chain
from os.path import (abspath, basename, dirname, exists, isdir, isfile, islink, join, normcase,
normpath)
from . import CondaError
from .base.constants import UTF8
from .base.context import context
from .common.disk import exp_backoff_fn, rm_rf
from .common.url import path_to_url
from .exceptions import CondaOSError, LinkError, PaddingError
from .lock import DirectoryLock, FileLock
from .models.channel import Channel
from .utils import on_win
# conda-build compatibility
from .common.disk import delete_trash, move_to_trash # NOQA
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
def win_hard_link(src, dst):
    "Equivalent to os.link, using the win32 CreateHardLink call."
    # CreateHardLink returns FALSE (0) on failure
    if not CreateHardLink(dst, src, None):
        raise CondaOSError('win32 hard link failed')
def win_soft_link(src, dst):
    "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
    # CreateSymbolicLink is None when the API lookup failed at import time
    if CreateSymbolicLink is None:
        raise CondaOSError('win32 soft link not supported')
    if not CreateSymbolicLink(dst, src, isdir(src)):
        raise CondaOSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
    """Special function for Windows XP where the `CreateSymbolicLink`
    function is not available.

    Simply creates a `.bat` file at `dst` which calls `src` together with
    all command line arguments.

    Works of course only with callable files, e.g. `.bat` or `.exe` files.
    """
    from conda.utils import shells
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as exc:  # Python >2.5
        # tolerate an already-existing directory; re-raise anything else
        if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
            pass
        else:
            raise

    # bat file redirect
    if not os.path.isfile(dst + '.bat'):
        with open(dst + '.bat', 'w') as f:
            f.write('@echo off\ncall "%s" %%*\n' % src)

    # TODO: probably need one here for powershell at some point

    # This one is for bash/cygwin/msys
    # set default shell to bash.exe when not provided, as that's most common
    if not shell:
        shell = "bash.exe"

    # technically these are "links" - but islink doesn't work on win
    if not os.path.isfile(dst):
        with open(dst, "w") as f:
            f.write("#!/usr/bin/env bash \n")
            if src.endswith("conda"):
                f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
            else:
                f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X (292 == 0o444)
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))')
class FileMode(Enum):
    """How a file's embedded placeholder prefix is replaced: as plain text
    or as null-padded binary data."""

    text = 'text'
    binary = 'binary'

    def __str__(self):
        # render as the bare value, e.g. 'text'
        return str(self.value)
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
LINK_HARD: 'hard-link',
LINK_SOFT: 'soft-link',
LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create ``dst`` from ``src`` using the requested link type
    (hard link, soft link, or copy), dispatching to the win32 helpers
    on Windows."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise CondaError("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree ``onerror`` callback: make *path* writable, then retry
    the failed operation *func*."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """rmtree ``onerror`` callback that only logs why *path* could not be
    removed, instead of raising."""
    error = exc_info[1]
    if error.errno == errno.EACCES:
        reason = "permission denied"
    elif error.errno == errno.ENOTEMPTY:
        reason = "not empty"
    else:
        reason = "unknown reason"
    log.warn("Cannot remove, {0}: {1}".format(reason, path))
def yield_lines(path):
    """Generator function for lines in file. Empty generator if path does not exist.

    Args:
        path (str): path to file

    Returns:
        iterator: each line in file, not starting with '#'
    """
    try:
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                # skip blank lines and comments
                if not line or line.startswith('#'):
                    continue
                yield line
    except (IOError, OSError) as e:
        if e.errno == errno.ENOENT:
            # PEP 479: `raise StopIteration` inside a generator becomes a
            # RuntimeError on Python 3.7+; a plain return ends the generator.
            return
        else:
            raise
PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
# backwards compatibility for conda-build
prefix_placeholder = PREFIX_PLACEHOLDER
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode)

    A line in `has_prefix` contains one of
      * filepath
      * placeholder mode filepath

    mode values are one of
      * text
      * binary
    """
    ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath'))

    def parse_line(line):
        # placeholder, filemode, filepath
        parts = tuple(x.strip('"\'') for x in shlex.split(line, posix=False))
        if len(parts) == 1:
            # bare filepath: default placeholder, text replacement
            return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0])
        elif len(parts) == 3:
            return ParseResult(parts[0], FileMode(parts[1]), parts[2])
        else:
            raise RuntimeError("Invalid has_prefix file at path: %s" % path)

    parsed_lines = (parse_line(line) for line in yield_lines(path))
    return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.

    Raises PaddingError when `b` is longer than `a` (no room to pad).
    """
    if on_win and has_pyzzer_entry_point(data):
        return replace_pyzzer_entry_point_shebang(data, a, b)

    def replace(match):
        occurances = match.group().count(a)
        padding = (len(a) - len(b))*occurances
        if padding < 0:
            raise PaddingError(a, b, padding)
        # pad with NULs so the overall byte length is unchanged
        return match.group().replace(a, b) + b'\0' * padding

    original_data_len = len(data)
    # match each placeholder up to (and including) its terminating NUL
    pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    data = pat.sub(replace, data)
    # binary layout must not shift: total length is preserved
    assert len(data) == original_data_len

    return data
def replace_long_shebang(mode, data):
    """Rewrite a shebang longer than 127 bytes to ``#!/usr/bin/env <name>``
    so the interpreter is resolved via PATH instead of an over-long path.
    Binary files are left untouched (only a TODO is logged)."""
    if mode is FileMode.text:
        shebang_match = SHEBANG_REGEX.match(data)
        if shebang_match:
            whole_shebang, executable, options = shebang_match.groups()
            if len(whole_shebang) > 127:
                executable_name = executable.decode(UTF8).split('/')[-1]
                new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode(UTF8))
                data = data.replace(whole_shebang, new_shebang.encode(UTF8))
    else:
        # TODO: binary shebangs exist; figure this out in the future if text works well
        log.debug("TODO: binary shebangs exist; figure this out in the future if text works well")
    return data
def has_pyzzer_entry_point(data):
    """Return True when *data* contains a zip end-of-central-directory
    marker, i.e. it looks like a pyzzer/distlib launcher executable."""
    return data.rfind(b'PK\x05\x06') >= 0
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
    """Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
    which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
    We need to change the shebang.
    https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112  # NOQA
    """
    # Copyright (c) 2013 Vinay Sajip.
    #
    # Permission is hereby granted, free of charge, to any person obtaining a copy
    # of this software and associated documentation files (the "Software"), to deal
    # in the Software without restriction, including without limitation the rights
    # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    # copies of the Software, and to permit persons to whom the Software is
    # furnished to do so, subject to the following conditions:
    #
    # The above copyright notice and this permission notice shall be included in
    # all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    # THE SOFTWARE.
    launcher = shebang = None
    # locate the zip end-of-central-directory record from the end of the file
    pos = all_data.rfind(b'PK\x05\x06')
    if pos >= 0:
        end_cdr = all_data[pos + 12:pos + 20]
        cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
        # start of the embedded zip archive
        arc_pos = pos - cdr_size - cdr_offset
        data = all_data[arc_pos:]
        if arc_pos > 0:
            # the shebang sits between the launcher and the archive
            pos = all_data.rfind(b'#!', 0, arc_pos)
            if pos >= 0:
                shebang = all_data[pos:arc_pos]
                if pos > 0:
                    launcher = all_data[:pos]

        # only rewrite when all three sections were found
        if data and shebang and launcher:
            if hasattr(placeholder, 'encode'):
                placeholder = placeholder.encode('utf-8')
            if hasattr(new_prefix, 'encode'):
                new_prefix = new_prefix.encode('utf-8')
            shebang = shebang.replace(placeholder, new_prefix)
            all_data = b"".join([launcher, shebang, data])
    return all_data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Replace ``placeholder`` with ``new_prefix`` in ``data`` — plain text
    substitution for text files, null-padded substitution for binaries."""
    if mode is FileMode.text:
        data = data.replace(placeholder.encode(UTF8), new_prefix.encode(UTF8))
    elif mode == FileMode.binary:
        data = binary_replace(data, placeholder.encode(UTF8), new_prefix.encode(UTF8))
    else:
        raise RuntimeError("Invalid mode: %r" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
    """Rewrite the embedded placeholder prefix in the file at ``path`` to
    ``new_prefix``, preserving its permission bits; no-op when the content
    is unchanged."""
    if on_win and mode is FileMode.text:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return

    # remember mode bits so the rewrite does not drop the executable bit
    st = os.lstat(path)
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into a ``(channel, package_dist)`` pair.

    Any trailing ``[...]`` feature suffix and a ``.tar.bz2`` extension are
    stripped first; when no ``channel::`` prefix is present the channel
    defaults to ``'defaults'``.
    """
    text = str(dist)
    if text.endswith(']'):
        text = text.split('[', 1)[0]
    if text.endswith('.tar.bz2'):
        text = text[:-8]
    pieces = text.split('::', 1)
    if len(pieces) == 1:
        return 'defaults', pieces[0]
    return pieces[0], pieces[1]
def dist2quad(dist):
    """Return ``(name, version, build, channel)`` for a dist string; fields
    missing from the dist come back as ``''``."""
    channel, dist = dist2pair(dist)
    # pad so short dists (e.g. a bare name) still unpack to three fields
    parts = dist.rsplit('-', 2) + ['', '']
    return (parts[0], parts[1], parts[2], channel)
def dist2name(dist):
    """Return just the package-name component of a dist string."""
    return dist2quad(dist)[0]
def name_dist(dist):
    """Alias for :func:`dist2name`."""
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Return the package filename for a dist string (default ``.tar.bz2``)."""
    return dist2pair(dist)[1] + suffix
def dist2dirname(dist):
    """Return the extracted-directory name for a dist string."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if not meta.get('url'):
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # keep the in-memory linked-data cache in sync with the file just written
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except Exception:
        # was a bare `except:`; keep best-effort behavior but never swallow
        # SystemExit / KeyboardInterrupt
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # best-effort: report and continue with the remaining menu files
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        # package ships no script for this action: nothing to do, success
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # NOTE(review): this aliases (not copies) os.environ, so the PKG_* /
    # PREFIX variables leak into this process's environment — confirm intended.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded in the package cache for ``dist``,
    or None when the package is unknown or has no recorded URLs."""
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """Return the package icon (``info/icon.png``) base64-encoded as a
    string, or None when the package ships no icon."""
    import base64
    icon_path = join(source_dir, 'info', 'icon.png')
    try:
        with open(icon_path, 'rb') as fh:
            raw = fh.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode(UTF8)
def read_no_link(info_dir):
    """Return the set of filepaths listed in ``info/no_link`` and
    ``info/no_softlink`` (files that must be copied, never linked)."""
    return set(chain(yield_lines(join(info_dir, 'no_link')),
                     yield_lines(join(info_dir, 'no_softlink'))))
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """Link the conda/activate/deactivate entry points from ``root_dir``
    into ``prefix`` — .bat redirects on Windows, symlinks elsewhere."""
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Create (or refresh) links for the conda entry-point scripts inside
    ``<prefix>/<where>``, tolerating links that are currently in use."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                rm_rf(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            # an in-use link that survived rm_rf is fine; anything else is fatal
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether a hard link can be created from the package cache into
    ``prefix`` (tried with ``info/index.json``); the probe file is always
    removed again."""
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # ensure the cache dictionaries are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed, not a URL
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # an extracted copy only counts when its info metadata is intact
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    # (removed a dead no-op `if url: url = url` left over from an earlier
    # url-rewriting step)
    _, schannel = Channel(url).url_channel_wtf
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # record both the path and its file:// URL form
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion (add_cached_package calls back into package_cache)
    package_cache_['@'] = None
    for pdir in context.pkgs_dirs:
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # reversed so the most recently recorded URL wins
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for ``url`` in the cache filename
    table, or None when the URL is unknown."""
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # first pass: only conflict-free dirs (prefix is None, so the
                # returned conflict is None); second pass: first dir, with the
                # conflicting dist to evict
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return set(dist for dist, rec in package_cache().items() if rec['files'])
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    # return the first cached tarball path; falling off the loop yields None
    for fn in package_cache().get(dist, {}).get('files', ()):
        return fn
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both the path and its file:// URL form from the name table
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return set(dist for dist, rec in package_cache().items() if rec['dirs'])
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    # return the first extracted dir; falling off the loop yields None
    for fn in package_cache().get(dist, {}).get('dirs', ()):
        return fn
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # the compressed tarball remains cached; only forget the dirs
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]
    with FileLock(path):
        # extract into a temp dir, then atomically rename into place
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None, ignore_channels=False):
    """Load (or accept) the conda-meta record for ``dist`` in ``prefix``,
    normalize its url/channel/schannel fields, and store it in the in-memory
    linked-data cache.

    Returns the record, or None when the metadata file is missing or its
    filename does not match the dist.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        linked_data(prefix)  # ensure the per-prefix cache dict exists
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # Rebuild the url from the channel for missing or file: urls, but only
        # when the channel is actually known.  (Fixed: the original compared
        # channel[0] — a single character — against '<unknown>', which was
        # always true.)
        if not url or (url.startswith('file:') and channel != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = Channel(url).url_channel_wtf
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    if ignore_channels:
        linked_data_[prefix][dname] = rec
    else:
        # key includes the channel prefix except for defaults
        cprefix = '' if schannel == 'defaults' else schannel + '::'
        linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop ``dist`` from the in-memory linked-data cache of ``prefix``;
    when ``delete`` is true also remove its conda-meta .json from disk."""
    recs = linked_data_.get(prefix)
    if recs and dist in recs:
        del recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            rm_rf(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    # walk up the path components until a cached prefix is found
    while True:
        if path in linked_data_:
            if dist:
                # path resolved to <prefix>/<dist>: drop just that dist
                delete_linked_data(path, dist)
                return True
            else:
                # path was a whole prefix: forget everything cached for it
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # reached the filesystem root without finding a cached prefix
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    return linked_data(prefix).get(dist)
def linked_data(prefix, ignore_channels=False):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # load_linked_data populates linked_data_[prefix]
                    load_linked_data(prefix, fn[:-5], ignore_channels=ignore_channels)
    return recs
def linked(prefix, ignore_channels=False):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    return set(linked_data(prefix, ignore_channels=ignore_channels).keys())
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links/copies every file, rewrites
    embedded prefixes, runs the post-link script, and finally writes the
    conda-meta record.
    """
    log.debug("linking package %s with link type %s", dist, linktype)
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        raise LinkError('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    # for the lock issue
    # may run into lock if prefix not exist
    if not isdir(prefix):
        os.makedirs(prefix)

    with DirectoryLock(prefix), FileLock(source_dir):
        for filepath in files:
            src = join(source_dir, filepath)
            dst = join(prefix, filepath)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.info("file exists, but clobbering: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # prefix-bearing, no-link, and symlinked files must be copied
            if filepath in has_prefix_files or filepath in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                raise CondaOSError('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                                   (src, dst, lt, e))

        # rewrite the embedded placeholder prefix in each flagged file
        for filepath in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[filepath]
            try:
                update_prefix(join(prefix, filepath), prefix, placeholder, mode)
            except PaddingError:
                raise PaddingError("ERROR: placeholder '%s' too short in: %s\n" %
                                   (placeholder, dist))

        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if context.shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            raise LinkError("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
        if isfile(alt_files_path):
            # alt_files_path is a hack for noarch
            meta_dict['files'] = list(yield_lines(alt_files_path))
        else:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with DirectoryLock(prefix):
        log.debug("unlinking package %s", dist)
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # collect every ancestor directory of the removed files (inside prefix)
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        # remove empty directories, deepest first
        for path in sorted(dst_dirs2, key=len, reverse=True):
            if isdir(path) and not os.listdir(path):
                rm_rf(path)
def messages(prefix):
    """Print the contents of ``<prefix>/.messages.txt`` (if present) to
    stdout, then delete the file regardless of success."""
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            sys.stdout.write(fi.read())
    except IOError:
        pass
    finally:
        rm_rf(path)
<|code_end|>
| conda/exceptions.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
import sys
from logging import getLogger
from traceback import format_exc
from . import CondaError, text_type
from .compat import iteritems, iterkeys
log = logging.getLogger(__name__)
class LockError(CondaError, RuntimeError):
    """Raised when a conda lock cannot be acquired or released."""
    def __init__(self, message):
        msg = "Lock error: %s" % message
        super(LockError, self).__init__(msg)
class ArgumentError(CondaError):
    """Base class for errors in arguments passed to conda."""
    def __init__(self, message, **kwargs):
        super(ArgumentError, self).__init__(message, **kwargs)
class CommandArgumentError(ArgumentError):
    """ArgumentError that also records the full command line being run."""
    def __init__(self, message, **kwargs):
        command = ' '.join(sys.argv)
        super(CommandArgumentError, self).__init__(message, command=command, **kwargs)
class ArgumentNotFoundError(ArgumentError):
    """Raised when an expected argument is not found."""
    def __init__(self, argument, *args):
        self.argument = argument
        # NOTE(review): this joins self.args (populated by
        # BaseException.__new__ from the constructor positionals) rather than
        # the local *args, so `argument` also appears in the joined text —
        # confirm that is intended.
        msg = 'Argument not found: %s. %s' \
              % (argument, ' '.join(text_type(arg) for arg in self.args))
        super(ArgumentNotFoundError, self).__init__(msg)
class TooManyArgumentsError(ArgumentError):
    """Raised when more arguments are received than expected."""
    def __init__(self, expected, received, offending_arguments, optional_message='',
                 *args):
        self.expected = expected
        self.received = received
        self.offending_arguments = offending_arguments
        self.optional_message = optional_message

        # pluralize "argument" when the surplus is more than one
        suffix = 's' if received - expected > 1 else ''
        msg = ('Too many arguments: %s. Got %s argument%s (%s) and expected %s.' %
               (optional_message, received, suffix, ', '.join(offending_arguments), expected))
        super(TooManyArgumentsError, self).__init__(msg, *args)
class TooFewArgumentsError(ArgumentError):
def __init__(self, expected, received, optional_message='', *args):
self.expected = expected
self.received = received
self.optional_message = optional_message
msg = 'Too few arguments: %s. Got %s arguments and expected %s.' %\
(optional_message, received, expected)
super(TooFewArgumentsError, self).__init__(msg, *args)
class CommandError(CondaError):
    """Base class for failures of a specific conda command.

    Args:
        command: the command that failed.
        message: human-readable description of the failure.
    """
    def __init__(self, command, message):
        self.command = command
        # The original appended ``' '.join(self.args)`` here; at init time
        # ``self.args`` already holds (command, message) (set by
        # BaseException.__new__), so the command and message were printed
        # twice in every error.  Build the message from the parameters only.
        msg = "Command Error: error with command '%s'. %s" % (command, message)
        super(CommandError, self).__init__(msg)
class CommandNotFoundError(CommandError):
    """Raised when the requested conda sub-command does not exist."""
    def __init__(self, command, message):
        self.command = command
        msg = "Command not found: '%s'. %s" % (command, message)
        super(CommandNotFoundError, self).__init__(command, msg)


class CondaFileNotFoundError(CondaError, OSError):
    """Raised when a required file is missing."""
    def __init__(self, filename, *args):
        self.filename = filename
        msg = "File not found: '%s'." % filename
        super(CondaFileNotFoundError, self).__init__(msg, *args)


class DirectoryNotFoundError(CondaError):
    """Raised when a required directory is missing."""
    def __init__(self, directory, message, *args):
        self.directory = directory
        # NOTE(review): the ``message`` and ``*args`` parameters are accepted
        # but never used in the formatted message.
        msg = 'Directory not found: %s' % directory
        super(DirectoryNotFoundError, self).__init__(msg)
class CondaEnvironmentNotFoundError(CondaError, EnvironmentError):
    """ Raised when a requested environment cannot be found.

    args:
        environment_name_or_prefix (str): either the name or location of an environment
    """
    def __init__(self, environment_name_or_prefix, *args, **kwargs):
        msg = ("Could not find environment: %s .\n"
               "You can list all discoverable environments with `conda info --envs`."
               % environment_name_or_prefix)
        self.environment_name_or_prefix = environment_name_or_prefix
        super(CondaEnvironmentNotFoundError, self).__init__(msg, *args, **kwargs)


class CondaEnvironmentError(CondaError, EnvironmentError):
    """General error concerning a conda environment."""
    def __init__(self, message, *args):
        msg = 'Environment error: %s' % message
        super(CondaEnvironmentError, self).__init__(msg, *args)


class DryRunExit(CondaError):
    """Raised to terminate execution early when --dry-run is in effect."""
    def __init__(self, message):
        msg = 'Dry run exiting: %s' % message
        super(DryRunExit, self).__init__(msg)
class CondaSystemExit(CondaError, SystemExit):
    """A SystemExit carrying a conda-formatted message."""
    def __init__(self, *args):
        # ``self.args`` is already populated with the constructor arguments
        # by BaseException.__new__, so this joins the args passed in.
        msg = ' '.join(text_type(arg) for arg in self.args)
        super(CondaSystemExit, self).__init__(msg)


class SubprocessExit(CondaError):
    """Raised when a subprocess launched by conda exits abnormally."""
    def __init__(self, *args, **kwargs):
        super(SubprocessExit, self).__init__(*args, **kwargs)


class PaddingError(CondaError):
    """Raised when a package's prefix placeholder is too short to relocate."""
    def __init__(self, dist, placeholder, placeholder_length):
        msg = ("Placeholder of length '%d' too short in package %s.\n"
               "The package must be rebuilt with conda-build > 2.0." % (placeholder_length, dist))
        super(PaddingError, self).__init__(msg)
class LinkError(CondaError):
    """Raised when linking a package into an environment fails."""
    def __init__(self, message):
        msg = 'Link error: %s ' % message
        super(LinkError, self).__init__(msg)


class CondaOSError(CondaError, OSError):
    """An OSError wrapped with conda message formatting."""
    def __init__(self, message):
        msg = 'OS error: %s' % message
        super(CondaOSError, self).__init__(msg)


class ProxyError(CondaError):
    """Raised for proxy configuration/connection problems."""
    def __init__(self, message):
        msg = 'Proxy error: %s' % message
        super(ProxyError, self).__init__(msg)


class CondaIOError(CondaError, IOError):
    """An IOError wrapped with conda message formatting."""
    def __init__(self, message, *args):
        msg = 'IO error: %s' % message
        super(CondaIOError, self).__init__(msg)
class CondaFileIOError(CondaIOError):
    """Raised when a specific file cannot be read or written."""
    def __init__(self, filepath, message, *args):
        self.filepath = filepath
        msg = "Couldn't read or write to file. '%s'. %s" % (filepath, message)
        super(CondaFileIOError, self).__init__(msg, *args)


class CondaKeyError(CondaError, KeyError):
    """A KeyError that records the offending key."""
    def __init__(self, key, message, *args):
        self.key = key
        self.msg = "Error with key '%s': %s" % (key, message)
        super(CondaKeyError, self).__init__(self.msg, *args)


class ChannelError(CondaError):
    """Base class for channel-related errors."""
    def __init__(self, message, *args):
        msg = 'Channel Error: %s' % message
        super(ChannelError, self).__init__(msg)


class ChannelNotAllowed(ChannelError):
    """Raised when a channel is blocked by configuration policy."""
    def __init__(self, message, *args):
        msg = 'Channel not allowed: %s' % message
        super(ChannelNotAllowed, self).__init__(msg, *args)


class CondaImportError(CondaError, ImportError):
    """An ImportError wrapped with conda message formatting."""
    def __init__(self, message):
        msg = 'Import error: %s' % message
        super(CondaImportError, self).__init__(msg)
class ParseError(CondaError):
    """Base class for errors while parsing conda data or configuration."""
    def __init__(self, message):
        msg = 'Parse error: %s' % message
        super(ParseError, self).__init__(msg)


class CouldntParseError(ParseError):
    """A ParseError that records the reason parsing failed."""
    def __init__(self, reason):
        self.reason = reason
        # ``self.args[0]`` equals ``reason`` here: BaseException.__new__
        # stores the constructor arguments in ``args`` before __init__ runs.
        super(CouldntParseError, self).__init__(self.args[0])


class MD5MismatchError(CondaError):
    """Raised when a downloaded package's MD5 checksum does not match."""
    def __init__(self, message):
        msg = 'MD5MismatchError: %s' % message
        super(MD5MismatchError, self).__init__(msg)


class PackageNotFoundError(CondaError):
    """Raised when a named package cannot be located."""
    def __init__(self, package_name, message, *args):
        self.package_name = package_name
        msg = "Package not found: '%s' %s" % (package_name, message)
        super(PackageNotFoundError, self).__init__(msg)
class CondaHTTPError(CondaError):
    """Raised for HTTP failures while talking to a channel or service."""
    def __init__(self, message):
        msg = 'HTTP Error: %s' % message
        super(CondaHTTPError, self).__init__(msg)
class CondaRevisionError(CondaError):
    """Raised when an environment revision cannot be found or applied."""
    def __init__(self, message):
        # Fixed the malformed prefix: the original read 'Revision Error :%s'
        # (space before the colon, none after), unlike every sibling class.
        msg = 'Revision error: %s' % message
        super(CondaRevisionError, self).__init__(msg)
class AuthenticationError(CondaError):
    """Raised when authentication with a channel or token fails."""
    pass
class NoPackagesFoundError(CondaError, RuntimeError):
    '''An exception to report that requested packages are missing.

    Args:
        bad_deps: a list of tuples of MatchSpecs, assumed to be dependency
        chains, from top level to bottom.

    Returns:
        Raises an exception with a formatted message detailing the
        missing packages and/or dependencies.
    '''
    def __init__(self, bad_deps):
        from .resolve import dashlist
        from .base.context import context

        # The last spec in each chain is the one that could not be found.
        deps = set(q[-1].spec for q in bad_deps)
        if all(len(q) > 1 for q in bad_deps):
            what = "Dependencies" if len(bad_deps) > 1 else "Dependency"
        elif all(len(q) == 1 for q in bad_deps):
            what = "Packages" if len(bad_deps) > 1 else "Package"
        else:
            what = "Packages/dependencies"
        bad_deps = dashlist(' -> '.join(map(str, q)) for q in bad_deps)
        msg = '%s missing in current %s channels: %s' % (what, context.subdir, bad_deps)
        super(NoPackagesFoundError, self).__init__(msg)
        # Exposed for callers that want the set of missing spec strings.
        self.pkgs = deps
class UnsatisfiableError(CondaError, RuntimeError):
    '''An exception to report unsatisfiable dependencies.

    Args:
        bad_deps: a list of tuples of objects (likely MatchSpecs).
        chains: (optional) if True, the tuples are interpreted as chains
            of dependencies, from top level to bottom. If False, the tuples
            are interpreted as simple lists of conflicting specs.

    Returns:
        Raises an exception with a formatted message detailing the
        unsatisfiable specifications.
    '''
    def __init__(self, bad_deps, chains=True):
        from .resolve import dashlist, MatchSpec

        # Reduce each dependency object to its spec string.
        bad_deps = [list(map(lambda x: x.spec, dep)) for dep in bad_deps]
        if chains:
            chains = {}
            for dep in sorted(bad_deps, key=len, reverse=True):
                # Split every spec after the chain head into (name, ' ', constraint).
                dep1 = [str(MatchSpec(s)).partition(' ') for s in dep[1:]]
                key = (dep[0],) + tuple(v[0] for v in dep1)
                vals = ('',) + tuple(v[2] for v in dep1)
                found = False
                # Merge constraint sets into any chain this one is a prefix of.
                for key2, csets in iteritems(chains):
                    if key2[:len(key)] == key:
                        for cset, val in zip(csets, vals):
                            cset.add(val)
                        found = True
                if not found:
                    chains[key] = [{val} for val in vals]
            bad_deps = []
            for key, csets in iteritems(chains):
                deps = []
                for name, cset in zip(key, csets):
                    # An empty-string member means "any version seen"; collapse
                    # mixed sets to '*' and singleton-empty sets to nothing.
                    if '' not in cset:
                        pass
                    elif len(cset) == 1:
                        cset.clear()
                    else:
                        cset.remove('')
                        cset.add('*')
                    if name[0] == '@':
                        name = 'feature:' + name[1:]
                    deps.append('%s %s' % (name, '|'.join(sorted(cset))) if cset else name)
                chains[key] = ' -> '.join(deps)
            bad_deps = [chains[key] for key in sorted(iterkeys(chains))]
            msg = '''The following specifications were found to be in conflict:%s
Use "conda info <package>" to see the dependencies for each package.'''
        else:
            bad_deps = [sorted(dep) for dep in bad_deps]
            bad_deps = [', '.join(dep) for dep in sorted(bad_deps)]
            msg = '''The following specifications were found to be incompatible with the
others, or with the existing package set:%s
Use "conda info <package>" to see the dependencies for each package.'''
        msg = msg % dashlist(bad_deps)
        super(UnsatisfiableError, self).__init__(msg)
class InstallError(CondaError):
    """Raised when package installation fails."""
    def __init__(self, message):
        msg = 'Install error: %s' % message
        super(InstallError, self).__init__(msg)


class RemoveError(CondaError):
    """Raised when package removal fails."""
    def __init__(self, message):
        msg = 'Remove Error: %s' % message
        super(RemoveError, self).__init__(msg)


class CondaIndexError(CondaError, IndexError):
    """An IndexError wrapped with conda message formatting."""
    def __init__(self, message):
        msg = 'Index error: %s' % message
        super(CondaIndexError, self).__init__(msg)


class CondaRuntimeError(CondaError, RuntimeError):
    """A RuntimeError wrapped with conda message formatting."""
    def __init__(self, message):
        msg = 'Runtime error: %s' % message
        super(CondaRuntimeError, self).__init__(msg)


class CondaValueError(CondaError, ValueError):
    """A ValueError wrapped with conda message formatting."""
    def __init__(self, message, *args):
        msg = 'Value error: %s' % message
        super(CondaValueError, self).__init__(msg)
class CondaTypeError(CondaError, TypeError):
    """A TypeError reporting the expected versus received type.

    Args:
        expected_type: the type that was expected.
        received_type: the type that was actually received.
        optional_message: extra context appended to the message.
    """
    def __init__(self, expected_type, received_type, optional_message):
        # The original never interpolated the arguments, so the raw '%s'
        # placeholders were shown to the user verbatim.
        msg = ("Type error: expected type '%s' and got type '%s'. %s"
               % (expected_type, received_type, optional_message))
        super(CondaTypeError, self).__init__(msg)
class CondaAssertionError(CondaError, AssertionError):
    """An AssertionError wrapped with conda message formatting."""
    def __init__(self, message):
        msg = 'Assertion error: %s' % message
        super(CondaAssertionError, self).__init__(msg)


class CondaHistoryError(CondaError):
    """Raised for problems reading or writing the environment history."""
    def __init__(self, message):
        msg = 'History error: %s' % message
        super(CondaHistoryError, self).__init__(msg)


class CondaSignatureError(CondaError):
    """Raised when package signature verification fails."""
    def __init__(self, message):
        msg = 'Signature error: %s' % message
        super(CondaSignatureError, self).__init__(msg)
def print_conda_exception(exception):
    """Report *exception* to the user.

    In ``--json`` mode the exception's dump_map() is emitted as a JSON
    document on the stdout logger; otherwise its repr goes to the stderr
    logger.
    """
    from conda.base.context import context

    stdoutlogger = getLogger('stdout')
    stderrlogger = getLogger('stderr')
    if context.json:
        import json
        # (removed several commented-out experimental logger calls that were
        # left behind here)
        stdoutlogger.info(json.dumps(exception.dump_map(), indent=2, sort_keys=True))
    else:
        stderrlogger.info(repr(exception))
def get_info():
    """Return (stdout, stderr) produced by running ``conda info`` in-process."""
    from conda.cli import conda_argparse
    from conda.cli.main_info import configure_parser
    from shlex import split
    from conda.common.io import captured

    p = conda_argparse.ArgumentParser()
    sub_parsers = p.add_subparsers(metavar='command', dest='cmd')
    configure_parser(sub_parsers)
    args = p.parse_args(split("info"))

    # Capture both output streams while the info subcommand runs.
    with captured() as c:
        args.func(args, p)
    return c.stdout, c.stderr
def print_unexpected_error_message(e):
    """Report an unexpected exception: traceback plus `conda info` output."""
    # bomb = "\U0001F4A3 "
    # explosion = "\U0001F4A5 "
    # fire = "\U0001F525 "
    # print("%s %s %s" % (3*bomb, 3*explosion, 3*fire))
    traceback = format_exc()

    stderrlogger = getLogger('stderr')

    from conda.base.context import context
    if context.json:
        from conda.cli.common import stdout_json
        stdout_json(dict(error=traceback))
    else:
        message = """\
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
    https://github.com/conda/conda/issues
"""
        stderrlogger.info(message)
        command = ' '.join(sys.argv)
        if ' info' not in command:
            # get and print `conda info`
            info_stdout, info_stderr = get_info()
            stderrlogger.info(info_stdout if info_stdout else info_stderr)
        stderrlogger.info("`$ {0}`".format(command))
        stderrlogger.info('\n')
        # Indent the traceback so it stands out from the surrounding report.
        stderrlogger.info('\n'.join('    ' + line for line in traceback.splitlines()))
def delete_lock(extra_path=None):
    """
    Delete this process's lock files, logging a warning when deletion fails.

    Args:
        extra_path : The extra path that you want to search and
                     delete locks
    """
    from .cli.main_clean import find_lock
    from .lock import LOCK_EXTENSION
    from .install import rm_rf

    # Lock files belonging to this process end with "<pid>.<extension>".
    file_end = "%s.%s" % (os.getpid(), LOCK_EXTENSION)
    locks = list(find_lock(file_ending=file_end, extra_path=extra_path))
    failed_delete = []
    for path in locks:
        try:
            rm_rf(path)
        except (OSError, IOError) as e:
            failed_delete.append(path)
            log.warn("%r Cannot unlink %s.", e, path)
    if failed_delete:
        # Fixed garbled wording: the original read
        # "Unable to remove all for this processlocks."
        log.warn("Unable to remove all locks for this process.\n"
                 "Please run `conda clean --lock`.")
def conda_exception_handler(func, *args, **kwargs):
    """Run *func*, mapping exceptions to a process exit code.

    Returns func's result when it is an int; returns None for any other
    successful result. On any exception, the error is reported, stale
    locks are cleaned up, and 1 is returned.
    """
    try:
        return_value = func(*args, **kwargs)
        # Only an int result is propagated as an exit code; other results
        # fall through and the handler returns None.
        if isinstance(return_value, int):
            return return_value
    except CondaRuntimeError as e:
        # NOTE(review): CondaRuntimeError is matched before the generic
        # CondaError branch, so it is always reported as "unexpected".
        print_unexpected_error_message(e)
        delete_lock()
        return 1
    except CondaError as e:
        from conda.base.context import context
        if context.debug:
            print_unexpected_error_message(e)
        else:
            print_conda_exception(e)
        delete_lock()
        return 1
    except Exception as e:
        # Last-resort handler: report the full traceback and clean up locks.
        print_unexpected_error_message(e)
        delete_lock()
        return 1
conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import struct
import subprocess
import sys
import tarfile
import traceback
from collections import namedtuple
from enum import Enum
from itertools import chain
from os.path import (abspath, basename, dirname, exists, isdir, isfile, islink, join, normcase,
normpath)
from . import CondaError
from .base.constants import UTF8
from .base.context import context
from .common.disk import exp_backoff_fn, rm_rf
from .common.url import path_to_url
from .exceptions import CondaOSError, LinkError, PaddingError
from .lock import DirectoryLock, FileLock
from .models.channel import Channel
from .utils import on_win
# conda-build compatibility
from .common.disk import delete_trash, move_to_trash # NOQA
if on_win:
    import ctypes
    from ctypes import wintypes

    # Bind the Win32 hard-link API once at import time.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # Symlinks are unavailable on this Windows version (e.g. XP).
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise CondaOSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise CondaOSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise CondaOSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise

        # bat file redirect
        if not os.path.isfile(dst + '.bat'):
            with open(dst + '.bat', 'w') as f:
                f.write('@echo off\ncall "%s" %%*\n' % src)

        # TODO: probably need one here for powershell at some point

        # This one is for bash/cygwin/msys
        # set default shell to bash.exe when not provided, as that's most common
        if not shell:
            shell = "bash.exe"

        # technically these are "links" - but islink doesn't work on win
        if not os.path.isfile(dst):
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
            # Make the new file executable
            # http://stackoverflow.com/a/30463972/1170370
            mode = os.stat(dst).st_mode
            mode |= (mode & 292) >> 2    # copy R bits to X
            os.chmod(dst, mode)
# Module-level loggers and the regex used to locate shebang lines.
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')

# Matches "#!<interpreter> <options>" at the start of a (bytes) file;
# backslash-escaped spaces are allowed inside the interpreter path.
SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))')
class FileMode(Enum):
    """How prefix replacement should treat a file: as text or as binary."""
    text = 'text'
    binary = 'binary'

    def __str__(self):
        # Render as the bare mode name rather than the default Enum repr.
        return str(self.value)
# Link-type codes used throughout this module, plus their display names.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* as a hard link, soft link, or copy of *src*.

    Dispatches to the platform-appropriate primitive based on *linktype*
    (one of LINK_HARD, LINK_SOFT, LINK_COPY).
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise CondaError("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit, then retry."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """Removal-failure callback: log why *path* could not be removed."""
    code = exc_info[1].errno
    if code == errno.EACCES:
        reason = "permission denied"
    elif code == errno.ENOTEMPTY:
        reason = "not empty"
    else:
        reason = "unknown reason"
    log.warn("Cannot remove, {0}: {1}".format(reason, path))
def yield_lines(path):
    """Generator function for lines in file. Empty generator if path does not exist.

    Args:
        path (str): path to file

    Returns:
        iterator: each stripped line in the file, skipping blanks and lines
            starting with '#'
    """
    try:
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                yield line
    except (IOError, OSError) as e:
        if e.errno == errno.ENOENT:
            # PEP 479: ``raise StopIteration`` inside a generator becomes a
            # RuntimeError on Python 3.7+; a plain return ends iteration
            # cleanly for a missing file.
            return
        else:
            raise
# The dummy install prefix baked into packages at build time; it is replaced
# with the real prefix when a package is linked into an environment.
PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')

# backwards compatibility for conda-build
prefix_placeholder = PREFIX_PLACEHOLDER
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode)

    A line in `has_prefix` contains one of
      * filepath
      * placeholder mode filepath

    mode values are one of
      * text
      * binary
    """
    ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath'))

    def parse_line(line):
        # placeholder, filemode, filepath
        # posix=False keeps quoting intact; surrounding quotes are stripped
        # from each token afterwards.
        parts = tuple(x.strip('"\'') for x in shlex.split(line, posix=False))
        if len(parts) == 1:
            # Bare filepath: assume the default placeholder in text mode.
            return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0])
        elif len(parts) == 3:
            return ParseResult(parts[0], FileMode(parts[1]), parts[2])
        else:
            raise RuntimeError("Invalid has_prefix file at path: %s" % path)

    parsed_lines = (parse_line(line) for line in yield_lines(path))
    return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
class _PaddingError(Exception):
    # Internal signal that binary_replace could not pad the replacement;
    # callers surface it as the public PaddingError.
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    if on_win and has_pyzzer_entry_point(data):
        return replace_pyzzer_entry_point_shebang(data, a, b)

    def replace(match):
        occurances = match.group().count(a)
        # Every occurrence of ``a`` shrinks the string by len(a)-len(b);
        # pad with NULs so the overall byte length is preserved.
        padding = (len(a) - len(b))*occurances
        if padding < 0:
            # The replacement is longer than the placeholder; cannot pad.
            raise _PaddingError
        return match.group().replace(a, b) + b'\0' * padding

    original_data_len = len(data)
    # Match each placeholder through the next NUL terminator.
    pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    data = pat.sub(replace, data)
    assert len(data) == original_data_len

    return data
def replace_long_shebang(mode, data):
    """Rewrite a too-long shebang line to use ``/usr/bin/env`` instead.

    Only applies in text mode; binary data is passed through unchanged.
    """
    if mode is FileMode.text:
        shebang_match = SHEBANG_REGEX.match(data)
        if shebang_match:
            whole_shebang, executable, options = shebang_match.groups()
            # 127 bytes is the historical kernel limit for shebang lines.
            if len(whole_shebang) > 127:
                executable_name = executable.decode(UTF8).split('/')[-1]
                new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode(UTF8))
                data = data.replace(whole_shebang, new_shebang.encode(UTF8))
    else:
        # TODO: binary shebangs exist; figure this out in the future if text works well
        log.debug("TODO: binary shebangs exist; figure this out in the future if text works well")
    return data
def has_pyzzer_entry_point(data):
    """Return True if *data* contains a zip end-of-central-directory record,
    indicating a pyzzer-style entry-point executable."""
    return data.rfind(b'PK\x05\x06') != -1
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
    """Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
    which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
    We need to change the shebang.
    https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112  # NOQA
    """
    # Copyright (c) 2013 Vinay Sajip.
    #
    # Permission is hereby granted, free of charge, to any person obtaining a copy
    # of this software and associated documentation files (the "Software"), to deal
    # in the Software without restriction, including without limitation the rights
    # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    # copies of the Software, and to permit persons to whom the Software is
    # furnished to do so, subject to the following conditions:
    #
    # The above copyright notice and this permission notice shall be included in
    # all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    # THE SOFTWARE.
    launcher = shebang = None
    # Locate the zip end-of-central-directory record from the end of the file.
    pos = all_data.rfind(b'PK\x05\x06')
    if pos >= 0:
        # Bytes 12..20 of the EOCD record hold central-directory size/offset,
        # which together give the start of the embedded archive.
        end_cdr = all_data[pos + 12:pos + 20]
        cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
        arc_pos = pos - cdr_size - cdr_offset
        data = all_data[arc_pos:]
        if arc_pos > 0:
            # Anything between the last '#!' and the archive is the shebang;
            # anything before the '#!' is the native launcher stub.
            pos = all_data.rfind(b'#!', 0, arc_pos)
            if pos >= 0:
                shebang = all_data[pos:arc_pos]
                if pos > 0:
                    launcher = all_data[:pos]

        if data and shebang and launcher:
            if hasattr(placeholder, 'encode'):
                placeholder = placeholder.encode('utf-8')
            if hasattr(new_prefix, 'encode'):
                new_prefix = new_prefix.encode('utf-8')
            shebang = shebang.replace(placeholder, new_prefix)
            all_data = b"".join([launcher, shebang, data])
    return all_data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* in *data* per *mode*.

    Text mode does a plain byte replacement; binary mode uses the
    NUL-padding binary_replace. Any other mode is an error.
    """
    old_bytes = placeholder.encode(UTF8)
    new_bytes = new_prefix.encode(UTF8)
    if mode is FileMode.text:
        return data.replace(old_bytes, new_bytes)
    if mode == FileMode.binary:
        return binary_replace(data, old_bytes, new_bytes)
    raise RuntimeError("Invalid mode: %r" % mode)
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
    """Rewrite *placeholder* to *new_prefix* inside the file at *path*.

    The file is rewritten in place (preserving its permission bits) only
    when the substitution actually changed its contents.
    """
    if on_win and mode is FileMode.text:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return
    # Re-apply the original permission bits after rewriting.
    st = os.lstat(path)
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into a ``(channel, package)`` pair.

    Strips any trailing '[features]' suffix and a '.tar.bz2' extension;
    a dist without an explicit 'channel::' prefix maps to 'defaults'.
    """
    text = str(dist)
    if text.endswith(']'):
        text = text.split('[', 1)[0]
    if text.endswith('.tar.bz2'):
        text = text[:-8]
    pieces = text.split('::', 1)
    if len(pieces) == 2:
        return pieces[0], pieces[1]
    return 'defaults', pieces[0]
def dist2quad(dist):
    """Return ``(name, version, build, channel)`` for a dist string."""
    channel, pkg = dist2pair(dist)
    # Pad so dists missing version/build still yield four fields.
    pieces = pkg.rsplit('-', 2) + ['', '']
    return pieces[0], pieces[1], pieces[2], channel
def dist2name(dist):
    """Return just the package name portion of a dist string."""
    return dist2quad(dist)[0]


def name_dist(dist):
    """Alias of dist2name, kept for backwards compatibility."""
    return dist2name(dist)


def dist2filename(dist, suffix='.tar.bz2'):
    """Return the package filename for *dist* (channel prefix stripped)."""
    return dist2pair(dist)[1] + suffix


def dist2dirname(dist):
    """Return the extraction directory name for *dist*."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if not meta.get('url'):
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # Keep the in-memory linked-data cache in sync with what was written.
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return

    try:
        import menuinst
    except:
        # menuinst is optional: a missing or broken install is only a warning.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # One bad menu file should not abort processing of the rest.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # No script shipped with the package: trivially successful.
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # Expose package and prefix details to the script's environment.
    env = os.environ
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first cached URL recorded for *dist*, or None."""
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """Return the base64-encoded contents of ``info/icon.png`` under
    *source_dir*, or None when the icon is absent or unreadable."""
    import base64
    try:
        # Use a context manager so the handle is closed promptly; the
        # original left the file open until garbage collection.
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode(UTF8)
    except IOError:
        return None
def read_no_link(info_dir):
    """Return the union of entries from info/no_link and info/no_softlink."""
    no_link = yield_lines(join(info_dir, 'no_link'))
    no_softlink = yield_lines(join(info_dir, 'no_softlink'))
    return set(chain(no_link, no_softlink))
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """Link the conda/activate/deactivate entry points from *root_dir*
    into the environment at *prefix*."""
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        # Windows XP has no symlinks; use .bat redirect files instead.
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Create links for the conda entry-point scripts via *symlink_fn*."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)

    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                rm_rf(prefix_file)
            # if they're in use, they won't be killed.  Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
                # An in-use link we could not replace is acceptable.
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Return True if a true hard link can be made from the package cache
    into *prefix* (probing with a temporary link that is always removed)."""
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
# ------- package cache ----- construction

# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.

# Maps "schannel::dist" keys to {'files': [...], 'dirs': [...], 'urls': [...]}.
package_cache_ = {}
# Maps package paths and URLs to their channel prefix ('' for defaults).
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the cache dictionaries are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename has no channel information.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # An extracted copy only counts when its metadata files are present.
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    # (removed a leftover no-op here: ``if url: url = url``)
    _, schannel = Channel(url).url_channel_wtf
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Best effort: a read-only cache directory is not an error.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    # Already built: this function is manually memoized via package_cache_.
    if package_cache_:
        return package_cache_
    # Stops recursion: add_cached_package() calls back into this function,
    # and a non-empty dict makes the guard above fire immediately.
    package_cache_['@'] = None
    for pdir in context.pkgs_dirs:
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # Process newest entries first so the most recent URL wins.
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # Also pick up packages present on disk but missing from urls.txt.
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url*, or None if it is unknown."""
    # Building the cache also populates fname_table_.
    package_cache()
    prefix = fname_table_.get(url)
    return prefix
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse its current directory; nothing to evict.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            # First pass (p == 0): accept only conflict-free directories.
            # Second pass (p == 1): settle for the first directory and
            # report the colliding package name as the conflict to remove.
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """Return the set of canonical names of all fetched packages."""
    names = set()
    for dist, rec in package_cache().items():
        if rec['files']:
            names.add(dist)
    return names
def is_fetched(dist):
    """
    Return the full path of the fetched package, or None if it is not in
    the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both lookup keys (filesystem path and file:// URL) before
        # deleting the tarball itself.
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    # Finally forget the cache entry entirely.
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """Return the set of canonical names of all extracted packages."""
    names = set()
    for dist, rec in package_cache().items():
        if rec['dirs']:
            names.add(dist)
    return names
def is_extracted(dist):
    """
    Return the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # The tarball remains; just forget the extracted copies.
        rec['dirs'] = []
    else:
        # Nothing of the package is left; drop the cache entry entirely.
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]
    with FileLock(path):
        # Extract into a temporary directory first, then rename, so a
        # partially-extracted tree never masquerades as a complete package.
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files. However, we want root to be the
            # owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # Re-register so the new extracted directory is recorded.
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None, ignore_channels=False):
    """
    Load (or accept from the caller) the conda-meta record for *dist* in
    *prefix*, normalize its url/channel/fn fields, and store it in the
    module-level linked-data cache.

    Returns the record, or None when the metadata file is missing or the
    filename recorded inside it does not match *dist*.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Record supplied by the caller; just ensure the per-prefix cache
        # dictionary exists before we assign into it below.
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # NOTE(review): `channel[0]` is a single character and can never
        # equal '<unknown>', so that clause is always True; this looks like
        # it was meant to be `channel != '<unknown>'` — confirm upstream.
        if not url or (url.startswith('file:') and channel[0] != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = Channel(url).url_channel_wtf
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    if ignore_channels:
        linked_data_[prefix][dname] = rec
    else:
        # Non-default channels are keyed with a 'channel::' prefix.
        cprefix = '' if schannel == 'defaults' else schannel + '::'
        linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """
    Forget the cached metadata for *dist* under *prefix*; when *delete* is
    true, also remove the corresponding conda-meta .json file from disk.
    """
    prefix_recs = linked_data_.get(prefix)
    if prefix_recs is not None and dist in prefix_recs:
        del prefix_recs[dist]
    if not delete:
        return
    json_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
    if isfile(json_path):
        rm_rf(json_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    # Walk upward one path component at a time until a cached prefix is
    # found, remembering the component we split off as the candidate dist.
    while True:
        if path in linked_data_:
            if dist:
                # `path` is a prefix and `dist` a package inside it.
                delete_linked_data(path, dist)
                return True
            else:
                # `path` itself is a cached prefix; forget it wholesale.
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # Reached the filesystem root without finding a match.
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    all_meta = linked_data(prefix)
    return all_meta.get(dist)
def linked_data(prefix, ignore_channels=False):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            # Each .json file in conda-meta describes one linked package;
            # load_linked_data populates linked_data_[prefix] as a side effect.
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    load_linked_data(prefix, fn[:-5], ignore_channels=ignore_channels)
    return recs
def linked(prefix, ignore_channels=False):
    """Return the set of canonical names of packages linked into *prefix*."""
    recs = linked_data(prefix, ignore_channels=ignore_channels)
    # Iterating a dict yields its keys, so this equals set(recs.keys()).
    return set(recs)
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    meta = load_meta(prefix, dist)
    return meta
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links/copies each file into the
    prefix, rewrites embedded placeholder prefixes, runs post-link, and
    finally writes the conda-meta record.  Raises LinkError, CondaOSError,
    or PaddingError on failure.
    """
    log.debug("linking package %s with link type %s", dist, linktype)
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        raise LinkError('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    # for the lock issue
    # may run into lock if prefix not exist
    if not isdir(prefix):
        os.makedirs(prefix)
    with DirectoryLock(prefix), FileLock(source_dir):
        for filepath in files:
            src = join(source_dir, filepath)
            dst = join(prefix, filepath)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.info("file exists, but clobbering: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # Files with embedded prefixes, no-link files, and symlinks
            # must be copied rather than hard/soft linked.
            if filepath in has_prefix_files or filepath in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                raise CondaOSError('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                                   (src, dst, lt, e))
        # Rewrite placeholder prefixes baked into the package's files.
        for filepath in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[filepath]
            try:
                update_prefix(join(prefix, filepath), prefix, placeholder, mode)
            except _PaddingError:
                raise PaddingError(dist, placeholder, len(placeholder))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
            if context.shortcuts:
                mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            raise LinkError("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
        if isfile(alt_files_path):
            # alt_files_path is a hack for noarch
            meta_dict['files'] = list(yield_lines(alt_files_path))
        else:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with DirectoryLock(prefix):
        log.debug("unlinking package %s", dist)
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        dst_dirs2 = set()
        # Collect every ancestor directory of a removed file, up to (but
        # not including) the prefix itself.
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # remove empty directories, deepest first so children vanish
        # before their parents are checked
        for path in sorted(dst_dirs2, key=len, reverse=True):
            if isdir(path) and not os.listdir(path):
                rm_rf(path)
def messages(prefix):
    """Echo any pending .messages.txt in *prefix* to stdout, then remove it."""
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as fh:
            text = fh.read()
        sys.stdout.write(text)
    except IOError:
        # No message file (or it is unreadable): nothing to show.
        pass
    finally:
        rm_rf(msg_file)
| conda/exceptions.py
--- a/conda/exceptions.py
+++ b/conda/exceptions.py
@@ -128,8 +128,9 @@ def __init__(self, *args, **kwargs):
class PaddingError(CondaError):
- def __init__(self, *args):
- msg = 'Padding error: %s' % ' '.join(text_type(arg) for arg in self.args)
+ def __init__(self, dist, placeholder, placeholder_length):
+ msg = ("Placeholder of length '%d' too short in package %s.\n"
+ "The package must be rebuilt with conda-build > 2.0." % (placeholder_length, dist))
super(PaddingError, self).__init__(msg)
conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -248,6 +248,10 @@ def parse_line(line):
return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
+class _PaddingError(Exception):
+ pass
+
+
def binary_replace(data, a, b):
"""
Perform a binary replacement of `data`, where the placeholder `a` is
@@ -261,7 +265,7 @@ def replace(match):
occurances = match.group().count(a)
padding = (len(a) - len(b))*occurances
if padding < 0:
- raise PaddingError(a, b, padding)
+ raise _PaddingError
return match.group().replace(a, b) + b'\0' * padding
original_data_len = len(data)
@@ -968,9 +972,8 @@ def link(prefix, dist, linktype=LINK_HARD, index=None):
placeholder, mode = has_prefix_files[filepath]
try:
update_prefix(join(prefix, filepath), prefix, placeholder, mode)
- except PaddingError:
- raise PaddingError("ERROR: placeholder '%s' too short in: %s\n" %
- (placeholder, dist))
+ except _PaddingError:
+ raise PaddingError(dist, placeholder, len(placeholder))
# make sure that the child environment behaves like the parent,
# wrt user/system install on win |
Conda Fails with create_default_packages
When using `create_default_packages`, `conda create...` fails.
## Sample condarc:
```
channels:
- defaults
create_default_packages:
- python
- pip
```
## Command
`conda create -n test_conda_update python=2 numpy`
## Error
```
Traceback (most recent call last):
File "/opt/a/b/c/muunitnoc/anaconda/lib/python2.7/site-packages/conda/exceptions.py", line 472, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/opt/a/b/c/muunitnoc/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/opt/a/b/c/muunitnoc/anaconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 68, in execute
install(args, parser, 'create')
File "/opt/a/b/c/muunitnoc/anaconda/lib/python2.7/site-packages/conda/cli/install.py", line 171, in install
default_packages.remove(default_pkg)
AttributeError: 'tuple' object has no attribute 'remove'
```
| conda/cli/install.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import errno
import logging
import os
import shutil
import tarfile
import tempfile
from difflib import get_close_matches
from os.path import abspath, basename, exists, isdir, join
from .. import text_type
from .._vendor.auxlib.ish import dals
from ..api import get_index
from ..base.constants import ROOT_ENV_NAME
from ..base.context import check_write, context
from ..cli import common
from ..cli.find_commands import find_executable
from ..exceptions import (CondaAssertionError, CondaEnvironmentNotFoundError,
CondaFileNotFoundError, CondaIOError, CondaImportError, CondaOSError,
CondaRuntimeError, CondaSystemExit, CondaValueError,
DirectoryNotFoundError, DryRunExit, LockError, NoPackagesFoundError,
PackageNotFoundError, TooManyArgumentsError, UnsatisfiableError)
from ..install import is_linked, linked as install_linked, name_dist
from ..misc import append_env, clone_env, explicit, touch_nonadmin
from ..plan import (add_defaults_to_specs, display_actions, execute_actions, get_pinned_specs,
install_actions, is_root_prefix, nothing_to_do, revert_actions)
from ..resolve import Resolve
from ..utils import on_win
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Extract a .tar bundle of conda packages and install every package in it."""
    if not exists(tar_path):
        raise CondaFileNotFoundError(tar_path)
    tmp_dir = tempfile.mkdtemp()
    with tarfile.open(tar_path, 'r') as t:
        t.extractall(path=tmp_dir)
    # Collect every .tar.bz2 package unpacked from the bundle.
    paths = [join(root, fn)
             for root, dirs, files in os.walk(tmp_dir)
             for fn in files
             if fn.endswith('.tar.bz2')]
    explicit(paths, prefix, verbose=verbose)
    shutil.rmtree(tmp_dir)
def check_prefix(prefix, json=False):
    """
    Validate that *prefix* is usable for a new environment.

    Raises CondaValueError when the environment name is hidden or
    reserved, or when the prefix already hosts a conda environment.
    """
    name = basename(prefix)
    error = None
    if name.startswith('.'):
        error = "environment name cannot start with '.': %s" % name
    if name == ROOT_ENV_NAME:
        error = "'%s' is a reserved environment name" % name
    if exists(prefix):
        # A pre-existing directory is acceptable as long as it is not
        # already a conda environment; previously any non-empty directory
        # was rejected, which disallowed harmless reuse of a directory
        # containing unrelated files.
        if isdir(prefix) and 'conda-meta' not in os.listdir(prefix):
            return None
        error = "prefix already exists: %s" % prefix
    if error:
        raise CondaValueError(error, json)
def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None):
    """
    Clone an existing environment into *dst_prefix*. The source may be a
    filesystem path; otherwise the configured clone source is used.
    """
    # A path-like argument (contains a separator) names the source directly.
    if os.sep not in src_arg:
        src_prefix = context.clone_src
    else:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            raise DirectoryNotFoundError(src_arg, 'no such directory: %s' % src_arg, json)
    if not json:
        print("Source:      %s" % src_prefix)
        print("Destination: %s" % dst_prefix)
    with common.json_progress_bars(json=json and not quiet):
        actions, untracked = clone_env(src_prefix, dst_prefix,
                                       verbose=not json,
                                       quiet=quiet,
                                       index_args=index_args)
    if json:
        common.stdout_json_success(actions=actions,
                                   untracked_files=list(untracked),
                                   src_prefix=src_prefix,
                                   dst_prefix=dst_prefix)
def print_activate(arg):
    """
    Return the platform-appropriate banner telling the user how to
    activate and deactivate the environment named/located by *arg*.
    """
    if on_win:
        message = dals("""
        #
        # To activate this environment, use:
        # > activate %s
        #
        # To deactivate this environment, use:
        # > deactivate %s
        #
        # * for power-users using bash, you must source
        #
        """)
    else:
        message = dals("""
        #
        # To activate this environment, use:
        # > source activate %s
        #
        # To deactivate this environment, use:
        # > source deactivate %s
        #
        """)
    # The same argument fills both the activate and deactivate slots.
    return message % (arg, arg)
def get_revision(arg, json=False):
    """
    Parse *arg* as an integer revision number.

    Raises CondaValueError when *arg* cannot be converted to an int.
    """
    try:
        return int(arg)
    except ValueError:
        # Previously the exception object was constructed but never
        # raised, so invalid input silently returned None.
        raise CondaValueError("expected revision number, not: '%s'" % arg, json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared driver behind the three commands: validates the target prefix,
    assembles the requested specs (from the command line, --file arguments,
    and create_default_packages), computes an action plan, confirms it with
    the user, and executes it.
    """
    context.validate_all()
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = context.prefix if newenv else context.prefix_w_legacy_search
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and is_root_prefix(prefix):
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in root env")
    if isupdate and not (args.file or args.all or args.packages):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)
    linked = install_linked(prefix)
    lnames = {name_dist(d) for d in linked}
    if isupdate and not args.all:
        for name in args.packages:
            common.arg2spec(name, json=context.json, update=True)
            if name not in lnames:
                raise PackageNotFoundError(name, "Package '%s' is not installed in %s" %
                                           (name, prefix))
    if newenv and not args.no_default_packages:
        # create_default_packages is a tuple; copy it into a list so the
        # .remove() calls below work (tuples have no .remove()).
        default_packages = list(context.create_default_packages)
        # Override defaults if they are specified at the command line
        for default_pkg in context.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []
    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': args.channel or (),
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=context.json))
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    elif getattr(args, 'all', False):
        if not linked:
            raise PackageNotFoundError('', "There are no packages installed in the "
                                       "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=context.json))
    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args.packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            explicit(args.packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")
    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not context.quiet)
            return
    if newenv and args.clone:
        package_diff = set(args.packages) - set(default_packages)
        if package_diff:
            raise TooManyArgumentsError(0, len(package_diff), list(package_diff),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args)
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
        return
    index = get_index(channel_urls=index_args['channel_urls'], prepend=index_args['prepend'],
                      platform=None, use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'], unknown=index_args['unknown'],
                      prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    add_defaults_to_specs(r, linked, specs, update=isupdate)
    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name]
            channel_inst = [m['channel'] for m in installed_metadata if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
                assert len(channel_inst) == 1, name
            except AssertionError as e:
                raise CondaAssertionError(text_type(e))
            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if all([latest.version == vers_inst[0],
                    latest.build_number == build_inst[0],
                    latest.channel == channel_inst[0]]):
                args.packages.remove(name)
        if not args.packages:
            from .main_list import print_packages
            if not context.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return
    if args.force:
        args.no_deps = True
    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None
    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                raise CondaOSError("Error: could not create directory: %s" % prefix)
        else:
            raise CondaEnvironmentNotFoundError(prefix)
    try:
        if isinstall and args.revision:
            actions = revert_actions(prefix, get_revision(args.revision), index)
        else:
            with common.json_progress_bars(json=context.json and not context.quiet):
                actions = install_actions(prefix, index, specs,
                                          force=args.force,
                                          only_names=only_names,
                                          pinned=args.pinned,
                                          always_copy=context.always_copy,
                                          minimal_hint=args.alt_hint,
                                          update_deps=context.update_dependencies)
    except NoPackagesFoundError as e:
        error_message = [e.args[0]]
        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not context.json:
                # NOTE(review): error_message is still a list here, so its
                # repr is printed — confirm this is intended.
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}
            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message.append("\n\nClose matches found; did you mean one of these?\n")
                error_message.append("\n %s: %s" % (pkg, ', '.join(close)))
                nfound += 1
            error_message.append('\n\nYou can search for packages on anaconda.org with')
            error_message.append('\n\n anaconda search -t conda %s' % pkg)
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message.append('\n\n(and similarly for the other packages)')
            if not find_executable('anaconda', include_others=False):
                error_message.append('\n\nYou may need to install the anaconda-client')
                error_message.append(' command line client with')
                error_message.append('\n\n conda install anaconda-client')
            pinned_specs = get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message.append("\n\nNote that you have pinned specs in %s:" % path)
                error_message.append("\n\n %r" % pinned_specs)
            error_message = ''.join(error_message)
            raise PackageNotFoundError('', error_message)
    except (UnsatisfiableError, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        if e.args and 'could not import' in e.args[0]:
            raise CondaImportError(text_type(e))
        raise
    if nothing_to_do(actions) and not newenv:
        from .main_list import print_packages
        if not context.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    elif newenv:
        # needed in the case of creating an empty env
        from ..instructions import LINK, UNLINK, SYMLINK_CONDA
        if not actions[LINK] and not actions[UNLINK]:
            actions[SYMLINK_CONDA] = [context.root_dir]
    if not context.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        display_actions(actions, index, show_channel_urls=context.show_channel_urls)
    if command in {'install', 'update'}:
        check_write(command, prefix)
    if not context.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        raise DryRunExit
    with common.json_progress_bars(json=context.json and not context.quiet):
        try:
            execute_actions(actions, index, verbose=not context.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise CondaIOError("Can't write the history file", e)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                raise LockError('Already locked: %s' % text_type(e))
            else:
                raise CondaRuntimeError('RuntimeError: %s' % e)
        except SystemExit as e:
            raise CondaSystemExit('Exiting', e)
    if newenv:
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
    if context.json:
        common.stdout_json_success(actions=actions)
<|code_end|>
| conda/cli/install.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import errno
import logging
import os
import shutil
import tarfile
import tempfile
from difflib import get_close_matches
from os.path import abspath, basename, exists, isdir, join
from .. import text_type
from .._vendor.auxlib.ish import dals
from ..api import get_index
from ..base.constants import ROOT_ENV_NAME
from ..base.context import check_write, context
from ..cli import common
from ..cli.find_commands import find_executable
from ..exceptions import (CondaAssertionError, CondaEnvironmentNotFoundError,
CondaFileNotFoundError, CondaIOError, CondaImportError, CondaOSError,
CondaRuntimeError, CondaSystemExit, CondaValueError,
DirectoryNotFoundError, DryRunExit, LockError, NoPackagesFoundError,
PackageNotFoundError, TooManyArgumentsError, UnsatisfiableError)
from ..install import is_linked, linked as install_linked, name_dist
from ..misc import append_env, clone_env, explicit, touch_nonadmin
from ..plan import (add_defaults_to_specs, display_actions, execute_actions, get_pinned_specs,
install_actions, is_root_prefix, nothing_to_do, revert_actions)
from ..resolve import Resolve
from ..utils import on_win
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Unpack a .tar bundle of conda packages and install each one."""
    if not exists(tar_path):
        raise CondaFileNotFoundError(tar_path)
    scratch = tempfile.mkdtemp()
    with tarfile.open(tar_path, 'r') as archive:
        archive.extractall(path=scratch)
    packages = []
    for dirpath, _, filenames in os.walk(scratch):
        packages.extend(join(dirpath, name)
                        for name in filenames if name.endswith('.tar.bz2'))
    explicit(packages, prefix, verbose=verbose)
    shutil.rmtree(scratch)
def check_prefix(prefix, json=False):
    """
    Validate that *prefix* can host a new environment; raise
    CondaValueError when the name is hidden/reserved or the prefix is
    already a conda environment.
    """
    name = basename(prefix)
    problem = None
    if name.startswith('.'):
        problem = "environment name cannot start with '.': %s" % name
    if name == ROOT_ENV_NAME:
        problem = "'%s' is a reserved environment name" % name
    if exists(prefix):
        # A directory that is not already a conda environment (i.e. has no
        # conda-meta) may be reused; anything else is a collision.
        if not isdir(prefix) or 'conda-meta' in os.listdir(prefix):
            problem = "prefix already exists: %s" % prefix
        else:
            return None
    if problem:
        raise CondaValueError(problem, json)
def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None):
    """
    Clone the environment identified by *src_arg* (a path, or a name
    resolved through the configured clone source) into *dst_prefix*.
    """
    if os.sep in src_arg:
        source = abspath(src_arg)
        if not isdir(source):
            raise DirectoryNotFoundError(src_arg, 'no such directory: %s' % src_arg, json)
    else:
        source = context.clone_src
    if not json:
        print("Source:      %s" % source)
        print("Destination: %s" % dst_prefix)
    show_bars = json and not quiet
    with common.json_progress_bars(json=show_bars):
        actions, untracked_files = clone_env(source, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet,
                                             index_args=index_args)
    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=source,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """
    Return the platform-appropriate banner explaining how to activate and
    deactivate the environment named/located by *arg*.
    """
    if on_win:
        message = dals("""
        #
        # To activate this environment, use:
        # > activate %s
        #
        # To deactivate this environment, use:
        # > deactivate %s
        #
        # * for power-users using bash, you must source
        #
        """)
    else:
        message = dals("""
        #
        # To activate this environment, use:
        # > source activate %s
        #
        # To deactivate this environment, use:
        # > source deactivate %s
        #
        """)
    # The same argument fills both the activate and deactivate slots.
    return message % (arg, arg)
def get_revision(arg, json=False):
    """
    Parse *arg* as an integer revision number.

    Raises CondaValueError when *arg* cannot be converted to an int.
    """
    try:
        return int(arg)
    except ValueError:
        # The exception was previously constructed but never raised, so
        # invalid input silently returned None; raise it instead.
        raise CondaValueError("expected revision number, not: '%s'" % arg, json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Single entry point for the three package-plan commands; `command`
    selects the behavior.  Validates arguments, resolves specs, builds an
    action plan, confirms with the user, then executes it.
    """
    context.validate_all()
    # Which of the three commands are we running?
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    prefix = context.prefix if newenv else context.prefix_w_legacy_search
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and is_root_prefix(prefix):
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in root env")
    if isupdate and not (args.file or args.all or args.packages):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)
    # Currently installed distributions and their package names.
    linked = install_linked(prefix)
    lnames = {name_dist(d) for d in linked}
    if isupdate and not args.all:
        # Updating named packages: each must already be installed.
        for name in args.packages:
            common.arg2spec(name, json=context.json, update=True)
            if name not in lnames:
                raise PackageNotFoundError(name, "Package '%s' is not installed in %s" %
                                           (name, prefix))
    if newenv and not args.no_default_packages:
        default_packages = list(context.create_default_packages)
        # Override defaults if they are specified at the command line
        for default_pkg in context.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []
    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    # Arguments forwarded to get_index()/clone_env().
    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': args.channel or (),
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }
    # Build the list of match specs from --file, --all, and positional args.
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=context.json))
        if '@EXPLICIT' in specs:
            # Spec file is an explicit package list; install it directly.
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    elif getattr(args, 'all', False):
        if not linked:
            raise PackageNotFoundError('', "There are no packages installed in the "
                                       "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=context.json))
    if isinstall and args.revision:
        # Validate the revision argument early; used again below.
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args.packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")
    # Positional args may be literal .tar.bz2 package files — all or nothing.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            explicit(args.packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")
    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not context.quiet)
            return
    if newenv and args.clone:
        # --clone takes no package specs beyond the injected defaults.
        package_diff = set(args.packages) - set(default_packages)
        if package_diff:
            raise TooManyArgumentsError(0, len(package_diff), list(package_diff),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args)
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
        return
    index = get_index(channel_urls=index_args['channel_urls'], prepend=index_args['prepend'],
                      platform=None, use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'], unknown=index_args['unknown'],
                      prefix=prefix)
    r = Resolve(index)
    # Keep a copy of the user-requested specs before defaults are added.
    ospecs = list(specs)
    add_defaults_to_specs(r, linked, specs, update=isupdate)
    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name]
            channel_inst = [m['channel'] for m in installed_metadata if m['name'] == name]
            try:
                # Each requested package must be installed exactly once.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
                assert len(channel_inst) == 1, name
            except AssertionError as e:
                raise CondaAssertionError(text_type(e))
            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if all([latest.version == vers_inst[0],
                    latest.build_number == build_inst[0],
                    latest.channel == channel_inst[0]]):
                # Installed copy already matches the newest available build.
                args.packages.remove(name)
        if not args.packages:
            from .main_list import print_packages
            if not context.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return
    if args.force:
        args.no_deps = True
    if args.no_deps:
        # Restrict the solver to the explicitly requested package names.
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None
    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                raise CondaOSError("Error: could not create directory: %s" % prefix)
        else:
            raise CondaEnvironmentNotFoundError(prefix)
    try:
        if isinstall and args.revision:
            actions = revert_actions(prefix, get_revision(args.revision), index)
        else:
            with common.json_progress_bars(json=context.json and not context.quiet):
                actions = install_actions(prefix, index, specs,
                                          force=args.force,
                                          only_names=only_names,
                                          pinned=args.pinned,
                                          always_copy=context.always_copy,
                                          minimal_hint=args.alt_hint,
                                          update_deps=context.update_dependencies)
    except NoPackagesFoundError as e:
        error_message = [e.args[0]]
        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not context.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)
            # Retry the whole update with the missing packages excluded.
            return install(args, parser, command=command)
        else:
            # Build a helpful "did you mean" error message.
            packages = {index[fn]['name'] for fn in index}
            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message.append("\n\nClose matches found; did you mean one of these?\n")
                error_message.append("\n %s: %s" % (pkg, ', '.join(close)))
                nfound += 1
            error_message.append('\n\nYou can search for packages on anaconda.org with')
            error_message.append('\n\n anaconda search -t conda %s' % pkg)
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message.append('\n\n(and similarly for the other packages)')
            if not find_executable('anaconda', include_others=False):
                error_message.append('\n\nYou may need to install the anaconda-client')
                error_message.append(' command line client with')
                error_message.append('\n\n conda install anaconda-client')
            pinned_specs = get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message.append("\n\nNote that you have pinned specs in %s:" % path)
                error_message.append("\n\n %r" % pinned_specs)
            error_message = ''.join(error_message)
            raise PackageNotFoundError('', error_message)
    except (UnsatisfiableError, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        if e.args and 'could not import' in e.args[0]:
            raise CondaImportError(text_type(e))
        raise
    if nothing_to_do(actions) and not newenv:
        from .main_list import print_packages
        if not context.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    elif newenv:
        # needed in the case of creating an empty env
        from ..instructions import LINK, UNLINK, SYMLINK_CONDA
        if not actions[LINK] and not actions[UNLINK]:
            actions[SYMLINK_CONDA] = [context.root_dir]
    if not context.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        display_actions(actions, index, show_channel_urls=context.show_channel_urls)
    if command in {'install', 'update'}:
        check_write(command, prefix)
    if not context.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        raise DryRunExit
    # Execute the plan, then record it in the environment's history file.
    with common.json_progress_bars(json=context.json and not context.quiet):
        try:
            execute_actions(actions, index, verbose=not context.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        # Best-effort: a read-only history file is not fatal.
                        log.debug("Can't write the history file")
                    else:
                        raise CondaIOError("Can't write the history file", e)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                raise LockError('Already locked: %s' % text_type(e))
            else:
                raise CondaRuntimeError('RuntimeError: %s' % e)
        except SystemExit as e:
            raise CondaSystemExit('Exiting', e)
    if newenv:
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
    if context.json:
        common.stdout_json_success(actions=actions)
| conda/cli/install.py
--- a/conda/cli/install.py
+++ b/conda/cli/install.py
@@ -63,7 +63,7 @@ def check_prefix(prefix, json=False):
if name == ROOT_ENV_NAME:
error = "'%s' is a reserved environment name" % name
if exists(prefix):
- if isdir(prefix) and not os.listdir(prefix):
+ if isdir(prefix) and 'conda-meta' not in os.listdir(prefix):
return None
error = "prefix already exists: %s" % prefix
@@ -164,7 +164,7 @@ def install(args, parser, command='install'):
(name, prefix))
if newenv and not args.no_default_packages:
- default_packages = context.create_default_packages[:]
+ default_packages = list(context.create_default_packages)
# Override defaults if they are specified at the command line
for default_pkg in context.create_default_packages:
if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): |
Unable to conda update --all
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
```
https://github.com/conda/conda/issues
```
Current conda install:
```
platform : win-64
conda version : 4.2.6
conda is private : False
conda-env version : 4.2.6
conda-build version : 2.0.1
python version : 3.5.2.final.0
requests version : 2.11.1
root environment : C:\Anaconda3 (writable)
default environment : C:\Anaconda3
envs directories : C:\Anaconda3\envs
package cache : C:\Anaconda3\pkgs
channel URLs : https://repo.continuum.io/pkgs/free/win-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/win-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://repo.continuum.io/pkgs/msys2/win-64/
https://repo.continuum.io/pkgs/msys2/noarch/
config file : c:\users\gvdeynde\.condarc
offline mode : False
```
`$ C:\Anaconda3\Scripts\conda-script.py update --all`
```
Traceback (most recent call last):
File "C:\Anaconda3\lib\site-packages\conda\exceptions.py", line 472, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Anaconda3\lib\site-packages\conda\cli\main.py", line 144, in _main
exit_code = args.func(args, p)
File "C:\Anaconda3\lib\site-packages\conda\cli\main_update.py", line 65, in execute
install(args, parser, 'update')
File "C:\Anaconda3\lib\site-packages\conda\cli\install.py", line 139, in install
context.validate_all()
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 752, in validate_all
for source in self.raw_data))
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 752, in <genexpr>
for source in self.raw_data))
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 739, in check_source
collected_errors = parameter.collect_errors(self, typed_value, match.source)
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 642, in collect_errors
for key, val in iteritems(value) if not isinstance(val, element_type))
File "C:\Anaconda3\lib\site-packages\conda\compat.py", line 148, in iteritems
return iter(getattr(d, _iteritems)())
AttributeError: 'str' object has no attribute 'items'
```
Unable to conda update --all
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
```
https://github.com/conda/conda/issues
```
Current conda install:
```
platform : win-64
conda version : 4.2.6
conda is private : False
conda-env version : 4.2.6
conda-build version : 2.0.1
python version : 3.5.2.final.0
requests version : 2.11.1
root environment : C:\Anaconda3 (writable)
default environment : C:\Anaconda3
envs directories : C:\Anaconda3\envs
package cache : C:\Anaconda3\pkgs
channel URLs : https://repo.continuum.io/pkgs/free/win-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/win-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://repo.continuum.io/pkgs/msys2/win-64/
https://repo.continuum.io/pkgs/msys2/noarch/
config file : c:\users\gvdeynde\.condarc
offline mode : False
```
`$ C:\Anaconda3\Scripts\conda-script.py update --all`
```
Traceback (most recent call last):
File "C:\Anaconda3\lib\site-packages\conda\exceptions.py", line 472, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Anaconda3\lib\site-packages\conda\cli\main.py", line 144, in _main
exit_code = args.func(args, p)
File "C:\Anaconda3\lib\site-packages\conda\cli\main_update.py", line 65, in execute
install(args, parser, 'update')
File "C:\Anaconda3\lib\site-packages\conda\cli\install.py", line 139, in install
context.validate_all()
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 752, in validate_all
for source in self.raw_data))
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 752, in <genexpr>
for source in self.raw_data))
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 739, in check_source
collected_errors = parameter.collect_errors(self, typed_value, match.source)
File "C:\Anaconda3\lib\site-packages\conda\common\configuration.py", line 642, in collect_errors
for key, val in iteritems(value) if not isinstance(val, element_type))
File "C:\Anaconda3\lib\site-packages\conda\compat.py", line 148, in iteritems
return iter(getattr(d, _iteritems)())
AttributeError: 'str' object has no attribute 'items'
```
| conda/common/configuration.py
<|code_start|>
# -*- coding: utf-8 -*-
"""
A generalized application configuration utility.
Features include:
- lazy eval
- merges configuration files
- parameter type validation, with custom validation
- parameter aliases
Easily extensible to other source formats, e.g. json and ini
Limitations:
- at the moment only supports a "flat" config structure; no nested data structures
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
from collections import Mapping, Set, defaultdict
from enum import Enum
from glob import glob
from itertools import chain
from logging import getLogger
from os import environ, stat
from os.path import join
from stat import S_IFDIR, S_IFMT, S_IFREG
try:
from cytoolz.dicttoolz import merge
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concat, concatv, unique
except ImportError:
from .._vendor.toolz.dicttoolz import merge
from .._vendor.toolz.functoolz import excepts
from .._vendor.toolz.itertoolz import concat, concatv, unique
try:
from ruamel_yaml.comments import CommentedSeq, CommentedMap
except ImportError: # pragma: no cover
from ruamel.yaml.comments import CommentedSeq, CommentedMap # pragma: no cover
from .. import CondaError, CondaMultiError
from .._vendor.auxlib.collection import first, frozendict, last, AttrDict
from .._vendor.auxlib.exceptions import ThisShouldNeverHappenError
from .._vendor.auxlib.path import expand
from .._vendor.auxlib.type_coercion import typify_data_structure, TypeCoercionError
from ..base.constants import EMPTY_MAP, NULL
from .compat import (isiterable, iteritems, odict, primitive_types, text_type,
with_metaclass, string_types, itervalues)
from .yaml import yaml_load
__all__ = ["Configuration", "PrimitiveParameter",
"SequenceParameter", "MapParameter"]
log = getLogger(__name__)
def pretty_list(iterable, padding=' '):  # TODO: move elsewhere in conda.common
    """Render *iterable* as a yaml-style bulleted list, one item per line.

    A non-iterable argument is treated as a single-item list.
    """
    if not isiterable(iterable):
        iterable = [iterable]
    bullet_lines = ["%s- %s" % (padding, item) for item in iterable]
    return '\n'.join(bullet_lines)
def pretty_map(dictionary, padding=' '):
    """Render *dictionary* as yaml-style ``key: value`` lines."""
    rendered = ("%s%s: %s" % (padding, key, value)
                for key, value in iteritems(dictionary))
    return '\n'.join(rendered)
class ConfigurationError(CondaError):
    """Base exception for all configuration-handling errors in this module."""
    pass
class ValidationError(ConfigurationError):
    """Raised when a configuration parameter's merged value fails validation.

    Attributes:
        parameter_name: Name of the offending parameter.
        parameter_value: The invalid value.
        source: Where the value was declared (file path, 'envvars', 'cmd_line').
    """

    def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs):
        self.parameter_name = parameter_name
        self.parameter_value = parameter_value
        self.source = source
        # Fix: the original called super(ConfigurationError, self).__init__,
        # which starts the MRO walk *after* ConfigurationError and would
        # silently skip any __init__ later added to that class.  Start the
        # cooperative chain from this class instead.
        super(ValidationError, self).__init__(msg, **kwargs)

    def __str__(self):
        return ("Parameter %s = %r declared in %s is invalid."
                % (self.parameter_name, self.parameter_value, self.source))
class MultipleKeysError(ValidationError):
    """Raised when several aliases of one parameter appear in a single source."""

    def __init__(self, source, keys, preferred_key):
        self.source = source
        self.keys = keys
        template = ("Multiple aliased keys in file %s:\n"
                    "%s"
                    "Must declare only one. Prefer '%s'")
        msg = template % (source, pretty_list(keys), preferred_key)
        super(MultipleKeysError, self).__init__(preferred_key, None, source, msg=msg)
class InvalidTypeError(ValidationError):
    """Raised when a parameter value has a type outside the accepted set."""

    def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None):
        self.wrong_type = wrong_type
        self.valid_types = valid_types
        if msg is None:
            # Default message lists every acceptable type.
            type_listing = pretty_list(valid_types)
            msg = ("Parameter %s = %r declared in %s has type %s.\n"
                   "Valid types: %s." % (parameter_name, parameter_value,
                                         source, wrong_type, type_listing))
        super(InvalidTypeError, self).__init__(parameter_name, parameter_value, source, msg=msg)
class InvalidElementTypeError(InvalidTypeError):
    """Raised when one element inside a sequence/map parameter has a bad type."""

    def __init__(self, parameter_name, parameter_value, source, wrong_type,
                 valid_types, index_or_key):
        # index_or_key is an int position for sequences, a key for maps.
        qualifier = "at index" if isinstance(index_or_key, int) else "for key"
        msg = ("Parameter %s declared in %s has invalid element %r %s %s.\n"
               "Valid element types:\n"
               "%s." % (parameter_name, source, parameter_value, qualifier,
                        index_or_key, pretty_list(valid_types)))
        super(InvalidElementTypeError, self).__init__(parameter_name, parameter_value, source,
                                                      wrong_type, valid_types, msg=msg)
class CustomValidationError(ValidationError):
    """Raised when a parameter's user-supplied validation callable rejects a value,
    carrying the callable's own explanatory message."""

    def __init__(self, parameter_name, parameter_value, source, custom_message):
        msg = ("Parameter %s = %r declared in %s is invalid.\n"
               "%s" % (parameter_name, parameter_value, source, custom_message))
        super(CustomValidationError, self).__init__(parameter_name, parameter_value, source,
                                                    msg=msg)
class MultiValidationError(CondaMultiError, ConfigurationError):
    """Aggregates several validation errors so they can be reported together."""

    def __init__(self, errors, *args, **kwargs):
        super(MultiValidationError, self).__init__(errors, *args, **kwargs)
def raise_errors(errors):
    """Raise collected validation errors, or return True when there are none.

    A single error is re-raised as-is; multiple errors are wrapped in a
    MultiValidationError so all of them reach the user at once.
    """
    if not errors:
        return True
    if len(errors) == 1:
        raise errors[0]
    raise MultiValidationError(errors)
class ParameterFlag(Enum):
    """Markup flags that may annotate keys/values in configuration sources.

    ``final`` freezes a key against lower-priority sources; ``top`` and
    ``bottom`` pin individual sequence lines to the start or end of the
    merged result.
    """
    final = 'final'
    top = "top"
    bottom = "bottom"

    def __str__(self):
        return "%s" % self.value

    @classmethod
    def from_name(cls, name):
        # Lookup by member name, e.g. 'final' -> ParameterFlag.final.
        return cls[name]

    @classmethod
    def from_value(cls, value):
        # Lookup by member value.
        return cls(value)

    @classmethod
    def from_string(cls, string):
        # Parse a raw comment marker such as '#!final'; returns None when
        # the string is missing or not a recognized flag.
        try:
            string = string.strip('!#')
            return cls.from_value(string)
        except (ValueError, AttributeError):
            return None
# TODO: move elsewhere, probably auxlib
# TODO: need to add order to at least frozendict, and preferrably frozenset
def make_immutable(value):
    """Return an immutable counterpart of *value*.

    Mappings become frozendicts, sets become frozensets, other iterables
    become tuples, and anything else is returned unchanged.
    """
    if isinstance(value, Mapping):
        return frozendict(value)
    if isinstance(value, Set):
        return frozenset(value)
    if isiterable(value):
        return tuple(value)
    return value
@with_metaclass(ABCMeta)
class RawParameter(object):
    """Abstract wrapper around one raw (untyped) parameter from one source.

    Subclasses adapt a specific source format — environment variables,
    argparse namespaces, yaml files — to the common interface consumed by
    Parameter descriptors.
    """

    def __init__(self, source, key, raw_value):
        self.source = source  # identifier of the originating source
        self.key = key  # parameter name as it appeared in the source
        self._raw_value = raw_value  # unprocessed value from the source

    def __repr__(self):
        return text_type(vars(self))

    @abstractmethod
    def value(self, parameter_obj):
        # Return the processed value for the given Parameter descriptor.
        raise NotImplementedError()

    @abstractmethod
    def keyflag(self):
        # Return the ParameterFlag attached to the key itself, if any.
        raise NotImplementedError()

    @abstractmethod
    def valueflags(self, parameter_obj):
        # Return per-element ParameterFlags for sequence/map values.
        raise NotImplementedError()

    @classmethod
    def make_raw_parameters(cls, source, from_map):
        # Wrap every entry of from_map in an instance of this class.
        if from_map:
            return dict((key, cls(source, key, from_map[key])) for key in from_map)
        return EMPTY_MAP
class EnvRawParameter(RawParameter):
    """Raw parameter sourced from an environment variable."""
    source = 'envvars'

    def value(self, parameter_obj):
        # Sequence parameters declare a string_delimiter; split the raw
        # environment string on it to recover individual elements.
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            raw_value = self._raw_value
            if string_delimiter in raw_value:
                value = raw_value.split(string_delimiter)
            else:
                value = [raw_value]
            return tuple(v.strip() for v in value)
        else:
            # Primitive parameter: drop any trailing '!important' marker.
            return self.__important_split_value[0].strip()

    def keyflag(self):
        # An '!important' suffix in the variable marks the key as final.
        return ParameterFlag.final if len(self.__important_split_value) >= 2 else None

    def valueflags(self, parameter_obj):
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            # Environment values carry no per-element flags today.
            return tuple('' for _ in self._raw_value.split(string_delimiter))
        else:
            return self.__important_split_value[0].strip()

    @property
    def __important_split_value(self):
        # Splits the raw value on the literal '!important' marker.
        return self._raw_value.split("!important")

    @classmethod
    def make_raw_parameters(cls, appname):
        # Collect every APPNAME_* environment variable, stripping the prefix
        # and lower-casing the remainder to form parameter names.
        keystart = "{0}_".format(appname.upper())
        raw_env = dict((k.replace(keystart, '', 1).lower(), v)
                       for k, v in iteritems(environ) if k.startswith(keystart))
        return super(EnvRawParameter, cls).make_raw_parameters(EnvRawParameter.source, raw_env)
class ArgParseRawParameter(RawParameter):
    """Raw parameter sourced from parsed command-line arguments."""
    source = 'cmd_line'

    def value(self, parameter_obj):
        return make_immutable(self._raw_value)

    def keyflag(self):
        # Command-line values carry no flag markup.
        return None

    def valueflags(self, parameter_obj):
        return None

    @classmethod
    def make_raw_parameters(cls, args_from_argparse):
        return super(ArgParseRawParameter, cls).make_raw_parameters(ArgParseRawParameter.source,
                                                                    args_from_argparse)
class YamlRawParameter(RawParameter):
    # this class should encapsulate all direct use of ruamel.yaml in this module

    def __init__(self, source, key, raw_value, keycomment):
        self._keycomment = keycomment  # raw end-of-line comment on the key, if any
        super(YamlRawParameter, self).__init__(source, key, raw_value)

    def value(self, parameter_obj):
        self.__process(parameter_obj)
        return self._value

    def keyflag(self):
        return ParameterFlag.from_string(self._keycomment)

    def valueflags(self, parameter_obj):
        self.__process(parameter_obj)
        return self._valueflags

    def __process(self, parameter_obj):
        # Lazily convert the ruamel node into plain immutable python data,
        # extracting any per-element flag comments along the way.  Caches
        # the result on the instance.
        if hasattr(self, '_value'):
            return
        elif isinstance(self._raw_value, CommentedSeq):
            valuecomments = self._get_yaml_list_comments(self._raw_value)
            self._valueflags = tuple(ParameterFlag.from_string(s) for s in valuecomments)
            self._value = tuple(self._raw_value)
        elif isinstance(self._raw_value, CommentedMap):
            valuecomments = self._get_yaml_map_comments(self._raw_value)
            self._valueflags = dict((k, ParameterFlag.from_string(v))
                                    for k, v in iteritems(valuecomments) if v is not None)
            self._value = frozendict(self._raw_value)
        elif isinstance(self._raw_value, primitive_types):
            self._valueflags = None
            self._value = self._raw_value
        else:
            raise ThisShouldNeverHappenError()  # pragma: no cover

    @staticmethod
    def _get_yaml_key_comment(commented_dict, key):
        try:
            return commented_dict.ca.items[key][2].value.strip()
        except (AttributeError, KeyError):
            return None

    @staticmethod
    def _get_yaml_list_comments(value):
        # Pull the trailing comment text (or None) for each list element.
        items = value.ca.items
        raw_comment_lines = tuple(excepts((AttributeError, KeyError, TypeError),
                                          lambda q: items.get(q)[0].value.strip() or None,
                                          lambda _: None  # default value on exception
                                          )(q)
                                  for q in range(len(value)))
        return raw_comment_lines

    @staticmethod
    def _get_yaml_map_comments(rawvalue):
        # Pull the trailing comment text (or None) for each map entry.
        return dict((key, excepts(KeyError,
                                  lambda k: rawvalue.ca.items[k][2].value.strip() or None,
                                  lambda _: None  # default value on exception
                                  )(key))
                    for key in rawvalue)

    @classmethod
    def make_raw_parameters(cls, source, from_map):
        if from_map:
            return dict((key, cls(source, key, from_map[key],
                                  cls._get_yaml_key_comment(from_map, key)))
                        for key in from_map)
        return EMPTY_MAP

    @classmethod
    def make_raw_parameters_from_file(cls, filepath):
        with open(filepath, 'r') as fh:
            ruamel_yaml = yaml_load(fh)
        return cls.make_raw_parameters(filepath, ruamel_yaml) or EMPTY_MAP
def load_file_configs(search_path):
    # returns an ordered map of filepath and dict of raw parameter objects

    def _file_yaml_loader(fullpath):
        # A path in search_path must be a .yml file or a condarc file.
        assert fullpath.endswith(".yml") or fullpath.endswith("condarc"), fullpath
        yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath)

    def _dir_yaml_loader(fullpath):
        # A directory contributes every *.yml file it contains.
        for filepath in glob(join(fullpath, "*.yml")):
            yield filepath, YamlRawParameter.make_raw_parameters_from_file(filepath)

    # map a stat result to a file loader or a directory loader
    _loader = {
        S_IFREG: _file_yaml_loader,
        S_IFDIR: _dir_yaml_loader,
    }

    def _get_st_mode(path):
        # stat the path for file type, or None if path doesn't exist
        try:
            return S_IFMT(stat(path).st_mode)
        except OSError:
            return None

    expanded_paths = tuple(expand(path) for path in search_path)
    stat_paths = (_get_st_mode(path) for path in expanded_paths)
    # Nonexistent search-path entries are silently skipped.
    load_paths = (_loader[st_mode](path)
                  for path, st_mode in zip(expanded_paths, stat_paths)
                  if st_mode is not None)
    # Preserve search-path order; later entries take precedence downstream.
    raw_data = odict(kv for kv in chain.from_iterable(load_paths))
    return raw_data
@with_metaclass(ABCMeta)
class Parameter(object):
    """Abstract descriptor for one typed configuration parameter.

    Instances live as class attributes on a Configuration class; subclasses
    define how matches gathered from multiple sources are merged and
    validated.
    """
    _type = None  # accepted python type(s) of the merged value
    _element_type = None  # accepted type(s) of contained elements, if any

    def __init__(self, default, aliases=(), validation=None):
        self._name = None
        self._names = None
        self.default = default
        self.aliases = aliases
        self._validation = validation

    def _set_name(self, name):
        # this is an explicit method, and not a descriptor/setter
        # it's meant to be called by the Configuration metaclass
        self._name = name
        self._names = frozenset(x for x in chain(self.aliases, (name, )))
        return name

    @property
    def name(self):
        if self._name is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._name

    @property
    def names(self):
        if self._names is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._names

    def _raw_parameters_from_single_source(self, raw_parameters):
        # while supporting parameter name aliases, we enforce that only one definition is given
        # per data source
        keys = self.names & frozenset(raw_parameters.keys())
        matches = {key: raw_parameters[key] for key in keys}
        numkeys = len(keys)
        if numkeys == 0:
            return None, None
        elif numkeys == 1:
            return next(itervalues(matches)), None
        elif self.name in keys:
            # Several aliases present; prefer the canonical name but still
            # report the conflict to the caller.
            return matches[self.name], MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                                         keys, self.name)
        else:
            return None, MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                           keys, self.name)

    def _get_all_matches(self, instance):
        # a match is a raw parameter instance
        matches = []
        multikey_exceptions = []
        for filepath, raw_parameters in iteritems(instance.raw_data):
            match, error = self._raw_parameters_from_single_source(raw_parameters)
            if match is not None:
                matches.append(match)
            if error:
                multikey_exceptions.append(error)
        return matches, multikey_exceptions

    @abstractmethod
    def _merge(self, matches):
        # Combine matches from all sources into one final value.
        raise NotImplementedError()

    def __get__(self, instance, instance_type):
        # strategy is "extract and merge," which is actually just map and reduce
        # extract matches from each source in SEARCH_PATH
        # then merge matches together
        if self.name in instance._cache:
            return instance._cache[self.name]
        matches, errors = self._get_all_matches(instance)
        try:
            result = typify_data_structure(self._merge(matches) if matches else self.default,
                                           self._element_type)
        except TypeCoercionError as e:
            errors.append(CustomValidationError(self.name, e.value, "<<merged>>", text_type(e)))
        else:
            errors.extend(self.collect_errors(instance, result))
        # Raises on any accumulated error; returns normally otherwise.
        raise_errors(errors)
        instance._cache[self.name] = result
        return result

    def collect_errors(self, instance, value, source="<<merged>>"):
        """Validate a Parameter value.

        Args:
            instance (Configuration): The instance object to which the Parameter descriptor is
                attached.
            value: The value to be validated.

        Returns:
            list: ValidationError instances; empty when the value is valid.
        """
        errors = []
        if not isinstance(value, self._type):
            errors.append(InvalidTypeError(self.name, value, source, type(value),
                                           self._type))
        elif self._validation is not None:
            # Custom validator may return False (generic error) or a string
            # (custom error message); anything else means valid.
            result = self._validation(value)
            if result is False:
                errors.append(ValidationError(self.name, value, source))
            elif isinstance(result, string_types):
                errors.append(CustomValidationError(self.name, value, source, result))
        return errors

    def _match_key_is_important(self, raw_parameter):
        return raw_parameter.keyflag() is ParameterFlag.final

    def _first_important_matches(self, matches):
        # Keep matches up to and including the first one whose key is
        # flagged final; later (lower-priority) matches are ignored.
        idx = first(enumerate(matches), lambda x: self._match_key_is_important(x[1]),
                    apply=lambda x: x[0])
        return matches if idx is None else matches[:idx+1]

    @staticmethod
    def _str_format_flag(flag):
        return " #!%s" % flag if flag is not None else ''

    @staticmethod
    def _str_format_value(value):
        if value is None:
            return 'None'
        return value

    @classmethod
    def repr_raw(cls, raw_parameter):
        # Render the raw parameter the way it would appear in a config file.
        raise NotImplementedError()
class PrimitiveParameter(Parameter):
    """Parameter type for a Configuration class that holds a single python primitive value.

    The python primitive types are str, int, float, complex, bool, and NoneType. In addition,
    python 2 has long and unicode types.
    """

    def __init__(self, default, aliases=(), validation=None, parameter_type=None):
        """
        Args:
            default (Any): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
            parameter_type (type or Tuple[type]): Type-validation of parameter's value. If None,
                type(default) is used.
        """
        self._type = type(default) if parameter_type is None else parameter_type
        self._element_type = self._type
        super(PrimitiveParameter, self).__init__(default, aliases, validation)

    def _merge(self, matches):
        # A match whose key is flagged final wins outright; otherwise the
        # last match (highest-priority source) wins.
        important_match = first(matches, self._match_key_is_important, default=None)
        if important_match is not None:
            return important_match.value(self)
        last_match = last(matches, lambda x: x is not None, default=None)
        if last_match is not None:
            return last_match.value(self)
        # _merge is only called with at least one match.
        raise ThisShouldNeverHappenError()  # pragma: no cover

    def repr_raw(self, raw_parameter):
        return "%s: %s%s" % (raw_parameter.key,
                             self._str_format_value(raw_parameter.value(self)),
                             self._str_format_flag(raw_parameter.keyflag()))
class SequenceParameter(Parameter):
    """Parameter type for a Configuration class that holds a sequence (i.e. list) of python
    primitive values.
    """
    _type = tuple

    def __init__(self, element_type, default=(), aliases=(), validation=None,
                 string_delimiter=','):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element in
                the sequence.
            default (Iterable[str]): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        self.string_delimiter = string_delimiter  # used when the value comes from an env var
        super(SequenceParameter, self).__init__(default, aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        # Validate the container type first, then each element's type.
        errors = super(SequenceParameter, self).collect_errors(instance, value)
        element_type = self._element_type
        for idx, element in enumerate(value):
            if not isinstance(element, element_type):
                errors.append(InvalidElementTypeError(self.name, element, source,
                                                      type(element), element_type, idx))
        return errors

    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker, parameter_obj):
            return tuple(line
                         for line, flag in zip(match.value(parameter_obj),
                                               match.valueflags(parameter_obj))
                         if flag is marker)
        top_lines = concat(get_marked_lines(m, ParameterFlag.top, self) for m in relevant_matches)

        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom, self) for m in
                              reversed(relevant_matches))

        # now, concat all lines, while reversing the matches
        # reverse because elements closer to the end of search path take precedence
        all_lines = concat(m.value(self) for m in reversed(relevant_matches))

        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))

        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        # for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))
        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))

    def repr_raw(self, raw_parameter):
        # Render as a yaml sequence, carrying any per-line flag comments.
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for q, value in enumerate(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self)[q]
            lines.append(" - %s%s" % (self._str_format_value(value),
                                      self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class MapParameter(Parameter):
    """Parameter type for a Configuration class that holds a map (i.e. dict) of python
    primitive values.
    """
    _type = dict

    def __init__(self, element_type, default=None, aliases=(), validation=None):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element.
            default (Mapping): The parameter's default value.  If None, will be an empty dict.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        super(MapParameter, self).__init__(default or dict(), aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        """Extend base-class validation with per-value element-type checks.

        FIX: only iterate elements when ``value`` is actually a Mapping.  When a
        source supplies a non-map value, the base class already records an
        InvalidTypeError; calling iteritems() on it here would raise instead of
        reporting that error.
        """
        errors = super(MapParameter, self).collect_errors(instance, value)
        if isinstance(value, Mapping):
            element_type = self._element_type
            errors.extend(InvalidElementTypeError(self.name, val, source, type(val),
                                                  element_type, key)
                          for key, val in iteritems(value) if not isinstance(val, element_type))
        return errors

    def _merge(self, matches):
        """Merge matched maps, later sources winning, with ``#!final``-flagged
        keys overriding everything else."""
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self))
                                    if key_is_important(match, k))
                               for match in relevant_matches)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self) for m in relevant_matches),
                             reversed(important_maps)))

    def repr_raw(self, raw_parameter):
        """Render as a YAML-style ``key:`` line plus indented ``subkey: value``
        lines, including any ``#!flag`` markers."""
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for valuekey, value in iteritems(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self).get(valuekey)
            lines.append("  %s: %s%s" % (valuekey, self._str_format_value(value),
                                         self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class ConfigurationType(type):
    """metaclass for Configuration"""

    def __init__(cls, name, bases, attr):
        super(ConfigurationType, cls).__init__(name, bases, attr)
        # call _set_name for each parameter; collects the canonical names of
        # every Parameter descriptor declared directly on the class
        cls.parameter_names = tuple(p._set_name(name) for name, p in iteritems(cls.__dict__)
                                    if isinstance(p, Parameter))
@with_metaclass(ConfigurationType)
class Configuration(object):
    """Base class for a lazily-evaluated, multi-source application configuration.

    Subclasses declare Parameter descriptors as class attributes.  Values are
    merged from config files (``search_path``), environment variables
    (``app_name``), and argparse namespaces, in that order of precedence.
    """

    def __init__(self, search_path=(), app_name=None, argparse_args=None):
        # raw_data maps each source (file path, 'envvars', 'cmd_line') to its
        # dict of RawParameter objects; _cache memoizes merged parameter values
        self.raw_data = odict()
        self._cache = dict()
        self._validation_errors = defaultdict(list)
        if search_path:
            self._add_search_path(search_path)
        if app_name is not None:
            self._add_env_vars(app_name)
        if argparse_args is not None:
            self._add_argparse_args(argparse_args)

    def _add_search_path(self, search_path):
        """Load config files/directories found along search_path into raw_data."""
        return self._add_raw_data(load_file_configs(search_path))

    def _add_env_vars(self, app_name):
        """Register APPNAME_* environment variables as a raw-parameter source."""
        self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name)
        self._cache = dict()
        return self

    def _add_argparse_args(self, argparse_args):
        """Register parsed argparse values (ignoring NULL sentinels) as a source."""
        self._argparse_args = AttrDict((k, v) for k, v, in iteritems(vars(argparse_args))
                                       if v is not NULL)
        source = ArgParseRawParameter.source
        self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args)
        self._cache = dict()
        return self

    def _add_raw_data(self, raw_data):
        self.raw_data.update(raw_data)
        self._cache = dict()  # sources changed; invalidate memoized values
        return self

    def check_source(self, source):
        """Typify and validate every declared parameter for a single source.

        Returns:
            tuple: ``(typed_values, validation_errors)`` where typed_values is a
            dict of matched key -> typed value and validation_errors is a list.
        """
        # this method ends up duplicating much of the logic of Parameter.__get__
        # I haven't yet found a way to make it more DRY though
        typed_values = {}
        validation_errors = []
        raw_parameters = self.raw_data[source]
        for key in self.parameter_names:
            parameter = self.__class__.__dict__[key]
            match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters)
            if multikey_error:
                validation_errors.append(multikey_error)

            if match is not None:
                try:
                    typed_value = typify_data_structure(match.value(parameter),
                                                        parameter._element_type)
                except TypeCoercionError as e:
                    validation_errors.append(CustomValidationError(match.key, e.value,
                                                                   match.source, text_type(e)))
                else:
                    collected_errors = parameter.collect_errors(self, typed_value, match.source)
                    if collected_errors:
                        validation_errors.extend(collected_errors)
                    else:
                        typed_values[match.key] = typed_value  # parameter.repr_raw(match)
            else:
                # this situation will happen if there is a multikey_error and none of the
                # matched keys is the primary key
                pass
        return typed_values, validation_errors

    def validate_all(self):
        """Validate all sources; raise a (Multi)ValidationError on any failure."""
        validation_errors = list(chain.from_iterable(self.check_source(source)[1]
                                                     for source in self.raw_data))
        raise_errors(validation_errors)

    def collect_all(self):
        """Return an ordered map of source -> typed values across all sources,
        raising if any source holds invalid values."""
        typed_values = odict()
        validation_errors = odict()
        for source in self.raw_data:
            typed_values[source], validation_errors[source] = self.check_source(source)
        raise_errors(tuple(chain.from_iterable(itervalues(validation_errors))))
        return odict((k, v) for k, v in iteritems(typed_values) if v)
<|code_end|>
| conda/common/configuration.py
# -*- coding: utf-8 -*-
"""
A generalized application configuration utility.
Features include:
- lazy eval
- merges configuration files
- parameter type validation, with custom validation
- parameter aliases
Easily extensible to other source formats, e.g. json and ini
Limitations:
- at the moment only supports a "flat" config structure; no nested data structures
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
from collections import Mapping, Set, defaultdict
from enum import Enum
from glob import glob
from itertools import chain
from logging import getLogger
from os import environ, stat
from os.path import join
from stat import S_IFDIR, S_IFMT, S_IFREG
try:
from cytoolz.dicttoolz import merge
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concat, concatv, unique
except ImportError:
from .._vendor.toolz.dicttoolz import merge
from .._vendor.toolz.functoolz import excepts
from .._vendor.toolz.itertoolz import concat, concatv, unique
try:
from ruamel_yaml.comments import CommentedSeq, CommentedMap
except ImportError: # pragma: no cover
from ruamel.yaml.comments import CommentedSeq, CommentedMap # pragma: no cover
from .. import CondaError, CondaMultiError
from .._vendor.auxlib.collection import first, frozendict, last, AttrDict
from .._vendor.auxlib.exceptions import ThisShouldNeverHappenError
from .._vendor.auxlib.path import expand
from .._vendor.auxlib.type_coercion import typify_data_structure, TypeCoercionError
from ..base.constants import EMPTY_MAP, NULL
from .compat import (isiterable, iteritems, odict, primitive_types, text_type,
with_metaclass, string_types, itervalues)
from .yaml import yaml_load
__all__ = ["Configuration", "PrimitiveParameter",
"SequenceParameter", "MapParameter"]
log = getLogger(__name__)
def pretty_list(iterable, padding='  '):  # TODO: move elsewhere in conda.common
    """Render each element of *iterable* as an indented YAML-style '- item' line.

    A non-iterable argument is treated as a single-element list.
    """
    items = iterable if isiterable(iterable) else [iterable]
    rendered = ["%s- %s" % (padding, item) for item in items]
    return '\n'.join(rendered)
def pretty_map(dictionary, padding='  '):
    """Render a mapping as indented 'key: value' lines, one entry per line."""
    rendered = ["%s%s: %s" % (padding, key, value) for key, value in iteritems(dictionary)]
    return '\n'.join(rendered)
class ConfigurationError(CondaError):
    """Base class for all configuration-related errors raised by this module."""
    pass
class ValidationError(ConfigurationError):
    """Error for a single parameter value that fails type or custom validation."""

    def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs):
        self.parameter_name = parameter_name
        self.parameter_value = parameter_value
        self.source = source
        # CONSISTENCY FIX: name this class in super() like every sibling class does.
        # The previous super(ConfigurationError, self) skipped a level of the MRO;
        # behavior is unchanged because ConfigurationError defines no __init__.
        super(ValidationError, self).__init__(msg, **kwargs)

    def __str__(self):
        return ("Parameter %s = %r declared in %s is invalid."
                % (self.parameter_name, self.parameter_value, self.source))
class MultipleKeysError(ValidationError):
    """Raised when one source defines the same parameter under several aliases."""

    def __init__(self, source, keys, preferred_key):
        self.source = source
        self.keys = keys
        msg = ("Multiple aliased keys in file %s:\n"
               "%s"
               "Must declare only one. Prefer '%s'" % (source, pretty_list(keys), preferred_key))
        super(MultipleKeysError, self).__init__(preferred_key, None, source, msg=msg)
class InvalidTypeError(ValidationError):
    """Raised when a parameter's merged value has the wrong python type."""

    def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None):
        self.wrong_type = wrong_type
        self.valid_types = valid_types
        if msg is None:
            msg = ("Parameter %s = %r declared in %s has type %s.\n"
                   "Valid types: %s." % (parameter_name, parameter_value,
                                         source, wrong_type, pretty_list(valid_types)))
        super(InvalidTypeError, self).__init__(parameter_name, parameter_value, source, msg=msg)
class InvalidElementTypeError(InvalidTypeError):
    """Raised when a container parameter holds an element of the wrong type."""

    def __init__(self, parameter_name, parameter_value, source, wrong_type,
                 valid_types, index_or_key):
        # sequences report the element's index, maps report its key
        qualifier = "at index" if isinstance(index_or_key, int) else "for key"
        msg = ("Parameter %s declared in %s has invalid element %r %s %s.\n"
               "Valid element types:\n"
               "%s." % (parameter_name, source, parameter_value, qualifier,
                        index_or_key, pretty_list(valid_types)))
        super(InvalidElementTypeError, self).__init__(parameter_name, parameter_value, source,
                                                      wrong_type, valid_types, msg=msg)
class CustomValidationError(ValidationError):
    """Raised when a user-supplied validation callable rejects a value (or when
    type coercion fails), carrying the custom explanatory message."""

    def __init__(self, parameter_name, parameter_value, source, custom_message):
        msg = ("Parameter %s = %r declared in %s is invalid.\n"
               "%s" % (parameter_name, parameter_value, source, custom_message))
        super(CustomValidationError, self).__init__(parameter_name, parameter_value, source,
                                                    msg=msg)
class MultiValidationError(CondaMultiError, ConfigurationError):
    """Aggregate of several validation errors raised together."""

    def __init__(self, errors, *args, **kwargs):
        super(MultiValidationError, self).__init__(errors, *args, **kwargs)
def raise_errors(errors):
    """Raise the given validation errors, or return True when there are none.

    A single error is raised as-is; multiple errors are bundled into one
    MultiValidationError.
    """
    if not errors:
        return True
    if len(errors) == 1:
        raise errors[0]
    raise MultiValidationError(errors)
class ParameterFlag(Enum):
    """Markers that may decorate a raw parameter (via a '#!flag' yaml comment
    or an '!important'-style suffix) to control how values are merged."""
    final = 'final'
    top = "top"
    bottom = "bottom"

    def __str__(self):
        return self.value

    @classmethod
    def from_name(cls, name):
        """Member lookup by name, e.g. 'final' -> ParameterFlag.final."""
        return cls[name]

    @classmethod
    def from_value(cls, value):
        """Member lookup by value."""
        return cls(value)

    @classmethod
    def from_string(cls, string):
        """Parse a member from a comment-style token such as '#!final'.

        Returns None for unrecognized tokens or non-string input.
        """
        try:
            # AttributeError (e.g. None input) and ValueError both mean "no flag"
            return cls.from_value(string.strip('!#'))
        except (ValueError, AttributeError):
            return None
# TODO: move elsewhere, probably auxlib
# TODO: need to add order to at least frozendict, and preferrably frozenset
def make_immutable(value):
    """Return an immutable counterpart of *value*.

    Mappings become frozendicts, Sets become frozensets, other iterables become
    tuples, and non-iterable scalars are returned unchanged.
    """
    if isinstance(value, Mapping):
        return frozendict(value)
    elif isinstance(value, Set):
        return frozenset(value)
    elif isiterable(value):
        return tuple(value)
    else:
        return value
@with_metaclass(ABCMeta)
class RawParameter(object):
    """Abstract wrapper for a single key/value pair as read from one
    configuration source, before typing, merging, or validation."""

    def __init__(self, source, key, raw_value):
        self.source = source
        self.key = key
        self._raw_value = raw_value

    def __repr__(self):
        return text_type(vars(self))

    @abstractmethod
    def value(self, parameter_obj):
        """Return the (still untyped) python value for this parameter."""
        raise NotImplementedError()

    @abstractmethod
    def keyflag(self):
        """Return the ParameterFlag attached to the key itself, or None."""
        raise NotImplementedError()

    @abstractmethod
    def valueflags(self, parameter_obj):
        """Return per-element ParameterFlags (shape depends on parameter type)."""
        raise NotImplementedError()

    @classmethod
    def make_raw_parameters(cls, source, from_map):
        """Wrap each entry of *from_map* in an instance of *cls*, keyed by name."""
        if from_map:
            return dict((key, cls(source, key, from_map[key])) for key in from_map)
        return EMPTY_MAP
class EnvRawParameter(RawParameter):
    """RawParameter sourced from an environment variable (e.g. CONDA_FOO)."""
    source = 'envvars'

    def value(self, parameter_obj):
        if hasattr(parameter_obj, 'string_delimiter'):
            # sequence parameter: split the env var value on its delimiter
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            raw_value = self._raw_value
            if string_delimiter in raw_value:
                value = raw_value.split(string_delimiter)
            else:
                value = [raw_value]
            return tuple(v.strip() for v in value)
        else:
            # primitive parameter: the text before any '!important' marker
            return self.__important_split_value[0].strip()

    def keyflag(self):
        # a literal '!important' anywhere in the value marks the key as final
        return ParameterFlag.final if len(self.__important_split_value) >= 2 else None

    def valueflags(self, parameter_obj):
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            return tuple('' for _ in self._raw_value.split(string_delimiter))
        else:
            return self.__important_split_value[0].strip()

    @property
    def __important_split_value(self):
        # partition of the raw value around a literal '!important' marker
        return self._raw_value.split("!important")

    @classmethod
    def make_raw_parameters(cls, appname):
        """Collect all APPNAME_* environment variables, lower-casing the names."""
        keystart = "{0}_".format(appname.upper())
        raw_env = dict((k.replace(keystart, '', 1).lower(), v)
                       for k, v in iteritems(environ) if k.startswith(keystart))
        return super(EnvRawParameter, cls).make_raw_parameters(EnvRawParameter.source, raw_env)
class ArgParseRawParameter(RawParameter):
    """RawParameter sourced from parsed command-line arguments."""
    source = 'cmd_line'

    def value(self, parameter_obj):
        return make_immutable(self._raw_value)

    def keyflag(self):
        # command-line values carry no '#!final'-style markers
        return None

    def valueflags(self, parameter_obj):
        return None

    @classmethod
    def make_raw_parameters(cls, args_from_argparse):
        return super(ArgParseRawParameter, cls).make_raw_parameters(ArgParseRawParameter.source,
                                                                    args_from_argparse)
class YamlRawParameter(RawParameter):
    # this class should encapsulate all direct use of ruamel.yaml in this module

    def __init__(self, source, key, raw_value, keycomment):
        # keycomment is the raw yaml comment attached to the key, e.g. '#!final'
        self._keycomment = keycomment
        super(YamlRawParameter, self).__init__(source, key, raw_value)

    def value(self, parameter_obj):
        self.__process(parameter_obj)
        return self._value

    def keyflag(self):
        return ParameterFlag.from_string(self._keycomment)

    def valueflags(self, parameter_obj):
        self.__process(parameter_obj)
        return self._valueflags

    def __process(self, parameter_obj):
        """Lazily convert the ruamel value into plain python plus flags, once."""
        if hasattr(self, '_value'):
            return  # already processed
        elif isinstance(self._raw_value, CommentedSeq):
            valuecomments = self._get_yaml_list_comments(self._raw_value)
            self._valueflags = tuple(ParameterFlag.from_string(s) for s in valuecomments)
            self._value = tuple(self._raw_value)
        elif isinstance(self._raw_value, CommentedMap):
            valuecomments = self._get_yaml_map_comments(self._raw_value)
            self._valueflags = dict((k, ParameterFlag.from_string(v))
                                    for k, v in iteritems(valuecomments) if v is not None)
            self._value = frozendict(self._raw_value)
        elif isinstance(self._raw_value, primitive_types):
            self._valueflags = None
            self._value = self._raw_value
        else:
            raise ThisShouldNeverHappenError()  # pragma: no cover

    @staticmethod
    def _get_yaml_key_comment(commented_dict, key):
        try:
            return commented_dict.ca.items[key][2].value.strip()
        except (AttributeError, KeyError):
            return None

    @staticmethod
    def _get_yaml_list_comments(value):
        # returns a tuple with one comment string (or None) per list element
        items = value.ca.items
        raw_comment_lines = tuple(excepts((AttributeError, KeyError, TypeError),
                                          lambda q: items.get(q)[0].value.strip() or None,
                                          lambda _: None  # default value on exception
                                          )(q)
                                  for q in range(len(value)))
        return raw_comment_lines

    @staticmethod
    def _get_yaml_map_comments(rawvalue):
        # returns a dict with one comment string (or None) per map key
        return dict((key, excepts(KeyError,
                                  lambda k: rawvalue.ca.items[k][2].value.strip() or None,
                                  lambda _: None  # default value on exception
                                  )(key))
                    for key in rawvalue)

    @classmethod
    def make_raw_parameters(cls, source, from_map):
        if from_map:
            return dict((key, cls(source, key, from_map[key],
                                  cls._get_yaml_key_comment(from_map, key)))
                        for key in from_map)
        return EMPTY_MAP

    @classmethod
    def make_raw_parameters_from_file(cls, filepath):
        """Parse a yaml file into a map of raw parameters (empty if no content)."""
        with open(filepath, 'r') as fh:
            ruamel_yaml = yaml_load(fh)
        return cls.make_raw_parameters(filepath, ruamel_yaml) or EMPTY_MAP
def load_file_configs(search_path):
    # returns an ordered map of filepath and dict of raw parameter objects
    def _file_yaml_loader(fullpath):
        assert fullpath.endswith(".yml") or fullpath.endswith("condarc"), fullpath
        yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath)

    def _dir_yaml_loader(fullpath):
        # a directory source contributes every *.yml file it contains
        for filepath in glob(join(fullpath, "*.yml")):
            yield filepath, YamlRawParameter.make_raw_parameters_from_file(filepath)

    # map a stat result to a file loader or a directory loader
    _loader = {
        S_IFREG: _file_yaml_loader,
        S_IFDIR: _dir_yaml_loader,
    }

    def _get_st_mode(path):
        # stat the path for file type, or None if path doesn't exist
        try:
            return S_IFMT(stat(path).st_mode)
        except OSError:
            return None

    expanded_paths = tuple(expand(path) for path in search_path)
    stat_paths = (_get_st_mode(path) for path in expanded_paths)
    load_paths = (_loader[st_mode](path)
                  for path, st_mode in zip(expanded_paths, stat_paths)
                  if st_mode is not None)  # silently skip nonexistent paths
    raw_data = odict(kv for kv in chain.from_iterable(load_paths))
    return raw_data
@with_metaclass(ABCMeta)
class Parameter(object):
    """Abstract descriptor for a single configuration parameter.

    Instances live as class attributes on a Configuration subclass; on access
    they extract matching raw values from every source, merge them, typify the
    result, validate it, and cache it on the Configuration instance.
    """
    _type = None          # expected python type of the merged value
    _element_type = None  # expected type of contained elements, if a container

    def __init__(self, default, aliases=(), validation=None):
        self._name = None
        self._names = None
        self.default = default
        self.aliases = aliases
        self._validation = validation

    def _set_name(self, name):
        # this is an explicit method, and not a descriptor/setter
        # it's meant to be called by the Configuration metaclass
        self._name = name
        self._names = frozenset(x for x in chain(self.aliases, (name, )))
        return name

    @property
    def name(self):
        """The parameter's primary (canonical) name."""
        if self._name is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._name

    @property
    def names(self):
        """Frozenset of the primary name plus all aliases."""
        if self._names is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._names

    def _raw_parameters_from_single_source(self, raw_parameters):
        """Return ``(match, error)`` for this parameter within one source.

        While supporting parameter name aliases, we enforce that only one
        definition is given per data source; multiple aliased keys produce a
        MultipleKeysError (keeping the primary key's match when present).
        """
        keys = self.names & frozenset(raw_parameters.keys())
        matches = {key: raw_parameters[key] for key in keys}
        numkeys = len(keys)
        if numkeys == 0:
            return None, None
        elif numkeys == 1:
            return next(itervalues(matches)), None
        elif self.name in keys:
            return matches[self.name], MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                                         keys, self.name)
        else:
            return None, MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                           keys, self.name)

    def _get_all_matches(self, instance):
        # a match is a raw parameter instance
        matches = []
        multikey_exceptions = []
        for filepath, raw_parameters in iteritems(instance.raw_data):
            match, error = self._raw_parameters_from_single_source(raw_parameters)
            if match is not None:
                matches.append(match)
            if error:
                multikey_exceptions.append(error)
        return matches, multikey_exceptions

    @abstractmethod
    def _merge(self, matches):
        """Reduce the matches to a single merged value (subclass-specific)."""
        raise NotImplementedError()

    def __get__(self, instance, instance_type):
        # strategy is "extract and merge," which is actually just map and reduce
        # extract matches from each source in SEARCH_PATH
        # then merge matches together
        if self.name in instance._cache:
            return instance._cache[self.name]
        matches, errors = self._get_all_matches(instance)
        try:
            result = typify_data_structure(self._merge(matches) if matches else self.default,
                                           self._element_type)
        except TypeCoercionError as e:
            errors.append(CustomValidationError(self.name, e.value, "<<merged>>", text_type(e)))
        else:
            errors.extend(self.collect_errors(instance, result))
        raise_errors(errors)
        instance._cache[self.name] = result
        return result

    def collect_errors(self, instance, value, source="<<merged>>"):
        """Validate a Parameter value.

        Args:
            instance (Configuration): The instance object to which the Parameter descriptor is
                attached.
            value: The value to be validated.
        """
        errors = []
        if not isinstance(value, self._type):
            errors.append(InvalidTypeError(self.name, value, source, type(value),
                                           self._type))
        elif self._validation is not None:
            result = self._validation(value)
            if result is False:
                errors.append(ValidationError(self.name, value, source))
            elif isinstance(result, string_types):
                errors.append(CustomValidationError(self.name, value, source, result))
        return errors

    def _match_key_is_important(self, raw_parameter):
        return raw_parameter.keyflag() is ParameterFlag.final

    def _first_important_matches(self, matches):
        # keep matches up to and including the first '#!final' one; if there is
        # no final match, every match participates in the merge
        idx = first(enumerate(matches), lambda x: self._match_key_is_important(x[1]),
                    apply=lambda x: x[0])
        return matches if idx is None else matches[:idx+1]

    @staticmethod
    def _str_format_flag(flag):
        return "  #!%s" % flag if flag is not None else ''

    @staticmethod
    def _str_format_value(value):
        if value is None:
            return 'None'
        return value

    @classmethod
    def repr_raw(cls, raw_parameter):
        """Render a raw parameter in YAML-ish form (implemented by subclasses)."""
        raise NotImplementedError()
class PrimitiveParameter(Parameter):
    """Parameter type for a Configuration class that holds a single python primitive value.

    The python primitive types are str, int, float, complex, bool, and NoneType. In addition,
    python 2 has long and unicode types.
    """

    def __init__(self, default, aliases=(), validation=None, parameter_type=None):
        """
        Args:
            default (Any): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
            parameter_type (type or Tuple[type]): Type-validation of parameter's value. If None,
                type(default) is used.
        """
        self._type = type(default) if parameter_type is None else parameter_type
        self._element_type = self._type
        super(PrimitiveParameter, self).__init__(default, aliases, validation)

    def _merge(self, matches):
        """Pick a single value: the first '#!final' match wins, else the last match."""
        important_match = first(matches, self._match_key_is_important, default=None)
        if important_match is not None:
            return important_match.value(self)
        last_match = last(matches, lambda x: x is not None, default=None)
        if last_match is not None:
            return last_match.value(self)
        # _merge is only called with a non-empty matches list
        raise ThisShouldNeverHappenError()  # pragma: no cover

    def repr_raw(self, raw_parameter):
        """Render as a single 'key: value' line with any '#!flag' marker."""
        return "%s: %s%s" % (raw_parameter.key,
                             self._str_format_value(raw_parameter.value(self)),
                             self._str_format_flag(raw_parameter.keyflag()))
class SequenceParameter(Parameter):
    """Parameter type for a Configuration class that holds a sequence (i.e. list) of python
    primitive values.
    """
    _type = tuple

    def __init__(self, element_type, default=(), aliases=(), validation=None,
                 string_delimiter=','):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element in
                the sequence.
            default (Iterable[str]): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        self.string_delimiter = string_delimiter
        super(SequenceParameter, self).__init__(default, aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        """Extend base-class validation with a per-element type check; returns
        a list of ValidationError instances (empty when valid)."""
        errors = super(SequenceParameter, self).collect_errors(instance, value)
        element_type = self._element_type
        for idx, element in enumerate(value):
            if not isinstance(element, element_type):
                errors.append(InvalidElementTypeError(self.name, element, source,
                                                      type(element), element_type, idx))
        return errors

    def _merge(self, matches):
        """Merge matched sequences from all sources into one de-duplicated tuple,
        honoring per-line ``#!top``/``#!bottom`` flags and key-level ``#!final``."""
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker, parameter_obj):
            return tuple(line
                         for line, flag in zip(match.value(parameter_obj),
                                               match.valueflags(parameter_obj))
                         if flag is marker)
        top_lines = concat(get_marked_lines(m, ParameterFlag.top, self) for m in relevant_matches)

        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom, self) for m in
                              reversed(relevant_matches))

        # now, concat all lines, while reversing the matches
        # reverse because elements closer to the end of search path take precedence
        all_lines = concat(m.value(self) for m in reversed(relevant_matches))

        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))

        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        #       for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))
        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))

    def repr_raw(self, raw_parameter):
        """Render as a YAML-style ``key:`` line plus one ``- item`` line per
        element, including any ``#!flag`` markers."""
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for q, value in enumerate(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self)[q]
            lines.append("  - %s%s" % (self._str_format_value(value),
                                       self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class MapParameter(Parameter):
    """Parameter type for a Configuration class that holds a map (i.e. dict) of python
    primitive values.
    """
    _type = dict

    def __init__(self, element_type, default=None, aliases=(), validation=None):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element.
            default (Mapping): The parameter's default value.  If None, will be an empty dict.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        super(MapParameter, self).__init__(default or dict(), aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        """Extend base-class validation with per-value element-type checks."""
        errors = super(MapParameter, self).collect_errors(instance, value)
        # only check elements when the container is actually a mapping; a wrong
        # container type was already recorded by the base class
        if isinstance(value, Mapping):
            element_type = self._element_type
            errors.extend(InvalidElementTypeError(self.name, val, source, type(val),
                                                  element_type, key)
                          for key, val in iteritems(value) if not isinstance(val, element_type))
        return errors

    def _merge(self, matches):
        """Merge matched maps, later sources winning, with ``#!final``-flagged
        keys overriding everything else."""
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self))
                                    if key_is_important(match, k))
                               for match in relevant_matches)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self) for m in relevant_matches),
                             reversed(important_maps)))

    def repr_raw(self, raw_parameter):
        """Render as a YAML-style ``key:`` line plus indented ``subkey: value``
        lines, including any ``#!flag`` markers."""
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for valuekey, value in iteritems(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self).get(valuekey)
            lines.append("  %s: %s%s" % (valuekey, self._str_format_value(value),
                                         self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class ConfigurationType(type):
    """metaclass for Configuration"""

    def __init__(cls, name, bases, attr):
        super(ConfigurationType, cls).__init__(name, bases, attr)
        # call _set_name for each parameter; collects the canonical names of
        # every Parameter descriptor declared directly on the class
        cls.parameter_names = tuple(p._set_name(name) for name, p in iteritems(cls.__dict__)
                                    if isinstance(p, Parameter))
@with_metaclass(ConfigurationType)
class Configuration(object):
    """Base class for a lazily-evaluated, multi-source application configuration.

    Subclasses declare Parameter descriptors as class attributes.  Values are
    merged from config files (``search_path``), environment variables
    (``app_name``), and argparse namespaces, in that order of precedence.
    """

    def __init__(self, search_path=(), app_name=None, argparse_args=None):
        # raw_data maps each source (file path, 'envvars', 'cmd_line') to its
        # dict of RawParameter objects; _cache memoizes merged parameter values
        self.raw_data = odict()
        self._cache = dict()
        self._validation_errors = defaultdict(list)
        if search_path:
            self._add_search_path(search_path)
        if app_name is not None:
            self._add_env_vars(app_name)
        if argparse_args is not None:
            self._add_argparse_args(argparse_args)

    def _add_search_path(self, search_path):
        """Load config files/directories found along search_path into raw_data."""
        return self._add_raw_data(load_file_configs(search_path))

    def _add_env_vars(self, app_name):
        """Register APPNAME_* environment variables as a raw-parameter source."""
        self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name)
        self._cache = dict()
        return self

    def _add_argparse_args(self, argparse_args):
        """Register parsed argparse values (ignoring NULL sentinels) as a source."""
        self._argparse_args = AttrDict((k, v) for k, v, in iteritems(vars(argparse_args))
                                       if v is not NULL)
        source = ArgParseRawParameter.source
        self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args)
        self._cache = dict()
        return self

    def _add_raw_data(self, raw_data):
        self.raw_data.update(raw_data)
        self._cache = dict()  # sources changed; invalidate memoized values
        return self

    def check_source(self, source):
        """Typify and validate every declared parameter for a single source.

        Returns:
            tuple: ``(typed_values, validation_errors)`` where typed_values is a
            dict of matched key -> typed value and validation_errors is a list.
        """
        # this method ends up duplicating much of the logic of Parameter.__get__
        # I haven't yet found a way to make it more DRY though
        typed_values = {}
        validation_errors = []
        raw_parameters = self.raw_data[source]
        for key in self.parameter_names:
            parameter = self.__class__.__dict__[key]
            match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters)
            if multikey_error:
                validation_errors.append(multikey_error)

            if match is not None:
                try:
                    typed_value = typify_data_structure(match.value(parameter),
                                                        parameter._element_type)
                except TypeCoercionError as e:
                    validation_errors.append(CustomValidationError(match.key, e.value,
                                                                   match.source, text_type(e)))
                else:
                    collected_errors = parameter.collect_errors(self, typed_value, match.source)
                    if collected_errors:
                        validation_errors.extend(collected_errors)
                    else:
                        typed_values[match.key] = typed_value  # parameter.repr_raw(match)
            else:
                # this situation will happen if there is a multikey_error and none of the
                # matched keys is the primary key
                pass
        return typed_values, validation_errors

    def validate_all(self):
        """Validate all sources; raise a (Multi)ValidationError on any failure."""
        validation_errors = list(chain.from_iterable(self.check_source(source)[1]
                                                     for source in self.raw_data))
        raise_errors(validation_errors)

    def collect_all(self):
        """Return an ordered map of source -> typed values across all sources,
        raising if any source holds invalid values."""
        typed_values = odict()
        validation_errors = odict()
        for source in self.raw_data:
            typed_values[source], validation_errors[source] = self.check_source(source)
        raise_errors(tuple(chain.from_iterable(itervalues(validation_errors))))
        return odict((k, v) for k, v in iteritems(typed_values) if v)
| conda/common/configuration.py
--- a/conda/common/configuration.py
+++ b/conda/common/configuration.py
@@ -637,9 +637,11 @@ def __init__(self, element_type, default=None, aliases=(), validation=None):
def collect_errors(self, instance, value, source="<<merged>>"):
errors = super(MapParameter, self).collect_errors(instance, value)
- element_type = self._element_type
- errors.extend(InvalidElementTypeError(self.name, val, source, type(val), element_type, key)
- for key, val in iteritems(value) if not isinstance(val, element_type))
+ if isinstance(value, Mapping):
+ element_type = self._element_type
+ errors.extend(InvalidElementTypeError(self.name, val, source, type(val),
+ element_type, key)
+ for key, val in iteritems(value) if not isinstance(val, element_type))
return errors
def _merge(self, matches): |
Progress bar broken

```
C:\Users\Korijn\dev\myproject>conda info
Current conda install:
platform : win-64
conda version : 4.2.7
conda is private : False
conda-env version : 4.2.7
conda-build version : 2.0.1
python version : 3.5.1.final.0
requests version : 2.9.1
root environment : C:\Users\Korijn\Miniconda3 (writable)
default environment : C:\Users\Korijn\Miniconda3
envs directories : C:\Users\Korijn\Miniconda3\envs
package cache : C:\Users\Korijn\Miniconda3\pkgs
channel URLs : https://repo.continuum.io/pkgs/free/win-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/win-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://repo.continuum.io/pkgs/msys2/win-64/
https://repo.continuum.io/pkgs/msys2/noarch/
config file : C:\Users\Korijn\.condarc
offline mode : False
```
| conda/base/constants.py
<|code_start|>
# -*- coding: utf-8 -*-
"""
This file should hold almost all string literals and magic numbers used throughout the code base.
The exception is if a literal is specifically meant to be private to and isolated within a module.
"""
from __future__ import absolute_import, division, print_function
import os
import sys
from logging import getLogger
from platform import machine
from enum import Enum
from conda._vendor.auxlib.collection import frozendict
log = getLogger(__name__)
class Arch(Enum):
    """CPU architectures recognized by conda."""
    x86 = 'x86'
    x86_64 = 'x86_64'
    armv6l = 'armv6l'
    armv7l = 'armv7l'
    ppc64le = 'ppc64le'

    @classmethod
    def from_sys(cls):
        """Member for the current machine, per ``platform.machine()``."""
        return cls[machine()]
class Platform(Enum):
    """Operating-system families, keyed by ``sys.platform`` values."""
    linux = 'linux'
    win = 'win32'
    openbsd = 'openbsd5'
    osx = 'darwin'

    @classmethod
    def from_sys(cls):
        """Return the member matching the running interpreter's platform."""
        p = sys.platform
        if p.startswith('linux'):
            # Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2',
            # and there is no essential change between Linux 2.x and 3.x, sys.platform is always
            # set to 'linux2', even on Linux 3.x. In Python 3.3 and later, the value will always
            # be set to 'linux'
            p = 'linux'
        return cls(p)
machine_bits = 8 * tuple.__itemsize__
# UID = os.getuid()
PWD = os.getcwd()
CONDA = 'CONDA'
CONDA_ = 'CONDA_'
conda = 'conda'
SEARCH_PATH = (
'/etc/conda/condarc',
'/etc/conda/condarc.d/',
'/var/lib/conda/condarc',
'/var/lib/conda/condarc.d/',
'$CONDA_ROOT/condarc',
'$CONDA_ROOT/.condarc',
'$CONDA_ROOT/condarc.d/',
'~/.conda/condarc',
'~/.conda/condarc.d/',
'~/.condarc',
'$CONDA_PREFIX/.condarc',
'$CONDA_PREFIX/condarc.d/',
'$CONDARC',
)
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
PLATFORM_DIRECTORIES = ("linux-64", "linux-32",
"win-64", "win-32",
"osx-64", "noarch")
RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')
if Platform.from_sys() is Platform.win:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
'https://repo.continuum.io/pkgs/msys2',
)
else:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
)
ROOT_ENV_NAME = 'root'
EMPTY_LIST = ()
EMPTY_MAP = frozendict()
class _Null(object):
    """Sentinel for "no value given"; always falsy.

    Distinct from None so that None can itself be a legitimate value.
    """

    def __nonzero__(self):
        # Python 2 truth-value hook.
        return False

    def __bool__(self):
        # Python 3 truth-value hook.  Bug fix: without this, Python 3 ignores
        # __nonzero__ and the sentinel evaluates as truthy.
        return False
NULL = _Null()
UTF8 = 'UTF-8'
<|code_end|>
| conda/base/constants.py
# -*- coding: utf-8 -*-
"""
This file should hold almost all string literals and magic numbers used throughout the code base.
The exception is if a literal is specifically meant to be private to and isolated within a module.
"""
from __future__ import absolute_import, division, print_function
import os
import sys
from logging import getLogger
from platform import machine
from enum import Enum
from conda._vendor.auxlib.collection import frozendict
log = getLogger(__name__)
class Arch(Enum):
x86 = 'x86'
x86_64 = 'x86_64'
armv6l = 'armv6l'
armv7l = 'armv7l'
ppc64le = 'ppc64le'
@classmethod
def from_sys(cls):
return cls[machine()]
class Platform(Enum):
linux = 'linux'
win = 'win32'
openbsd = 'openbsd5'
osx = 'darwin'
@classmethod
def from_sys(cls):
p = sys.platform
if p.startswith('linux'):
# Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2',
# and there is no essential change between Linux 2.x and 3.x, sys.platform is always
# set to 'linux2', even on Linux 3.x. In Python 3.3 and later, the value will always
# be set to 'linux'
p = 'linux'
return cls(p)
machine_bits = 8 * tuple.__itemsize__
# UID = os.getuid()
PWD = os.getcwd()
CONDA = 'CONDA'
CONDA_ = 'CONDA_'
conda = 'conda'
SEARCH_PATH = (
'/etc/conda/condarc',
'/etc/conda/condarc.d/',
'/var/lib/conda/condarc',
'/var/lib/conda/condarc.d/',
'$CONDA_ROOT/condarc',
'$CONDA_ROOT/.condarc',
'$CONDA_ROOT/condarc.d/',
'~/.conda/condarc',
'~/.conda/condarc.d/',
'~/.condarc',
'$CONDA_PREFIX/.condarc',
'$CONDA_PREFIX/condarc.d/',
'$CONDARC',
)
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org/'
PLATFORM_DIRECTORIES = ("linux-64", "linux-32",
"win-64", "win-32",
"osx-64", "noarch")
RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')
if Platform.from_sys() is Platform.win:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
'https://repo.continuum.io/pkgs/msys2',
)
else:
DEFAULT_CHANNELS = ('https://repo.continuum.io/pkgs/free',
'https://repo.continuum.io/pkgs/pro',
)
ROOT_ENV_NAME = 'root'
EMPTY_LIST = ()
EMPTY_MAP = frozendict()
class _Null(object):
    """Sentinel object that is always falsy (used where None is a valid value)."""

    def __nonzero__(self):
        # Python 2 truth-value hook.
        return False

    def __bool__(self):
        # Python 3 truth-value hook; mirrors __nonzero__.
        return False
NULL = _Null()
UTF8 = 'UTF-8'
| conda/base/constants.py
--- a/conda/base/constants.py
+++ b/conda/base/constants.py
@@ -99,6 +99,9 @@ class _Null(object):
def __nonzero__(self):
return False
+ def __bool__(self):
+ return False
+
NULL = _Null()
UTF8 = 'UTF-8' |
Conda prints stuff in stdout even with --json flag on
Conda commands with the `--json` should not print to the console anything unless it is proper json, otherwise assumptions made by parsing clients (Like the Navigator import functionality) break.
Something like
`conda env create -n qatest -f environment.yaml --json`
prints to the standard output
```
Fetching package metadata .........
Solving package specifications: ..........
{"progress": 0, "finished": false, "maxval": 10}
{"name": "openssl", "progress": 0, "finished": false, "maxval": 10}
{"name": "readline", "progress": 1, "finished": false, "maxval": 10}
{"name": "sqlite", "progress": 2, "finished": false, "maxval": 10}
{"name": "tk", "progress": 3, "finished": false, "maxval": 10}
{"name": "xz", "progress": 4, "finished": false, "maxval": 10}
{"name": "zlib", "progress": 5, "finished": false, "maxval": 10}
{"name": "python", "progress": 6, "finished": false, "maxval": 10}
{"name": "setuptools", "progress": 7, "finished": false, "maxval": 10}
{"name": "wheel", "progress": 8, "finished": false, "maxval": 10}
{"name": "pip", "progress": 9, "finished": false, "maxval": 10}
{"progress": 10, "finished": true, "maxval": 10}
```
Conda prints stuff in stdout even with --json flag on
Conda commands with the `--json` should not print to the console anything unless it is proper json, otherwise assumptions made by parsing clients (Like the Navigator import functionality) break.
Something like
`conda env create -n qatest -f environment.yaml --json`
prints to the standard output
```
Fetching package metadata .........
Solving package specifications: ..........
{"progress": 0, "finished": false, "maxval": 10}
{"name": "openssl", "progress": 0, "finished": false, "maxval": 10}
{"name": "readline", "progress": 1, "finished": false, "maxval": 10}
{"name": "sqlite", "progress": 2, "finished": false, "maxval": 10}
{"name": "tk", "progress": 3, "finished": false, "maxval": 10}
{"name": "xz", "progress": 4, "finished": false, "maxval": 10}
{"name": "zlib", "progress": 5, "finished": false, "maxval": 10}
{"name": "python", "progress": 6, "finished": false, "maxval": 10}
{"name": "setuptools", "progress": 7, "finished": false, "maxval": 10}
{"name": "wheel", "progress": 8, "finished": false, "maxval": 10}
{"name": "pip", "progress": 9, "finished": false, "maxval": 10}
{"progress": 10, "finished": true, "maxval": 10}
```
| conda_env/cli/main.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import os
import sys
try:
from conda.exceptions import conda_exception_handler
except ImportError as e:
if 'CONDA_DEFAULT_ENV' in os.environ:
sys.stderr.write("""
There was an error importing conda.
It appears this was caused by installing conda-env into a conda
environment. Like conda, conda-env needs to be installed into your
root conda/Anaconda environment.
Please deactivate your current environment, then re-install conda-env
using this command:
conda install -c conda conda-env
If you are seeing this error and have not installed conda-env into an
environment, please open a bug report at:
https://github.com/conda/conda-env
""".lstrip())
sys.exit(-1)
else:
raise e
from conda.cli.conda_argparse import ArgumentParser
from . import main_attach
from . import main_create
from . import main_export
from . import main_list
from . import main_remove
from . import main_upload
from . import main_update
from conda.base.context import context
# TODO: This belongs in a helper library somewhere
# Note: This only works with `conda-env` as a sub-command. If this gets
# merged into conda-env, this needs to be adjusted.
def show_help_on_empty_command():
    """Append ``--help`` when the CLI was invoked with no sub-command at all."""
    no_args_given = len(sys.argv) == 1  # argv holds only the program name
    if no_args_given:
        sys.argv += ['--help']
def create_parser():
    """Build the conda-env argument parser with every sub-command registered."""
    p = ArgumentParser()
    sub_parsers = p.add_subparsers()
    main_attach.configure_parser(sub_parsers)
    main_create.configure_parser(sub_parsers)
    main_export.configure_parser(sub_parsers)
    main_list.configure_parser(sub_parsers)
    main_remove.configure_parser(sub_parsers)
    main_upload.configure_parser(sub_parsers)
    main_update.configure_parser(sub_parsers)
    # No sub-command given: fall back to showing help.
    show_help_on_empty_command()
    return p
def main():
    # Standard CLI bootstrap: parse args, sync them into the global conda
    # context, then run the chosen sub-command under the exception handler.
    parser = create_parser()
    args = parser.parse_args()
    context._add_argparse_args(args)
    return conda_exception_handler(args.func, args, parser)
if __name__ == '__main__':
sys.exit(main())
<|code_end|>
| conda_env/cli/main.py
from __future__ import print_function, division, absolute_import
from logging import getLogger, CRITICAL
import os
import sys
try:
from conda.exceptions import conda_exception_handler
except ImportError as e:
if 'CONDA_DEFAULT_ENV' in os.environ:
sys.stderr.write("""
There was an error importing conda.
It appears this was caused by installing conda-env into a conda
environment. Like conda, conda-env needs to be installed into your
root conda/Anaconda environment.
Please deactivate your current environment, then re-install conda-env
using this command:
conda install -c conda conda-env
If you are seeing this error and have not installed conda-env into an
environment, please open a bug report at:
https://github.com/conda/conda-env
""".lstrip())
sys.exit(-1)
else:
raise e
from conda.cli.conda_argparse import ArgumentParser
from . import main_attach
from . import main_create
from . import main_export
from . import main_list
from . import main_remove
from . import main_upload
from . import main_update
from conda.base.context import context
# TODO: This belongs in a helper library somewhere
# Note: This only works with `conda-env` as a sub-command. If this gets
# merged into conda-env, this needs to be adjusted.
def show_help_on_empty_command():
    # With no sub-command given, behave as if --help had been passed.
    if len(sys.argv) == 1:  # sys.argv == ['/path/to/bin/conda-env']
        sys.argv.append('--help')
def create_parser():
    """Build the conda-env argument parser with every sub-command registered."""
    p = ArgumentParser()
    sub_parsers = p.add_subparsers()
    main_attach.configure_parser(sub_parsers)
    main_create.configure_parser(sub_parsers)
    main_export.configure_parser(sub_parsers)
    main_list.configure_parser(sub_parsers)
    main_remove.configure_parser(sub_parsers)
    main_upload.configure_parser(sub_parsers)
    main_update.configure_parser(sub_parsers)
    # No sub-command given: fall back to showing help.
    show_help_on_empty_command()
    return p
def main():
    parser = create_parser()
    args = parser.parse_args()
    context._add_argparse_args(args)
    if getattr(args, 'json', False):
        # # Silence logging info to avoid interfering with JSON output
        # for logger in Logger.manager.loggerDict:
        #     if logger not in ('fetch', 'progress'):
        #         getLogger(logger).setLevel(CRITICAL + 1)
        # Mute conda's console loggers so only JSON reaches stdout.
        for logger in ('print', 'dotupdate', 'stdoutlog', 'stderrlog'):
            getLogger(logger).setLevel(CRITICAL + 1)
    return conda_exception_handler(args.func, args, parser)
if __name__ == '__main__':
sys.exit(main())
| conda_env/cli/main.py
--- a/conda_env/cli/main.py
+++ b/conda_env/cli/main.py
@@ -1,4 +1,7 @@
from __future__ import print_function, division, absolute_import
+
+from logging import getLogger, CRITICAL
+
import os
import sys
@@ -66,6 +69,14 @@ def main():
parser = create_parser()
args = parser.parse_args()
context._add_argparse_args(args)
+ if getattr(args, 'json', False):
+ # # Silence logging info to avoid interfering with JSON output
+ # for logger in Logger.manager.loggerDict:
+ # if logger not in ('fetch', 'progress'):
+ # getLogger(logger).setLevel(CRITICAL + 1)
+ for logger in ('print', 'dotupdate', 'stdoutlog', 'stderrlog'):
+ getLogger(logger).setLevel(CRITICAL + 1)
+
return conda_exception_handler(args.func, args, parser)
|
Invalid JSON output
When installing `Jupyter` I sometimes see the following error:
```
dbus post-link :: /etc/machine-id not found ..
dbus post-link :: .. using /proc/sys/kernel/random/boot_id
```
When installing with the `--json` flag the error output causes the json to be invalid. Example:
```
root@head:~# conda create -n test_env2 python jupyter -y --json -q
dbus post-link :: /etc/machine-id not found ..
dbus post-link :: .. using /proc/sys/kernel/random/boot_id
{
"actions": {
"LINK": [
"expat-2.1.0-0 1",
...
],
"PREFIX": "/opt/a/b/c/muunitnoc/anaconda/envs/test_env2",
"SYMLINK_CONDA": [
"/opt/a/b/c/muunitnoc/anaconda"
],
"op_order": [
"RM_FETCHED",
"FETCH",
"RM_EXTRACTED",
"EXTRACT",
"UNLINK",
"LINK",
"SYMLINK_CONDA"
]
},
"success": true
}
```
In my opinion this is fairly critical -- I need to be able to depend on valid JSON output
cc @kalefranz @koverholt @mingwandroid
Invalid JSON output
When installing `Jupyter` I sometimes see the following error:
```
dbus post-link :: /etc/machine-id not found ..
dbus post-link :: .. using /proc/sys/kernel/random/boot_id
```
When installing with the `--json` flag the error output causes the json to be invalid. Example:
```
root@head:~# conda create -n test_env2 python jupyter -y --json -q
dbus post-link :: /etc/machine-id not found ..
dbus post-link :: .. using /proc/sys/kernel/random/boot_id
{
"actions": {
"LINK": [
"expat-2.1.0-0 1",
...
],
"PREFIX": "/opt/a/b/c/muunitnoc/anaconda/envs/test_env2",
"SYMLINK_CONDA": [
"/opt/a/b/c/muunitnoc/anaconda"
],
"op_order": [
"RM_FETCHED",
"FETCH",
"RM_EXTRACTED",
"EXTRACT",
"UNLINK",
"LINK",
"SYMLINK_CONDA"
]
},
"success": true
}
```
In my opinion this is fairly critical -- I need to be able to depend on valid JSON output
cc @kalefranz @koverholt @mingwandroid
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import struct
import subprocess
import sys
import tarfile
import traceback
from collections import namedtuple
from enum import Enum
from itertools import chain
from os.path import (abspath, basename, dirname, exists, isdir, isfile, islink, join, normcase,
normpath)
from . import CondaError
from .base.constants import UTF8
from .base.context import context
from .common.disk import exp_backoff_fn, rm_rf
from .common.url import path_to_url
from .exceptions import CondaOSError, LinkError, PaddingError
from .lock import DirectoryLock, FileLock
from .models.channel import Channel
from .utils import on_win
# conda-build compatibility
from .common.disk import delete_trash, move_to_trash, move_path_to_trash # NOQA
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
def win_hard_link(src, dst):
    "Equivalent to os.link, using the win32 CreateHardLink call."
    # CreateHardLinkW returns 0/FALSE on failure; surface that as a conda error.
    if not CreateHardLink(dst, src, None):
        raise CondaOSError('win32 hard link failed')
def win_soft_link(src, dst):
    "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
    # CreateSymbolicLinkW is absent on older Windows; the ctypes lookup above
    # leaves the name as None in that case.
    if CreateSymbolicLink is None:
        raise CondaOSError('win32 soft link not supported')
    # Third argument is the flags word: truthy for a directory symlink.
    if not CreateSymbolicLink(dst, src, isdir(src)):
        raise CondaOSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
    """Special function for Windows XP where the `CreateSymbolicLink`
    function is not available.

    Simply creates a `.bat` file at `dst` which calls `src` together with
    all command line arguments.

    Works of course only with callable files, e.g. `.bat` or `.exe` files.
    """
    from conda.utils import shells
    # Ensure the destination directory exists (EAFP; tolerate concurrent mkdir).
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
            pass
        else:
            raise

    # bat file redirect
    if not os.path.isfile(dst + '.bat'):
        with open(dst + '.bat', 'w') as f:
            f.write('@echo off\ncall "%s" %%*\n' % src)

    # TODO: probably need one here for powershell at some point

    # This one is for bash/cygwin/msys
    # set default shell to bash.exe when not provided, as that's most common
    if not shell:
        shell = "bash.exe"

    # technically these are "links" - but islink doesn't work on win
    if not os.path.isfile(dst):
        with open(dst, "w") as f:
            f.write("#!/usr/bin/env bash \n")
            if src.endswith("conda"):
                f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
            else:
                f.write('source %s "$@"' % shells[shell]['path_to'](src))
    # Make the new file executable
    # http://stackoverflow.com/a/30463972/1170370
    mode = os.stat(dst).st_mode
    mode |= (mode & 292) >> 2  # copy R bits to X (292 == 0o444)
    os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')

# Matches an entire shebang line, capturing (whole line, interpreter path, options).
SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))')
class FileMode(Enum):
    """How an embedded prefix is replaced in a file: plain text or padded binary."""
    text = 'text'
    binary = 'binary'

    def __str__(self):
        return "%s" % self.value
# Link-type codes used throughout this module.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names for the codes above.
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create `dst` from `src` using the requested strategy (hard/soft/copy)."""
    if linktype == LINK_HARD:
        hard_link_fn = win_hard_link if on_win else os.link
        hard_link_fn(src, dst)
        return
    if linktype == LINK_SOFT:
        soft_link_fn = win_soft_link if on_win else os.symlink
        soft_link_fn(src, dst)
        return
    if linktype == LINK_COPY:
        # Preserve relative symlinks as symlinks rather than copying the target.
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
        return
    raise CondaError("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit, then retry `func`."""
    writable = stat.S_IWRITE
    os.chmod(path, writable)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """rmtree onerror callback: log (never raise) why a path could not be removed.

    Args:
        function: the os function that failed (unused; rmtree callback signature).
        path: the path that could not be removed.
        exc_info: (type, value, traceback) tuple describing the failure.
    """
    # Bug fix: `Logger.warn` is a deprecated alias of `Logger.warning`.
    if exc_info[1].errno == errno.EACCES:
        log.warning("Cannot remove, permission denied: {0}".format(path))
    elif exc_info[1].errno == errno.ENOTEMPTY:
        log.warning("Cannot remove, not empty: {0}".format(path))
    else:
        log.warning("Cannot remove, unknown reason: {0}".format(path))
def yield_lines(path):
    """Generator function for lines in file. Empty generator if path does not exist.

    Args:
        path (str): path to file

    Returns:
        iterator: each stripped line in file, skipping blanks and '#' comments
    """
    try:
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                yield line
    except (IOError, OSError) as e:
        if e.errno == errno.ENOENT:
            # A missing file simply yields nothing.  Bug fix: this used
            # `raise StopIteration`, which PEP 479 (Python 3.7+) turns into a
            # RuntimeError inside a generator; a plain `return` is required.
            return
        else:
            raise
# Placeholder prefix baked into packages at build time, later rewritten to the
# real environment prefix by update_prefix().
PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')

# backwards compatibility for conda-build
prefix_placeholder = PREFIX_PLACEHOLDER
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode)

    A line in `has_prefix` contains one of
      * filepath
      * placeholder mode filepath

    mode values are one of
      * text
      * binary
    """
    ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath'))

    def parse_line(line):
        # placeholder, filemode, filepath
        # shlex.split honors quoting; strip any quote characters it leaves behind.
        parts = tuple(x.strip('"\'') for x in shlex.split(line, posix=False))
        if len(parts) == 1:
            # Bare filepath: default placeholder, text mode.
            return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0])
        elif len(parts) == 3:
            return ParseResult(parts[0], FileMode(parts[1]), parts[2])
        else:
            raise RuntimeError("Invalid has_prefix file at path: %s" % path)

    parsed_lines = (parse_line(line) for line in yield_lines(path))
    return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
class _PaddingError(Exception):
    # Raised by binary_replace when the new prefix is longer than the
    # placeholder, so NUL-padding cannot preserve the file's layout.
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    if on_win and has_pyzzer_entry_point(data):
        # distlib entry-point exes embed a zip archive; handle specially.
        return replace_pyzzer_entry_point_shebang(data, a, b)

    def replace(match):
        occurances = match.group().count(a)
        # Each occurrence frees len(a)-len(b) bytes which must be re-padded
        # with NULs so the total length is unchanged.
        padding = (len(a) - len(b))*occurances
        if padding < 0:
            # Replacement longer than placeholder: cannot pad.
            raise _PaddingError
        return match.group().replace(a, b) + b'\0' * padding

    original_data_len = len(data)
    # Match the placeholder plus the rest of its NUL-terminated string.
    pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    data = pat.sub(replace, data)
    # Length preservation is the whole point; fail loudly if it broke.
    assert len(data) == original_data_len

    return data
def replace_long_shebang(mode, data):
    # Shebang lines longer than 127 bytes get truncated by the kernel; rewrite
    # them to the short `#!/usr/bin/env <name>` form.
    if mode is FileMode.text:
        shebang_match = SHEBANG_REGEX.match(data)
        if shebang_match:
            whole_shebang, executable, options = shebang_match.groups()
            if len(whole_shebang) > 127:
                executable_name = executable.decode(UTF8).split('/')[-1]
                new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode(UTF8))
                data = data.replace(whole_shebang, new_shebang.encode(UTF8))
    else:
        # TODO: binary shebangs exist; figure this out in the future if text works well
        log.debug("TODO: binary shebangs exist; figure this out in the future if text works well")
    return data
def has_pyzzer_entry_point(data):
    """True if `data` contains a zip end-of-central-directory record
    (i.e. looks like a pyzzer/distlib entry-point executable)."""
    eocd_signature = b'PK\x05\x06'
    return data.rfind(eocd_signature) >= 0
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
    """Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
    which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
    We need to change the shebang.
    https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112  # NOQA
    """
    # Copyright (c) 2013 Vinay Sajip.
    #
    # Permission is hereby granted, free of charge, to any person obtaining a copy
    # of this software and associated documentation files (the "Software"), to deal
    # in the Software without restriction, including without limitation the rights
    # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    # copies of the Software, and to permit persons to whom the Software is
    # furnished to do so, subject to the following conditions:
    #
    # The above copyright notice and this permission notice shall be included in
    # all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    # THE SOFTWARE.
    launcher = shebang = None
    # Find the zip end-of-central-directory record to locate the archive start.
    pos = all_data.rfind(b'PK\x05\x06')
    if pos >= 0:
        end_cdr = all_data[pos + 12:pos + 20]
        cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
        arc_pos = pos - cdr_size - cdr_offset
        data = all_data[arc_pos:]
        if arc_pos > 0:
            # Everything before the archive is launcher + shebang.
            pos = all_data.rfind(b'#!', 0, arc_pos)
            if pos >= 0:
                shebang = all_data[pos:arc_pos]
                if pos > 0:
                    launcher = all_data[:pos]

        # Only rewrite when all three pieces were identified.
        if data and shebang and launcher:
            if hasattr(placeholder, 'encode'):
                placeholder = placeholder.encode('utf-8')
            if hasattr(new_prefix, 'encode'):
                new_prefix = new_prefix.encode('utf-8')
            shebang = shebang.replace(placeholder, new_prefix)
            all_data = b"".join([launcher, shebang, data])
    return all_data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute `placeholder` with `new_prefix` in `data` per `mode`."""
    old_bytes = placeholder.encode(UTF8)
    new_bytes = new_prefix.encode(UTF8)
    if mode is FileMode.text:
        return data.replace(old_bytes, new_bytes)
    if mode == FileMode.binary:
        # Length-preserving replacement with NUL padding.
        return binary_replace(data, old_bytes, new_bytes)
    raise RuntimeError("Invalid mode: %r" % mode)
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
    """Rewrite the embedded placeholder prefix in `path` to `new_prefix`, in place.

    Preserves the file's permission bits; a no-op when nothing changes.
    """
    if on_win and mode is FileMode.text:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        # Rewritten prefixes can push shebang lines over the kernel limit.
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return
    # Capture mode bits before rewriting so they can be restored afterwards.
    st = os.lstat(path)
    # exp_backoff_fn retries the open on transient filesystem errors.
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into (channel, package); channel defaults to 'defaults'.

    Strips a trailing '[...]' selector and a '.tar.bz2' extension first.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    channel, sep, package = dist.partition('::')
    # With no '::' present, partition leaves everything in `channel`.
    return (channel, package) if sep else ('defaults', channel)
def dist2quad(dist):
    """Return (name, version, build, channel) parsed from a dist string."""
    channel, dist = dist2pair(dist)
    # Right-split twice and pad, so short/malformed names still unpack.
    pieces = dist.rsplit('-', 2) + ['', '']
    return (str(pieces[0]), str(pieces[1]), str(pieces[2]), str(channel))
def dist2name(dist):
    """Package name component of a dist string."""
    name = dist2quad(dist)[0]
    return name
def name_dist(dist):
    # Alias of dist2name, kept for backwards compatibility.
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Archive filename for a dist: its package part plus `suffix`."""
    _channel, package = dist2pair(dist)
    return package + suffix
def dist2dirname(dist):
    # Directory name of an extracted dist: the filename without archive suffix.
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if not meta.get('url'):
        # Fall back to the URL recorded in the package cache (urls.txt).
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # Keep the in-memory linked-data cache in sync -- load_linked_data is
        # presumably defined elsewhere in this module; confirm.
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # `logging.warn` is a deprecated alias of `logging.warning`.
        logging.warning("Environment name starts with underscore '_'. "
                        "Skipping menu installation.")
        return

    try:
        import menuinst
    except Exception:
        # Bug fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        logging.warning("Menuinst could not be imported:")
        logging.warning(traceback.format_exc())
        return

    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # Best-effort: a broken shortcut must not abort the install.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    if not isfile(path):
        # No script shipped with this package: trivially successful.
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # COMSPEC unset: cannot run a .bat file.
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    env = os.environ.copy()
    # Environment variables forming the contract with package scripts.
    env[str('ROOT_PREFIX')] = sys.prefix
    env[str('PREFIX')] = str(env_prefix or prefix)
    env[str('PKG_NAME')], env[str('PKG_VERSION')], env[str('PKG_BUILDNUM')], _ = dist2quad(dist)
    if action == 'pre-link':
        env[str('SOURCE_DIR')] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """First recorded URL for a cached dist, or None if unknown."""
    # 'urls' defaults to (None,) so res[0] is None when nothing was recorded;
    # an explicit empty list also yields None via the truthiness check.
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) base64-encoded, or None if absent."""
    import base64
    try:
        # Bug fix: `open(...).read()` leaked the file handle until GC;
        # use a context manager so it is closed promptly.
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as f:
            data = f.read()
        return base64.b64encode(data).decode(UTF8)
    except IOError:
        # Missing or unreadable icon is not an error.
        return None
def read_no_link(info_dir):
    # Files listed in info/no_link or info/no_softlink must be copied, not linked.
    return set(chain(yield_lines(join(info_dir, 'no_link')),
                     yield_lines(join(info_dir, 'no_softlink'))))
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """Expose root's conda/activate/deactivate scripts inside `prefix`."""
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        # On Windows use .bat redirects instead of real symlinks.
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from root_dir/<where> into prefix/<where>."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                rm_rf(prefix_file)
            # if they're in use, they won't be killed.  Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
                # A live link we cannot replace is acceptable; leave it be.
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard-linking from the package cache into `prefix` works.

    Returns True only when a real hard link (not a symlink fallback) could be
    created; the probe file is always cleaned up.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
# ------- package cache ----- construction

# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.

# dist key -> {'files': [...], 'dirs': [...], 'urls': [...]}
package_cache_ = {}
# package path (and its file:// URL) -> channel prefix ('' for defaults, else 'channel::')
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # ensure the cache dicts are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare dist name was passed rather than a URL.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # Only count the extracted dir if its metadata looks complete.
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = url  # NOTE(review): no-op; likely a vestige of URL normalization -- confirm
    schannel = Channel(url).canonical_name
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # Index by both the filesystem path and its file:// URL form.
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            # Best-effort append; failure to record the URL is non-fatal.
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    # import pdb; pdb.set_trace()
    for pdir in context.pkgs_dirs:
        try:
            # urls.txt is scanned in reverse so the most recently recorded
            # URL for a filename wins the fname_table_ entry
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            # pick up any package files/dirs urls.txt did not mention
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url* in the package cache,
    or None if that URL has never been seen."""
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # on the second pass prefix cannot be None here: a None
                # prefix would already have returned on the first pass
                return pkg_dir, prefix + dname if p else None
    # NOTE(review): implicitly returns None if context.pkgs_dirs is empty
# ------- package cache ----- fetched
def fetched():
    """
    Return the set of canonical names of every package for which a
    downloaded tarball is present in the package cache.
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Return the full path of the fetched tarball for *dist*, or None if it
    is not present in the package cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both the path-keyed and URL-keyed entries from the filename table
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    Return the set of canonical names of every package for which an
    extracted copy is present in the package cache.
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    Return the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # a tarball is still cached: keep the record but clear its dirs
        rec['dirs'] = []
    else:
        # nothing left for this package at all: drop the whole record
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]  # strip '.tar.bz2' -> final extraction directory
    with FileLock(path):
        # extract into a temp dir first, then rename into place so a
        # partially-extracted package is never visible under `path`
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            # NOTE(review): extractall trusts the archive contents; conda
            # packages are assumed not to contain absolute or '..' members
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
# prefix -> {dist -> meta record}; populated lazily by linked_data()
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None, ignore_channels=False):
    """
    Load (or register) the conda-meta record for *dist* in *prefix*.

    If *rec* is None the record is read from conda-meta/<dist>.json;
    otherwise the given record is registered directly.  The record's
    url/channel/schannel fields are normalized, it is stored in the
    module-level linked_data_ cache (keyed with the channel prefix unless
    ignore_channels is True), and the record is returned.  Returns None
    when the metadata file is missing or its filename is inconsistent.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # ensure the per-prefix cache dict exists before we insert into it
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # BUG FIX: was `channel[0] != '<unknown>'`, which compares a single
        # character to a multi-character string and is therefore always True,
        # so file: URLs were clobbered even for unknown channels.
        if not url or (url.startswith('file:') and channel != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = Channel(url).url_channel_wtf
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    if ignore_channels:
        linked_data_[prefix][dname] = rec
    else:
        cprefix = '' if schannel == 'defaults' else schannel + '::'
        linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Remove *dist* from the in-memory linked-data cache for *prefix*;
    with delete=True also remove its conda-meta/<dist>.json file."""
    recs = linked_data_.get(prefix)
    if recs and dist in recs:
        del recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            rm_rf(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # path is a cached prefix and dist the package inside it
                delete_linked_data(path, dist)
                return True
            else:
                # path itself is a cached prefix: drop all of its records
                del linked_data_[path]
                return True
        # walk one path component upward and try again
        path, dist = os.path.split(path)
        if not dist:
            # reached the filesystem root without finding a cached prefix
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # NOTE: keys in linked_data() normally carry the channel prefix
    # (e.g. 'chan::name-ver-build'); *dist* must match that form.
    return linked_data(prefix).get(dist)
def linked_data(prefix, ignore_channels=False):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # each conda-meta/<dist>.json describes one linked package
                    load_linked_data(prefix, fn[:-5], ignore_channels=ignore_channels)
    return recs
def linked(prefix, ignore_channels=False):
    """
    Return the set of canonical names of the packages linked into *prefix*.
    """
    return set(linked_data(prefix, ignore_channels=ignore_channels))
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    log.debug("linking package %s with link type %s", dist, linktype)
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        raise LinkError('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    # for the lock issue
    # may run into lock if prefix not exist
    if not isdir(prefix):
        os.makedirs(prefix)
    with DirectoryLock(prefix), FileLock(source_dir):
        for filepath in files:
            src = join(source_dir, filepath)
            dst = join(prefix, filepath)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.info("file exists, but clobbering: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # files containing the build prefix, explicit no-link files, and
            # symlinks must be copied rather than hard/soft linked
            if filepath in has_prefix_files or filepath in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                raise CondaOSError('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                                   (src, dst, lt, e))
        for filepath in sorted(has_prefix_files):
            # rewrite the build-time placeholder prefix to the real prefix
            placeholder, mode = has_prefix_files[filepath]
            try:
                update_prefix(join(prefix, filepath), prefix, placeholder, mode)
            except _PaddingError:
                raise PaddingError(dist, placeholder, len(placeholder))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if context.shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            raise LinkError("Error: post-link failed for: %s" % dist)
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
        if isfile(alt_files_path):
            # alt_files_path is a hack for noarch
            meta_dict['files'] = list(yield_lines(alt_files_path))
        else:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with DirectoryLock(prefix):
        log.debug("unlinking package %s", dist)
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                # collect every ancestor directory up to (not including) prefix
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # remove empty directories, deepest paths first so children are
        # removed before their parents
        for path in sorted(dst_dirs2, key=len, reverse=True):
            if isdir(path) and not os.listdir(path):
                rm_rf(path)
def messages(prefix):
    """Print the contents of <prefix>/.messages.txt to stdout (if the file
    exists) and then delete it."""
    msg_path = join(prefix, '.messages.txt')
    try:
        with open(msg_path) as fh:
            contents = fh.read()
        sys.stdout.write(contents)
    except IOError:
        # no messages file: nothing to report
        pass
    finally:
        rm_rf(msg_path)
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import struct
import subprocess
import sys
import tarfile
import traceback
from collections import namedtuple
from enum import Enum
from itertools import chain
from os.path import (abspath, basename, dirname, exists, isdir, isfile, islink, join, normcase,
normpath)
from . import CondaError
from .base.constants import UTF8
from .base.context import context
from .common.disk import exp_backoff_fn, rm_rf
from .common.url import path_to_url
from .exceptions import CondaOSError, LinkError, PaddingError
from .lock import DirectoryLock, FileLock
from .models.channel import Channel
from .utils import on_win
# conda-build compatibility
from .common.disk import delete_trash, move_to_trash, move_path_to_trash # NOQA
if on_win:
    import ctypes
    from ctypes import wintypes

    # ctypes bindings for the Win32 link-creation APIs; used because the
    # os-level link functions are not usable on all supported Windows setups
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # CreateSymbolicLinkW is unavailable (e.g. Windows XP)
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise CondaOSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise CondaOSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise CondaOSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise

        # bat file redirect
        if not os.path.isfile(dst + '.bat'):
            with open(dst + '.bat', 'w') as f:
                f.write('@echo off\ncall "%s" %%*\n' % src)

        # TODO: probably need one here for powershell at some point

        # This one is for bash/cygwin/msys
        # set default shell to bash.exe when not provided, as that's most common
        if not shell:
            shell = "bash.exe"

        # technically these are "links" - but islink doesn't work on win
        if not os.path.isfile(dst):
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
            # Make the new file executable
            # http://stackoverflow.com/a/30463972/1170370
            mode = os.stat(dst).st_mode
            mode |= (mode & 292) >> 2  # copy R bits to X
            os.chmod(dst, mode)
# Module-level loggers and the regex used to locate a shebang line.
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
# Matches '#!<interpreter> <options>'; the interpreter path may contain
# backslash-escaped spaces.
SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))')
class FileMode(Enum):
    """How a file's embedded prefix should be rewritten: as plain text or
    as null-padded binary data."""
    text = 'text'
    binary = 'binary'

    def __str__(self):
        # render as the bare value ('text' / 'binary'), not the enum repr
        return str(self.value)
# Link type codes used throughout this module.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# Human-readable names for the link type codes (stored in conda-meta records).
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* from *src* using the requested link type
    (LINK_HARD, LINK_SOFT, or LINK_COPY)."""
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise CondaError("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit and retry."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """rmtree onerror callback: log why *path* could not be removed."""
    err_no = exc_info[1].errno
    if err_no == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err_no == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def yield_lines(path):
    """Generator function for lines in file. Empty generator if path does not exist.

    Args:
        path (str): path to file

    Returns:
        iterator: each line in file, not starting with '#'
    """
    try:
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                yield line
    except (IOError, OSError) as e:
        if e.errno == errno.ENOENT:
            # A missing file means an empty generator.  `raise StopIteration`
            # inside a generator body is a RuntimeError under PEP 479
            # (Python 3.7+), so end the generator with a plain return.
            return
        else:
            raise
# The placeholder prefix baked into package files at build time; replaced
# with the real environment prefix at link time (see update_prefix).
PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')
# backwards compatibility for conda-build
prefix_placeholder = PREFIX_PLACEHOLDER
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode)

    A line in `has_prefix` contains one of
      * filepath
      * placeholder mode filepath

    mode values are one of
      * text
      * binary
    """
    ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath'))

    def parse_line(line):
        # placeholder, filemode, filepath
        # shlex with posix=False keeps quoted fields intact; surrounding
        # quotes are then stripped manually
        parts = tuple(x.strip('"\'') for x in shlex.split(line, posix=False))
        if len(parts) == 1:
            # bare filepath: assume the default placeholder and text mode
            return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0])
        elif len(parts) == 3:
            return ParseResult(parts[0], FileMode(parts[1]), parts[2])
        else:
            raise RuntimeError("Invalid has_prefix file at path: %s" % path)

    parsed_lines = (parse_line(line) for line in yield_lines(path))
    return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
class _PaddingError(Exception):
    # Internal: raised by binary_replace when the new prefix is longer than
    # the placeholder, leaving no room for null padding.  Translated into
    # the public PaddingError by link().
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    if on_win and has_pyzzer_entry_point(data):
        return replace_pyzzer_entry_point_shebang(data, a, b)

    def replace(match):
        # pad with NULs so the overall length of the match is preserved
        occurances = match.group().count(a)
        padding = (len(a) - len(b))*occurances
        if padding < 0:
            # the new prefix is longer than the placeholder: cannot pad
            raise _PaddingError
        return match.group().replace(a, b) + b'\0' * padding

    original_data_len = len(data)
    # match the placeholder up to (and including) its terminating NUL
    pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    data = pat.sub(replace, data)
    assert len(data) == original_data_len  # binary layout must not shift
    return data
def replace_long_shebang(mode, data):
    """
    Rewrite a shebang longer than 127 bytes to the form
    `#!/usr/bin/env <name>` so the script stays executable; only text-mode
    data is handled, everything else is returned unchanged.
    """
    if mode is FileMode.text:
        shebang_match = SHEBANG_REGEX.match(data)
        if shebang_match:
            whole_shebang, executable, options = shebang_match.groups()
            if len(whole_shebang) > 127:
                executable_name = executable.decode(UTF8).split('/')[-1]
                new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode(UTF8))
                data = data.replace(whole_shebang, new_shebang.encode(UTF8))
    else:
        # TODO: binary shebangs exist; figure this out in the future if text works well
        log.debug("TODO: binary shebangs exist; figure this out in the future if text works well")
    return data
def has_pyzzer_entry_point(data):
    """Return True if *data* contains a zip end-of-central-directory marker,
    i.e. it looks like a pyzzer/distlib entry-point executable."""
    return data.rfind(b'PK\x05\x06') != -1
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
    """Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
    which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
    We need to change the shebang.
    https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112  # NOQA
    """
    # Copyright (c) 2013 Vinay Sajip.
    #
    # Permission is hereby granted, free of charge, to any person obtaining a copy
    # of this software and associated documentation files (the "Software"), to deal
    # in the Software without restriction, including without limitation the rights
    # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    # copies of the Software, and to permit persons to whom the Software is
    # furnished to do so, subject to the following conditions:
    #
    # The above copyright notice and this permission notice shall be included in
    # all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    # THE SOFTWARE.
    launcher = shebang = None
    # locate the zip end-of-central-directory record to find the archive start
    pos = all_data.rfind(b'PK\x05\x06')
    if pos >= 0:
        end_cdr = all_data[pos + 12:pos + 20]
        cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
        arc_pos = pos - cdr_size - cdr_offset
        data = all_data[arc_pos:]
        if arc_pos > 0:
            # the shebang sits between the launcher and the zip archive
            pos = all_data.rfind(b'#!', 0, arc_pos)
            if pos >= 0:
                shebang = all_data[pos:arc_pos]
                if pos > 0:
                    launcher = all_data[:pos]
        if data and shebang and launcher:
            if hasattr(placeholder, 'encode'):
                placeholder = placeholder.encode('utf-8')
            if hasattr(new_prefix, 'encode'):
                new_prefix = new_prefix.encode('utf-8')
            shebang = shebang.replace(placeholder, new_prefix)
            # reassemble: launcher + rewritten shebang + original archive
            all_data = b"".join([launcher, shebang, data])
    return all_data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* inside *data* according
    to *mode* (FileMode.text or FileMode.binary)."""
    old_bytes = placeholder.encode(UTF8)
    new_bytes = new_prefix.encode(UTF8)
    if mode is FileMode.text:
        return data.replace(old_bytes, new_bytes)
    if mode == FileMode.binary:
        # binary data needs null-padding to preserve the file layout
        return binary_replace(data, old_bytes, new_bytes)
    raise RuntimeError("Invalid mode: %r" % mode)
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
    """
    Replace *placeholder* with *new_prefix* in the file at *path*,
    rewriting the file in place (permission bits preserved) only if the
    contents actually changed.
    """
    if on_win and mode is FileMode.text:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return
    st = os.lstat(path)
    # exp_backoff_fn retries the open to dodge transient sharing violations
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into a (channel, package) pair, defaulting the
    channel to 'defaults', after stripping any '[...]' feature suffix and
    a trailing '.tar.bz2'."""
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    channel, sep, name = dist.partition('::')
    if sep:
        return channel, name
    return 'defaults', dist
def dist2quad(dist):
    """Break a dist string into its (name, version, build, channel) parts;
    missing version/build components come back as empty strings."""
    channel, stem = dist2pair(dist)
    pieces = stem.rsplit('-', 2) + ['', '']
    return str(pieces[0]), str(pieces[1]), str(pieces[2]), str(channel)
def dist2name(dist):
    """Return the package-name component of a dist string."""
    return dist2quad(dist)[0]


def name_dist(dist):
    """Historical alias for dist2name()."""
    return dist2name(dist)


def dist2filename(dist, suffix='.tar.bz2'):
    """Return the dist's channel-free name plus *suffix* (tarball name by default)."""
    return dist2pair(dist)[1] + suffix


def dist2dirname(dist):
    """Return the directory name the dist extracts to (filename without suffix)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our intenral cache
    meta.update(extra_info)
    if not meta.get('url'):
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # keep the in-memory cache in sync with the file just written
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:  # noqa -- menuinst is optional; any import failure just skips menus
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:  # noqa -- one bad menu file must not abort the whole install
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        # the package ships no script for this action: trivially successful
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no COMSPEC configured; .bat scripts cannot be run
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    env = os.environ.copy()
    # expose package and environment information to the script
    env[str('ROOT_PREFIX')] = sys.prefix
    env[str('PREFIX')] = str(env_prefix or prefix)
    env[str('PKG_NAME')], env[str('PKG_VERSION')], env[str('PKG_BUILDNUM')], _ = dist2quad(dist)
    if action == 'pre-link':
        env[str('SOURCE_DIR')] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or None."""
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """
    Return the base64-encoded contents of info/icon.png under *source_dir*,
    or None if the icon file cannot be read.
    """
    import base64
    try:
        # use a context manager so the file handle is closed promptly
        # (the original `open(...).read()` leaked it to the GC)
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fh:
            data = fh.read()
    except IOError:
        return None
    return base64.b64encode(data).decode(UTF8)
def read_no_link(info_dir):
    """Return the set of paths listed in info/no_link and info/no_softlink
    (either file may be absent)."""
    return set(chain(yield_lines(join(info_dir, 'no_link')),
                     yield_lines(join(info_dir, 'no_softlink'))))
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """Create conda/activate/deactivate entry points in *prefix* that
    forward to the root environment's scripts."""
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from *root_dir* into *prefix*
    using *symlink_fn* (os.symlink or a .bat redirect on Windows)."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                rm_rf(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """
    Probe whether hard links can be made from the package cache into
    *prefix* by hard-linking one known file (info/index.json) to a
    temporary name inside *prefix*.  Returns True only if a genuine hard
    link was created; the probe file is always removed afterwards.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        # never leave the probe file behind, whatever happened above
        rm_rf(dst)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# fkey ('<channel>::<dist>' or bare '<dist>') -> {'files': [...], 'dirs': [...], 'urls': [...]}
package_cache_ = {}
# tarball path (and its file:// URL) -> channel prefix ('' for defaults, '<channel>::' otherwise)
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the global cache dicts are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was given; no channel can be inferred from it
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    # candidate path of the fetched tarball
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # candidate path of the extracted package; only counts if extraction
    # completed (both info/files and info/index.json are present)
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = url  # NOTE(review): no-op assignment, kept as-is
    schannel = Channel(url).canonical_name
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # record the channel prefix under both the file path and its URL form
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            # best-effort: remember this URL for future cache scans
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion
    package_cache_['@'] = None
    # import pdb; pdb.set_trace()
    for pdir in context.pkgs_dirs:
        try:
            # urls.txt is scanned in reverse so the most recently recorded
            # URL for a filename wins the fname_table_ entry
            data = open(join(pdir, 'urls.txt')).read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            # pick up any package files/dirs urls.txt did not mention
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url* in the package cache,
    or None if that URL has never been seen."""
    package_cache()
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached somewhere; reuse that directory, no eviction needed.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # On pass 0 we only accept conflict-free dirs (prefix is None).
                # On pass 1 we take the first dir and report the conflicting
                # package so the caller can evict it.
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """Return the set of canonical names of every package with a fetched tarball."""
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """Return the full path of the fetched package tarball, or None if not cached."""
    cached_files = package_cache().get(dist, {}).get('files', ())
    # First recorded copy wins; None when the list is empty.
    return next(iter(cached_files), None)
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both lookup keys for the tarball: filesystem path and file URL.
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    # Forget the package entirely (tarball and extraction both gone).
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """Return the set of canonical names of all extracted packages in the cache."""
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """Return the directory holding the extracted package data, or None if not extracted."""
    extracted_dirs = package_cache().get(dist, {}).get('dirs', ())
    # First recorded directory wins; None when the list is empty.
    return next(iter(extracted_dirs), None)
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # The tarball is still cached, so keep the entry; the package can be
        # re-extracted later without refetching.
        rec['dirs'] = []
    else:
        # Nothing left at all; forget the package entirely.
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]
    with FileLock(path):
        # Extract into a temp dir and rename into place so a partially
        # extracted package is never visible under its final name.
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # Record the freshly extracted directory in the package cache.
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
# Shape: {prefix: {dist: record-dict}} -- see load_linked_data()/linked_data().
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None, ignore_channels=False):
    """
    Load (and cache) the conda-meta record for `dist` linked in `prefix`.

    If `rec` is None, the record is read from the conda-meta .json file;
    otherwise the supplied record is normalized and cached directly.
    Returns the normalized record dict, or None when the metadata file is
    missing or its filename does not match the dist name.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Ensure the per-prefix cache dict exists before we insert below.
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # Prefer a channel-derived URL over a local file: URL, but only when
        # the channel is actually known. The previous comparison was
        # `channel[0] != '<unknown>'`, which tested a single character against
        # an 8-character string and was therefore always true, rewriting URLs
        # even for '<unknown>' channels.
        if not url or (url.startswith('file:') and channel != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = Channel(url).url_channel_wtf
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    if ignore_channels:
        linked_data_[prefix][dname] = rec
    else:
        # Default channel gets no prefix; others are keyed as "channel::dist".
        cprefix = '' if schannel == 'defaults' else schannel + '::'
        linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop the cached conda-meta record for *dist*; optionally delete its .json file."""
    prefix_recs = linked_data_.get(prefix)
    if prefix_recs is not None and dist in prefix_recs:
        del prefix_recs[dist]
    if delete:
        json_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(json_path):
            rm_rf(json_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # `path` is a known prefix and `dist` the component directly
                # under it that we split off; remove just that record.
                delete_linked_data(path, dist)
                return True
            else:
                # `path` itself is a cached prefix; drop its whole record set.
                del linked_data_[path]
                return True
        # Walk upward one path component at a time looking for a known prefix.
        path, dist = os.path.split(path)
        if not dist:
            # Reached the filesystem root without a match.
            return False
def load_meta(prefix, dist):
    """Return the install metadata record for *dist* in *prefix*, or None if not linked."""
    return linked_data(prefix).get(dist)
def linked_data(prefix, ignore_channels=False):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # Populates linked_data_[prefix] (== recs) as a side effect.
                    load_linked_data(prefix, fn[:-5], ignore_channels=ignore_channels)
    return recs
def linked(prefix, ignore_channels=False):
    """Return the set of canonical names of the packages linked into *prefix*."""
    recs = linked_data(prefix, ignore_channels=ignore_channels)
    return set(recs.keys())
def is_linked(prefix, dist):
    """Return the install metadata for *dist* if it is linked in *prefix*, else None."""
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    log.debug("linking package %s with link type %s", dist, linktype)
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    # pre-link script may abort the whole install.
    if not run_script(source_dir, dist, 'pre-link', prefix):
        raise LinkError('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    # has_prefix files contain an embedded placeholder path to rewrite;
    # no_link files must be copied rather than hard/soft linked.
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    # for the lock issue
    # may run into lock if prefix not exist
    if not isdir(prefix):
        os.makedirs(prefix)

    with DirectoryLock(prefix), FileLock(source_dir):
        for filepath in files:
            src = join(source_dir, filepath)
            dst = join(prefix, filepath)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.info("file exists, but clobbering: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # Fall back to copying when linking would be wrong: prefix
            # rewriting mutates content, and symlink sources must not be shared.
            if filepath in has_prefix_files or filepath in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                raise CondaOSError('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                                   (src, dst, lt, e))

        # Rewrite the build-time placeholder prefix to the real prefix.
        for filepath in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[filepath]
            try:
                update_prefix(join(prefix, filepath), prefix, placeholder, mode)
            except _PaddingError:
                raise PaddingError(dist, placeholder, len(placeholder))

        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if context.shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            raise LinkError("Error: post-link failed for: %s" % dist)

        # Assemble and persist the conda-meta record for this package.
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
        if isfile(alt_files_path):
            # alt_files_path is a hack for noarch
            meta_dict['files'] = list(yield_lines(alt_files_path))
        else:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with DirectoryLock(prefix):
        log.debug("unlinking package %s", dist)
        run_script(prefix, dist, 'pre-unlink')

        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            # Remember every directory we touched so empties can be pruned below.
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)

        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)

        # Expand each touched directory to all of its ancestors inside the prefix.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        # remove empty directories (deepest first, so parents empty out)
        for path in sorted(dst_dirs2, key=len, reverse=True):
            if isdir(path) and not os.listdir(path):
                rm_rf(path)
def messages(prefix):
    """Print .messages.txt left by package scripts in *prefix*, then delete it."""
    msg_file = join(prefix, '.messages.txt')
    try:
        with open(msg_file) as fi:
            # In --json mode the messages must not pollute the JSON on stdout.
            out = sys.stderr if context.json else sys.stdout
            out.write(fi.read())
    except IOError:
        # No messages file was produced; nothing to show.
        pass
    finally:
        rm_rf(msg_file)
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -1046,7 +1046,8 @@ def messages(prefix):
path = join(prefix, '.messages.txt')
try:
with open(path) as fi:
- sys.stdout.write(fi.read())
+ fh = sys.stderr if context.json else sys.stdout
+ fh.write(fi.read())
except IOError:
pass
finally: |
conda update icu (54.1-0 --> 56.1-4 conda-forge)
In a new installation, it appears that going from icu 54 to 56 will fail unless the following is done (at least on linux):
bash Anaconda2-4.2.0-Linux-x86_64.sh
conda remove icu
rm -r $HOME/anaconda2/lib/icu
conda install -c conda-forge icu=56.1
In other words, using the first and fourth lines alone fails with:
CondaOSError: OS error: failed to link (src=u'/home/anaconda2/pkgs/icu-56.1-4/lib/icu/current', dst='/home/anaconda2/lib/icu/current', type=3, error=OSError(17, 'File exists'))
| conda/common/disk.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from errno import EACCES, EEXIST, ENOENT, EPERM
from itertools import chain
from logging import getLogger
from os import W_OK, access, chmod, getpid, listdir, lstat, makedirs, rename, unlink, walk
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, lexists
from shutil import rmtree
from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISLNK, S_ISREG, S_IWRITE
from time import sleep
from uuid import uuid4
from ..compat import lchmod, text_type
from ..utils import on_win
__all__ = ["rm_rf", "exp_backoff_fn", "try_write"]
log = getLogger(__name__)
def try_write(dir_path, heavy=False):
    """Test write access to a directory.

    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access

    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if on_win or heavy:
        # try to create a file to see if `dir_path` is writable, see #2151
        temp_filename = join(dir_path, '.conda-try-write-%d' % getpid())
        try:
            with open(temp_filename, mode='wb') as fo:
                fo.write(b'This is a test file.\n')
            backoff_unlink(temp_filename)
            return True
        except (IOError, OSError):
            return False
        finally:
            # Harmless if already unlinked above; guarantees cleanup on failure.
            backoff_unlink(temp_filename)
    else:
        return access(dir_path, W_OK)
def backoff_unlink(file_or_symlink_path):
    """Remove a file or symlink with exponential backoff; a missing path is not an error."""
    def _remove_if_present(p):
        if lexists(p):
            make_writable(p)
            unlink(p)

    try:
        exp_backoff_fn(_remove_if_present, file_or_symlink_path)
    except (IOError, OSError) as e:
        # ENOENT (file vanished underneath us) is fine; anything else propagates.
        if e.errno != ENOENT:
            raise
def backoff_rmdir(dirpath):
    """Recursively delete *dirpath*, fighting read-only bits and transient errors."""
    if not isdir(dirpath):
        return

    # shutil.rmtree:
    #   if onerror is set, it is called to handle the error with arguments (func, path, exc_info)
    #     where func is os.listdir, os.remove, or os.rmdir;
    #     path is the argument to that function that caused it to fail; and
    #     exc_info is a tuple returned by sys.exc_info() ==> (type, value, traceback).
    def retry(func, path, exc_info):
        if getattr(exc_info[1], 'errno', None) == ENOENT:
            # Already gone; nothing to retry.
            return
        # Likely a permissions problem: make the parent writable and try again.
        recursive_make_writable(dirname(path))
        func(path)

    def _rmdir(path):
        try:
            recursive_make_writable(path)
            exp_backoff_fn(rmtree, path, onerror=retry)
        except (IOError, OSError) as e:
            if e.errno == ENOENT:
                log.debug("no such file or directory: %s", path)
            else:
                raise

    # Bottom-up walk: delete files first, then each subdirectory, so parents
    # are empty by the time they are removed.
    for root, dirs, files in walk(dirpath, topdown=False):
        for file in files:
            backoff_unlink(join(root, file))
        for dir in dirs:
            _rmdir(join(root, dir))

    _rmdir(dirpath)
def make_writable(path):
    """Add the owner-write bit to *path* (plus execute for directories)."""
    try:
        mode = lstat(path).st_mode
        if S_ISDIR(mode):
            # Directories also need the execute bit to remain traversable.
            chmod(path, S_IMODE(mode) | S_IWRITE | S_IEXEC)
        elif S_ISREG(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE)
        elif S_ISLNK(mode):
            # Change the mode of the link itself, not its target.
            lchmod(path, S_IMODE(mode) | S_IWRITE)
        else:
            # Sockets, fifos, devices, etc. are left alone.
            log.debug("path cannot be made writable: %s", path)
    except Exception as e:
        eno = getattr(e, 'errno', None)
        if eno in (ENOENT,):
            log.debug("tried to make writable, but didn't exist: %s", path)
            raise
        elif eno in (EACCES, EPERM):
            # Insufficient permission is tolerated; callers fall back to
            # other strategies (e.g. moving to trash).
            log.debug("tried make writable but failed: %s\n%r", path, e)
        else:
            log.warn("Error making path writable: %s\n%r", path, e)
            raise
def recursive_make_writable(path):
    """Make *path* writable; if it is a directory, every file and subdirectory too."""
    # The need for this function was pointed out at
    # https://github.com/conda/conda/issues/3266#issuecomment-239241915
    # Especially on windows, file removal will often fail because it is marked read-only
    if isdir(path):
        for root, dirs, files in walk(path):
            # Use a distinct name for the walk entries: the original code reused
            # (shadowed) the `path` parameter here, obscuring which value was
            # being logged and processed.
            for entry in chain.from_iterable((files, dirs)):
                try:
                    exp_backoff_fn(make_writable, join(root, entry))
                except (IOError, OSError) as e:
                    if e.errno == ENOENT:
                        log.debug("no such file or directory: %s", entry)
                    else:
                        raise
    else:
        exp_backoff_fn(make_writable, path)
def exp_backoff_fn(fn, *args, **kwargs):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    # Elsewhere, call straight through: the EPERM/EACCES retry dance is only
    # needed for Windows file locking behavior.
    if not on_win:
        return fn(*args, **kwargs)

    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    max_tries = 7
    for n in range(max_tries):
        try:
            result = fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (EPERM, EACCES):
                if n == max_tries-1:
                    raise
                # Exponential backoff with jitter: 0.1 * (2**n + random()).
                sleep_time = ((2 ** n) + random.random()) * 0.1
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          sleep_time)
                sleep(sleep_time)
            elif e.errno in (ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.warn("Uncaught backoff with errno %d", e.errno)
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is 5. This only applies
    to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    try:
        path = abspath(path)
        log.debug("rm_rf %s", path)
        if isdir(path):
            try:
                # On Windows, always move to trash first.
                if trash and on_win:
                    move_result = move_path_to_trash(path, preclean=False)
                    if move_result:
                        return True
                backoff_rmdir(path)
            finally:
                # If path was removed, ensure it's not in linked_data_
                if islink(path) or isfile(path):
                    from conda.install import delete_linked_data_any
                    delete_linked_data_any(path)
        # This is now an `if`, not an `elif`: when `path` was a symlink to a
        # directory, backoff_rmdir() above removed the target but left the
        # link itself behind, and it must still be unlinked here (fixes e.g.
        # the icu 54 -> 56 upgrade failure on lib/icu/current).
        if lexists(path):
            try:
                backoff_unlink(path)
                return True
            except (OSError, IOError) as e:
                log.debug("%r errno %d\nCannot unlink %s.", e, e.errno, path)
                if trash:
                    move_result = move_path_to_trash(path)
                    if move_result:
                        return True
                log.info("Failed to remove %s.", path)
        else:
            log.debug("rm_rf failed. Not a link, file, or directory: %s", path)
        return True
    finally:
        # NOTE: the return in this finally block swallows any in-flight
        # exception when the path still exists, turning it into False.
        if lexists(path):
            log.info("rm_rf failed for %s", path)
            return False
def delete_trash(prefix=None):
    """Best-effort removal of everything inside each package cache's .trash directory."""
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not lexists(trash_dir):
            log.debug("Trash directory %s doesn't exist. Moving on.", trash_dir)
            continue
        log.debug("removing trash for %s", trash_dir)
        for entry in listdir(trash_dir):
            entry_path = join(trash_dir, entry)
            # Pick the right removal strategy, but never let one stubborn
            # entry abort the whole sweep.
            remover = backoff_rmdir if isdir(entry_path) else backoff_unlink
            try:
                remover(entry_path)
            except (IOError, OSError) as e:
                log.info("Could not delete path in trash dir %s\n%r", entry_path, e)
        if listdir(trash_dir):
            log.info("Unable to clean trash directory %s", trash_dir)
def move_to_trash(prefix, f, tempdir=None):
    """
    Deprecated alias for `move_path_to_trash`.

    Moves file or folder *f* (relative to *prefix*) into the trash.
    *tempdir* is accepted for backward compatibility and ignored.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash
    """
    # NOTE(review): `preclean` is unused in this function body — confirm
    # whether callers still rely on it before removing.
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')

        try:
            makedirs(trash_dir)
        except (IOError, OSError) as e1:
            if e1.errno != EEXIST:
                # Cannot create a trash dir here; try the next package cache.
                continue

        # Random name avoids collisions with anything already in the trash.
        trash_file = join(trash_dir, text_type(uuid4()))

        try:
            rename(path, trash_file)
        except (IOError, OSError) as e:
            # rename can fail across devices; fall through to the next cache dir.
            log.debug("Could not move %s to %s.\n%r", path, trash_file, e)
        else:
            log.debug("Moved to trash: %s", path)
            from ..install import delete_linked_data_any
            delete_linked_data_any(path)
            return True

    return False
<|code_end|>
| conda/common/disk.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from errno import EACCES, EEXIST, ENOENT, EPERM
from itertools import chain
from logging import getLogger
from os import W_OK, access, chmod, getpid, listdir, lstat, makedirs, rename, unlink, walk
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, lexists
from shutil import rmtree
from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISLNK, S_ISREG, S_IWRITE
from time import sleep
from uuid import uuid4
from ..compat import lchmod, text_type
from ..utils import on_win
__all__ = ["rm_rf", "exp_backoff_fn", "try_write"]
log = getLogger(__name__)
def try_write(dir_path, heavy=False):
    """Test write access to a directory.

    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access

    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if on_win or heavy:
        # try to create a file to see if `dir_path` is writable, see #2151
        temp_filename = join(dir_path, '.conda-try-write-%d' % getpid())
        try:
            with open(temp_filename, mode='wb') as fo:
                fo.write(b'This is a test file.\n')
            backoff_unlink(temp_filename)
            return True
        except (IOError, OSError):
            return False
        finally:
            # Harmless if already unlinked above; guarantees cleanup on failure.
            backoff_unlink(temp_filename)
    else:
        return access(dir_path, W_OK)
def backoff_unlink(file_or_symlink_path):
    """Remove a file or symlink with exponential backoff; a missing path is not an error."""
    def _remove_if_present(p):
        if lexists(p):
            make_writable(p)
            unlink(p)

    try:
        exp_backoff_fn(_remove_if_present, file_or_symlink_path)
    except (IOError, OSError) as e:
        # ENOENT (file vanished underneath us) is fine; anything else propagates.
        if e.errno != ENOENT:
            raise
def backoff_rmdir(dirpath):
    """Recursively delete *dirpath*, fighting read-only bits and transient errors."""
    if not isdir(dirpath):
        return

    # shutil.rmtree:
    #   if onerror is set, it is called to handle the error with arguments (func, path, exc_info)
    #     where func is os.listdir, os.remove, or os.rmdir;
    #     path is the argument to that function that caused it to fail; and
    #     exc_info is a tuple returned by sys.exc_info() ==> (type, value, traceback).
    def retry(func, path, exc_info):
        if getattr(exc_info[1], 'errno', None) == ENOENT:
            # Already gone; nothing to retry.
            return
        # Likely a permissions problem: make the parent writable and try again.
        recursive_make_writable(dirname(path))
        func(path)

    def _rmdir(path):
        try:
            recursive_make_writable(path)
            exp_backoff_fn(rmtree, path, onerror=retry)
        except (IOError, OSError) as e:
            if e.errno == ENOENT:
                log.debug("no such file or directory: %s", path)
            else:
                raise

    # Bottom-up walk: delete files first, then each subdirectory, so parents
    # are empty by the time they are removed.
    for root, dirs, files in walk(dirpath, topdown=False):
        for file in files:
            backoff_unlink(join(root, file))
        for dir in dirs:
            _rmdir(join(root, dir))

    _rmdir(dirpath)
def make_writable(path):
    """Add the owner-write bit to *path* (plus execute for directories)."""
    try:
        mode = lstat(path).st_mode
        if S_ISDIR(mode):
            # Directories also need the execute bit to remain traversable.
            chmod(path, S_IMODE(mode) | S_IWRITE | S_IEXEC)
        elif S_ISREG(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE)
        elif S_ISLNK(mode):
            # Change the mode of the link itself, not its target.
            lchmod(path, S_IMODE(mode) | S_IWRITE)
        else:
            # Sockets, fifos, devices, etc. are left alone.
            log.debug("path cannot be made writable: %s", path)
    except Exception as e:
        eno = getattr(e, 'errno', None)
        if eno in (ENOENT,):
            log.debug("tried to make writable, but didn't exist: %s", path)
            raise
        elif eno in (EACCES, EPERM):
            # Insufficient permission is tolerated; callers fall back to
            # other strategies (e.g. moving to trash).
            log.debug("tried make writable but failed: %s\n%r", path, e)
        else:
            log.warn("Error making path writable: %s\n%r", path, e)
            raise
def recursive_make_writable(path):
    """Make *path* writable; if it is a directory, every file and subdirectory too."""
    # The need for this function was pointed out at
    # https://github.com/conda/conda/issues/3266#issuecomment-239241915
    # Especially on windows, file removal will often fail because it is marked read-only
    if isdir(path):
        for root, dirs, files in walk(path):
            # Use a distinct name for the walk entries: the original code reused
            # (shadowed) the `path` parameter here, obscuring which value was
            # being logged and processed.
            for entry in chain.from_iterable((files, dirs)):
                try:
                    exp_backoff_fn(make_writable, join(root, entry))
                except (IOError, OSError) as e:
                    if e.errno == ENOENT:
                        log.debug("no such file or directory: %s", entry)
                    else:
                        raise
    else:
        exp_backoff_fn(make_writable, path)
def exp_backoff_fn(fn, *args, **kwargs):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    # Elsewhere, call straight through: the EPERM/EACCES retry dance is only
    # needed for Windows file locking behavior.
    if not on_win:
        return fn(*args, **kwargs)

    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    max_tries = 7
    for n in range(max_tries):
        try:
            result = fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (EPERM, EACCES):
                if n == max_tries-1:
                    raise
                # Exponential backoff with jitter: 0.1 * (2**n + random()).
                sleep_time = ((2 ** n) + random.random()) * 0.1
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          sleep_time)
                sleep(sleep_time)
            elif e.errno in (ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.warn("Uncaught backoff with errno %d", e.errno)
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is 5. This only applies
    to deleting a directory.
    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    try:
        path = abspath(path)
        log.debug("rm_rf %s", path)
        if isdir(path):
            try:
                # On Windows, always move to trash first.
                if trash and on_win:
                    move_result = move_path_to_trash(path, preclean=False)
                    if move_result:
                        return True
                backoff_rmdir(path)
            finally:
                # If path was removed, ensure it's not in linked_data_
                if islink(path) or isfile(path):
                    from conda.install import delete_linked_data_any
                    delete_linked_data_any(path)
        # Deliberately `if`, not `elif`: when `path` was a symlink to a
        # directory, backoff_rmdir() removed the target but left the link
        # itself behind, and it must still be unlinked here.
        if lexists(path):
            try:
                backoff_unlink(path)
                return True
            except (OSError, IOError) as e:
                log.debug("%r errno %d\nCannot unlink %s.", e, e.errno, path)
                if trash:
                    move_result = move_path_to_trash(path)
                    if move_result:
                        return True
                log.info("Failed to remove %s.", path)
        else:
            log.debug("rm_rf failed. Not a link, file, or directory: %s", path)
        return True
    finally:
        # NOTE: the return in this finally block swallows any in-flight
        # exception when the path still exists, turning it into False.
        if lexists(path):
            log.info("rm_rf failed for %s", path)
            return False
def delete_trash(prefix=None):
    """Best-effort removal of everything inside each package cache's .trash directory."""
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not lexists(trash_dir):
            log.debug("Trash directory %s doesn't exist. Moving on.", trash_dir)
            continue
        log.debug("removing trash for %s", trash_dir)
        for entry in listdir(trash_dir):
            entry_path = join(trash_dir, entry)
            # Pick the right removal strategy, but never let one stubborn
            # entry abort the whole sweep.
            remover = backoff_rmdir if isdir(entry_path) else backoff_unlink
            try:
                remover(entry_path)
            except (IOError, OSError) as e:
                log.info("Could not delete path in trash dir %s\n%r", entry_path, e)
        if listdir(trash_dir):
            log.info("Unable to clean trash directory %s", trash_dir)
def move_to_trash(prefix, f, tempdir=None):
    """
    Deprecated alias for `move_path_to_trash`.

    Moves file or folder *f* (relative to *prefix*) into the trash.
    *tempdir* is accepted for backward compatibility and ignored.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash
    """
    # NOTE(review): `preclean` is unused in this function body — confirm
    # whether callers still rely on it before removing.
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')

        try:
            makedirs(trash_dir)
        except (IOError, OSError) as e1:
            if e1.errno != EEXIST:
                # Cannot create a trash dir here; try the next package cache.
                continue

        # Random name avoids collisions with anything already in the trash.
        trash_file = join(trash_dir, text_type(uuid4()))

        try:
            rename(path, trash_file)
        except (IOError, OSError) as e:
            # rename can fail across devices; fall through to the next cache dir.
            log.debug("Could not move %s to %s.\n%r", path, trash_file, e)
        else:
            log.debug("Moved to trash: %s", path)
            from ..install import delete_linked_data_any
            delete_linked_data_any(path)
            return True

    return False
| conda/common/disk.py
--- a/conda/common/disk.py
+++ b/conda/common/disk.py
@@ -6,7 +6,7 @@
from itertools import chain
from logging import getLogger
from os import W_OK, access, chmod, getpid, listdir, lstat, makedirs, rename, unlink, walk
-from os.path import abspath, basename, dirname, isdir, join, lexists
+from os.path import abspath, basename, dirname, isdir, isfile, islink, join, lexists
from shutil import rmtree
from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISLNK, S_ISREG, S_IWRITE
from time import sleep
@@ -192,10 +192,10 @@ def rm_rf(path, max_retries=5, trash=True):
backoff_rmdir(path)
finally:
# If path was removed, ensure it's not in linked_data_
- if not isdir(path):
+ if islink(path) or isfile(path):
from conda.install import delete_linked_data_any
delete_linked_data_any(path)
- elif lexists(path):
+ if lexists(path):
try:
backoff_unlink(path)
return True |
BUG: CONDARC env var broken in latest conda
After upgrading to conda version 4.2.7 the CONDARC env var no longer supports filenames of any format. It appears to only support filenames that end with .yml or .condarc.
This is a functionality regression bug. Can this be fixed immediately!?
**conda info**
Current conda install:
```
platform : osx-64
conda version : 4.2.7
conda is private : False
conda-env version : 4.2.7
conda-build version : 1.21.3
python version : 2.7.12.final.0
requests version : 2.9.1
root environment : /opt/anaconda (writable)
default environment : /opt/anaconda
envs directories : /opt/anaconda/envs
package cache : /opt/anaconda/pkgs
channel URLs : https://repo.continuum.io/pkgs/free/osx-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/osx-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://conda.anaconda.org/r/osx-64/
https://conda.anaconda.org/r/noarch/
config file : /Users/jhull/.condarc
offline mode : False
```
**export CONDARC=~/.condarc.cloud**
**conda info**
Traceback (most recent call last):
File "/opt/anaconda/bin/conda", line 4, in <module>
import conda.cli
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/**init**.py", line 8, in <module>
from .main import main # NOQA
File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 46, in <module>
from ..base.context import context
File "/opt/anaconda/lib/python2.7/site-packages/conda/base/context.py", line 252, in <module>
context = Context(SEARCH_PATH, conda, None)
File "/opt/anaconda/lib/python2.7/site-packages/conda/common/configuration.py", line 692, in **init**
self._add_search_path(search_path)
File "/opt/anaconda/lib/python2.7/site-packages/conda/common/configuration.py", line 699, in _add_search_path
return self._add_raw_data(load_file_configs(search_path))
File "/opt/anaconda/lib/python2.7/site-packages/conda/common/configuration.py", line 371, in load_file_configs
raw_data = odict(kv for kv in chain.from_iterable(load_paths))
File "/opt/anaconda/lib/python2.7/collections.py", line 69, in __init__
self.__update(_args, *_kwds)
File "/opt/anaconda/lib/python2.7/_abcoll.py", line 571, in update
for key, value in other:
File "/opt/anaconda/lib/python2.7/site-packages/conda/common/configuration.py", line 371, in <genexpr>
raw_data = odict(kv for kv in chain.from_iterable(load_paths))
File "/opt/anaconda/lib/python2.7/site-packages/conda/common/configuration.py", line 346, in _file_yaml_loader
assert fullpath.endswith(".yml") or fullpath.endswith("condarc"), fullpath
AssertionError: /Users/jhull/.condarc.cloud
| conda/common/configuration.py
<|code_start|>
# -*- coding: utf-8 -*-
"""
A generalized application configuration utility.
Features include:
- lazy eval
- merges configuration files
- parameter type validation, with custom validation
- parameter aliases
Easily extensible to other source formats, e.g. json and ini
Limitations:
- at the moment only supports a "flat" config structure; no nested data structures
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
from collections import Mapping, Set, defaultdict
from enum import Enum
from glob import glob
from itertools import chain
from logging import getLogger
from os import environ, stat
from os.path import join
from stat import S_IFDIR, S_IFMT, S_IFREG
try:
from cytoolz.dicttoolz import merge
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concat, concatv, unique
except ImportError:
from .._vendor.toolz.dicttoolz import merge
from .._vendor.toolz.functoolz import excepts
from .._vendor.toolz.itertoolz import concat, concatv, unique
try:
from ruamel_yaml.comments import CommentedSeq, CommentedMap
except ImportError: # pragma: no cover
from ruamel.yaml.comments import CommentedSeq, CommentedMap # pragma: no cover
from .. import CondaError, CondaMultiError
from .._vendor.auxlib.collection import first, frozendict, last, AttrDict
from .._vendor.auxlib.exceptions import ThisShouldNeverHappenError
from .._vendor.auxlib.path import expand
from .._vendor.auxlib.type_coercion import typify_data_structure, TypeCoercionError
from ..base.constants import EMPTY_MAP, NULL
from .compat import (isiterable, iteritems, odict, primitive_types, text_type,
with_metaclass, string_types, itervalues)
from .yaml import yaml_load
__all__ = ["Configuration", "PrimitiveParameter",
"SequenceParameter", "MapParameter"]
log = getLogger(__name__)
def pretty_list(iterable, padding=' '):  # TODO: move elsewhere in conda.common
    """Render *iterable* as an indented, dash-bulleted block, one item per line."""
    items = [iterable] if not isiterable(iterable) else iterable
    return '\n'.join("%s- %s" % (padding, entry) for entry in items)
def pretty_map(dictionary, padding=' '):
    """Render a mapping as indented "key: value" lines."""
    lines = ("%s%s: %s" % (padding, key, value) for key, value in iteritems(dictionary))
    return '\n'.join(lines)
class ConfigurationError(CondaError):
    """Base class for every error raised by the configuration framework."""
    pass
class ValidationError(ConfigurationError):
    """A declared configuration parameter failed validation.

    Attributes:
        parameter_name: name of the offending parameter
        parameter_value: the invalid value
        source: where the value was declared (e.g. a file path)
    """

    def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs):
        self.parameter_name = parameter_name
        self.parameter_value = parameter_value
        self.source = source
        # Was `super(ConfigurationError, self)`, which started the MRO lookup
        # *above* ConfigurationError; name this class itself, as the other
        # subclasses in this module do.
        super(ValidationError, self).__init__(msg, **kwargs)

    def __str__(self):
        return ("Parameter %s = %r declared in %s is invalid."
                % (self.parameter_name, self.parameter_value, self.source))
class MultipleKeysError(ValidationError):
    """Raised when one config file declares several aliases of the same parameter."""

    def __init__(self, source, keys, preferred_key):
        self.source = source
        self.keys = keys
        # A newline is required after the key list: pretty_list() joins with
        # '\n' and emits no trailing newline, so without it "Must declare..."
        # was glued onto the last key.
        msg = ("Multiple aliased keys in file %s:\n"
               "%s\n"
               "Must declare only one. Prefer '%s'" % (source, pretty_list(keys), preferred_key))
        super(MultipleKeysError, self).__init__(preferred_key, None, source, msg=msg)
class InvalidTypeError(ValidationError):
    """A parameter value has a type outside the parameter's allowed set."""

    def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None):
        self.wrong_type = wrong_type
        self.valid_types = valid_types
        if msg is None:
            msg = ("Parameter %s = %r declared in %s has type %s.\n"
                   "Valid types: %s." % (parameter_name, parameter_value,
                                         source, wrong_type, pretty_list(valid_types)))
        super(InvalidTypeError, self).__init__(parameter_name, parameter_value, source, msg=msg)
class InvalidElementTypeError(InvalidTypeError):
    """Raised when one element of a sequence/map parameter has the wrong type.

    index_or_key identifies the offending element: an int index for sequences,
    a mapping key otherwise.
    """
    def __init__(self, parameter_name, parameter_value, source, wrong_type,
                 valid_types, index_or_key):
        qualifier = "at index" if isinstance(index_or_key, int) else "for key"
        msg = ("Parameter %s declared in %s has invalid element %r %s %s.\n"
               "Valid element types:\n"
               "%s." % (parameter_name, source, parameter_value, qualifier,
                        index_or_key, pretty_list(valid_types)))
        super(InvalidElementTypeError, self).__init__(parameter_name, parameter_value, source,
                                                      wrong_type, valid_types, msg=msg)
class CustomValidationError(ValidationError):
    """Raised when a user-supplied validation callable rejects a value.

    custom_message is the string returned by the validation function (or a
    coercion error message) explaining why the value is invalid.
    """
    def __init__(self, parameter_name, parameter_value, source, custom_message):
        msg = ("Parameter %s = %r declared in %s is invalid.\n"
               "%s" % (parameter_name, parameter_value, source, custom_message))
        super(CustomValidationError, self).__init__(parameter_name, parameter_value, source,
                                                    msg=msg)
class MultiValidationError(CondaMultiError, ConfigurationError):
    """Aggregates several validation errors so all can be reported at once."""
    def __init__(self, errors, *args, **kwargs):
        super(MultiValidationError, self).__init__(errors, *args, **kwargs)
def raise_errors(errors):
    """Raise collected validation errors, or return True when there are none.

    A single error is raised as-is; several are bundled into one
    MultiValidationError.
    """
    if errors:
        if len(errors) == 1:
            raise errors[0]
        raise MultiValidationError(errors)
    return True
class ParameterFlag(Enum):
    """Markers that annotate configuration keys/values in source files.

    ``final`` locks a key's value against later sources; ``top`` / ``bottom``
    pin a sequence element to the front / back of the merged result.
    """
    final = 'final'
    top = "top"
    bottom = "bottom"
    def __str__(self):
        return "%s" % self.value
    @classmethod
    def from_name(cls, name):
        """Look up a member by its attribute name (e.g. 'final')."""
        return cls[name]
    @classmethod
    def from_value(cls, value):
        """Look up a member by its string value."""
        return cls(value)
    @classmethod
    def from_string(cls, string):
        """Parse a raw marker such as '#!final'; return None when unrecognized."""
        try:
            return cls.from_value(string.strip('!#'))
        except (ValueError, AttributeError):
            # ValueError: not a known flag; AttributeError: string was None
            return None
# TODO: move elsewhere, probably auxlib
# TODO: need to add order to at least frozendict, and preferably frozenset
def make_immutable(value):
    """Return a read-only counterpart of *value*.

    Mappings become frozendict, sets become frozenset, other iterables become
    tuples; anything else passes through unchanged.
    """
    if isinstance(value, Mapping):
        return frozendict(value)
    if isinstance(value, Set):
        return frozenset(value)
    if isiterable(value):
        return tuple(value)
    return value
@with_metaclass(ABCMeta)
class RawParameter(object):
    """Abstract base: one parameter as read, unprocessed, from a single source."""
    def __init__(self, source, key, raw_value):
        self.source = source
        self.key = key
        self._raw_value = raw_value
    def __repr__(self):
        return text_type(vars(self))
    @abstractmethod
    def value(self, parameter_obj):
        """Return the usable value for the given Parameter descriptor."""
        raise NotImplementedError()
    @abstractmethod
    def keyflag(self):
        """Return the ParameterFlag attached to this key, or None."""
        raise NotImplementedError()
    @abstractmethod
    def valueflags(self, parameter_obj):
        """Return per-element flags for sequence/map values."""
        raise NotImplementedError()
    @classmethod
    def make_raw_parameters(cls, source, from_map):
        """Wrap every entry of *from_map* in an instance of *cls*."""
        if not from_map:
            return EMPTY_MAP
        return {key: cls(source, key, from_map[key]) for key in from_map}
class EnvRawParameter(RawParameter):
    """A raw parameter sourced from an environment variable."""
    source = 'envvars'
    def value(self, parameter_obj):
        # Sequence-valued parameters expose string_delimiter; split on it.
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            raw_value = self._raw_value
            if string_delimiter in raw_value:
                value = raw_value.split(string_delimiter)
            else:
                value = [raw_value]
            return tuple(v.strip() for v in value)
        else:
            # Primitive parameter: drop a trailing "!important" marker, if any.
            return self.__important_split_value[0].strip()
    def keyflag(self):
        # "!important" anywhere in the env var value marks the key as final.
        return ParameterFlag.final if len(self.__important_split_value) >= 2 else None
    def valueflags(self, parameter_obj):
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            # Env vars carry no per-element flags yet; emit one '' per element.
            return tuple('' for _ in self._raw_value.split(string_delimiter))
        else:
            return self.__important_split_value[0].strip()
    @property
    def __important_split_value(self):
        # Splitting "value!important" yields a list of length >= 2 iff marked.
        return self._raw_value.split("!important")
    @classmethod
    def make_raw_parameters(cls, appname):
        # Collect env vars prefixed "APPNAME_", stripping the prefix and
        # lowercasing the remainder to form the parameter key.
        keystart = "{0}_".format(appname.upper())
        raw_env = dict((k.replace(keystart, '', 1).lower(), v)
                       for k, v in iteritems(environ) if k.startswith(keystart))
        return super(EnvRawParameter, cls).make_raw_parameters(EnvRawParameter.source, raw_env)
class ArgParseRawParameter(RawParameter):
    """A raw parameter sourced from parsed command-line arguments."""
    source = 'cmd_line'
    def value(self, parameter_obj):
        # argparse may hand over mutable containers; freeze them for caching.
        return make_immutable(self._raw_value)
    def keyflag(self):
        # Command-line parameters carry no key flags.
        return None
    def valueflags(self, parameter_obj):
        # ...nor per-element flags.
        return None
    @classmethod
    def make_raw_parameters(cls, args_from_argparse):
        source = ArgParseRawParameter.source
        return super(ArgParseRawParameter, cls).make_raw_parameters(source, args_from_argparse)
class YamlRawParameter(RawParameter):
    """A raw parameter sourced from a yaml file, with flag markers read from
    yaml comments (e.g. '#!final') via ruamel's round-trip loader."""
    # this class should encapsulate all direct use of ruamel.yaml in this module
    def __init__(self, source, key, raw_value, keycomment):
        self._keycomment = keycomment
        super(YamlRawParameter, self).__init__(source, key, raw_value)
    def value(self, parameter_obj):
        self.__process(parameter_obj)
        return self._value
    def keyflag(self):
        return ParameterFlag.from_string(self._keycomment)
    def valueflags(self, parameter_obj):
        self.__process(parameter_obj)
        return self._valueflags
    def __process(self, parameter_obj):
        # Lazily convert the ruamel node into plain immutable values plus
        # parsed flags; results are memoized on self._value / self._valueflags.
        if hasattr(self, '_value'):
            return
        elif isinstance(self._raw_value, CommentedSeq):
            valuecomments = self._get_yaml_list_comments(self._raw_value)
            self._valueflags = tuple(ParameterFlag.from_string(s) for s in valuecomments)
            self._value = tuple(self._raw_value)
        elif isinstance(self._raw_value, CommentedMap):
            valuecomments = self._get_yaml_map_comments(self._raw_value)
            self._valueflags = dict((k, ParameterFlag.from_string(v))
                                    for k, v in iteritems(valuecomments) if v is not None)
            self._value = frozendict(self._raw_value)
        elif isinstance(self._raw_value, primitive_types):
            self._valueflags = None
            self._value = self._raw_value
        else:
            raise ThisShouldNeverHappenError()  # pragma: no cover
    @staticmethod
    def _get_yaml_key_comment(commented_dict, key):
        # ruamel stores end-of-line comments at ca.items[key][2].
        try:
            return commented_dict.ca.items[key][2].value.strip()
        except (AttributeError, KeyError):
            return None
    @staticmethod
    def _get_yaml_list_comments(value):
        # One entry per list element; None where no comment is attached.
        items = value.ca.items
        raw_comment_lines = tuple(excepts((AttributeError, KeyError, TypeError),
                                          lambda q: items.get(q)[0].value.strip() or None,
                                          lambda _: None  # default value on exception
                                          )(q)
                                  for q in range(len(value)))
        return raw_comment_lines
    @staticmethod
    def _get_yaml_map_comments(rawvalue):
        # One entry per map key; None where no comment is attached.
        return dict((key, excepts(KeyError,
                                  lambda k: rawvalue.ca.items[k][2].value.strip() or None,
                                  lambda _: None  # default value on exception
                                  )(key))
                    for key in rawvalue)
    @classmethod
    def make_raw_parameters(cls, source, from_map):
        if from_map:
            return dict((key, cls(source, key, from_map[key],
                                  cls._get_yaml_key_comment(from_map, key)))
                        for key in from_map)
        return EMPTY_MAP
    @classmethod
    def make_raw_parameters_from_file(cls, filepath):
        with open(filepath, 'r') as fh:
            ruamel_yaml = yaml_load(fh)
        return cls.make_raw_parameters(filepath, ruamel_yaml) or EMPTY_MAP
def load_file_configs(search_path):
    """Load raw parameters from every yaml config file along *search_path*.

    Returns:
        OrderedDict: filepath -> {key: YamlRawParameter}, in search-path order.
    """
    def _file_yaml_loader(fullpath):
        # FIX (consistency with the updated copy of this module): also accept
        # ".yaml" files and condarc-style names such as ".condarc" or
        # "condarc.yml"; previously only ".yml" and paths ending exactly in
        # "condarc" passed the assertion.
        from os.path import basename  # local import keeps this fix self-contained
        assert fullpath.endswith((".yml", ".yaml")) or "condarc" in basename(fullpath), fullpath
        yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath)
    def _dir_yaml_loader(fullpath):
        for filepath in glob(join(fullpath, "*.yml")):
            yield filepath, YamlRawParameter.make_raw_parameters_from_file(filepath)
    # map a stat result to a file loader or a directory loader
    _loader = {
        S_IFREG: _file_yaml_loader,
        S_IFDIR: _dir_yaml_loader,
    }
    def _get_st_mode(path):
        # stat the path for file type, or None if path doesn't exist
        try:
            return S_IFMT(stat(path).st_mode)
        except OSError:
            return None
    expanded_paths = tuple(expand(path) for path in search_path)
    stat_paths = (_get_st_mode(path) for path in expanded_paths)
    load_paths = (_loader[st_mode](path)
                  for path, st_mode in zip(expanded_paths, stat_paths)
                  if st_mode is not None)
    raw_data = odict(kv for kv in chain.from_iterable(load_paths))
    return raw_data
@with_metaclass(ABCMeta)
class Parameter(object):
    """Abstract descriptor for one configuration parameter on a Configuration.

    Subclasses set _type (the merged value's container type), _element_type,
    and implement _merge(). Values are resolved lazily on attribute access and
    cached on the owning Configuration instance.
    """
    _type = None
    _element_type = None
    def __init__(self, default, aliases=(), validation=None):
        self._name = None
        self._names = None
        self.default = default
        self.aliases = aliases
        self._validation = validation
    def _set_name(self, name):
        # this is an explicit method, and not a descriptor/setter
        # it's meant to be called by the Configuration metaclass
        self._name = name
        self._names = frozenset(x for x in chain(self.aliases, (name, )))
        return name
    @property
    def name(self):
        """The parameter's primary (attribute) name."""
        if self._name is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._name
    @property
    def names(self):
        """Frozenset of the primary name plus all aliases."""
        if self._names is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._names
    def _raw_parameters_from_single_source(self, raw_parameters):
        # while supporting parameter name aliases, we enforce that only one definition is given
        # per data source
        # Returns (match, error); either element may be None.
        keys = self.names & frozenset(raw_parameters.keys())
        matches = {key: raw_parameters[key] for key in keys}
        numkeys = len(keys)
        if numkeys == 0:
            return None, None
        elif numkeys == 1:
            return next(itervalues(matches)), None
        elif self.name in keys:
            # Several aliases given; prefer the primary name but still report.
            return matches[self.name], MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                                         keys, self.name)
        else:
            return None, MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                           keys, self.name)
    def _get_all_matches(self, instance):
        # a match is a raw parameter instance
        # Walk every source in instance.raw_data, in order.
        matches = []
        multikey_exceptions = []
        for filepath, raw_parameters in iteritems(instance.raw_data):
            match, error = self._raw_parameters_from_single_source(raw_parameters)
            if match is not None:
                matches.append(match)
            if error:
                multikey_exceptions.append(error)
        return matches, multikey_exceptions
    @abstractmethod
    def _merge(self, matches):
        """Combine raw matches from all sources into a single value."""
        raise NotImplementedError()
    def __get__(self, instance, instance_type):
        # strategy is "extract and merge," which is actually just map and reduce
        # extract matches from each source in SEARCH_PATH
        # then merge matches together
        if self.name in instance._cache:
            return instance._cache[self.name]
        matches, errors = self._get_all_matches(instance)
        try:
            result = typify_data_structure(self._merge(matches) if matches else self.default,
                                           self._element_type)
        except TypeCoercionError as e:
            errors.append(CustomValidationError(self.name, e.value, "<<merged>>", text_type(e)))
        else:
            errors.extend(self.collect_errors(instance, result))
        raise_errors(errors)
        instance._cache[self.name] = result
        return result
    def collect_errors(self, instance, value, source="<<merged>>"):
        """Validate a Parameter value.
        Args:
            instance (Configuration): The instance object to which the Parameter descriptor is
                attached.
            value: The value to be validated.
        Returns:
            list: ValidationError instances; empty when the value is valid.
        """
        errors = []
        if not isinstance(value, self._type):
            errors.append(InvalidTypeError(self.name, value, source, type(value),
                                           self._type))
        elif self._validation is not None:
            # A validation callable may return False (invalid) or a string
            # describing why the value is invalid.
            result = self._validation(value)
            if result is False:
                errors.append(ValidationError(self.name, value, source))
            elif isinstance(result, string_types):
                errors.append(CustomValidationError(self.name, value, source, result))
        return errors
    def _match_key_is_important(self, raw_parameter):
        # A key flagged 'final' stops merging at that source.
        return raw_parameter.keyflag() is ParameterFlag.final
    def _first_important_matches(self, matches):
        # Keep matches up to and including the first 'final' one; all of them
        # when none is flagged.
        idx = first(enumerate(matches), lambda x: self._match_key_is_important(x[1]),
                    apply=lambda x: x[0])
        return matches if idx is None else matches[:idx+1]
    @staticmethod
    def _str_format_flag(flag):
        return " #!%s" % flag if flag is not None else ''
    @staticmethod
    def _str_format_value(value):
        if value is None:
            return 'None'
        return value
    @classmethod
    def repr_raw(cls, raw_parameter):
        # Subclasses render a raw parameter back into yaml-like text.
        raise NotImplementedError()
class PrimitiveParameter(Parameter):
    """Parameter type for a Configuration class that holds a single python primitive value.
    The python primitive types are str, int, float, complex, bool, and NoneType. In addition,
    python 2 has long and unicode types.
    """
    def __init__(self, default, aliases=(), validation=None, parameter_type=None):
        """
        Args:
            default (Any): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
            parameter_type (type or Tuple[type]): Type-validation of parameter's value. If None,
                type(default) is used.
        """
        self._type = parameter_type if parameter_type is not None else type(default)
        self._element_type = self._type
        super(PrimitiveParameter, self).__init__(default, aliases, validation)
    def _merge(self, matches):
        # A match flagged 'final' wins outright; otherwise the last source wins.
        important_match = first(matches, self._match_key_is_important, default=None)
        if important_match is not None:
            return important_match.value(self)
        last_match = last(matches, lambda x: x is not None, default=None)
        if last_match is not None:
            return last_match.value(self)
        raise ThisShouldNeverHappenError()  # pragma: no cover
    def repr_raw(self, raw_parameter):
        """Render as a single yaml-style 'key: value  #!flag' line."""
        key = raw_parameter.key
        rendered_value = self._str_format_value(raw_parameter.value(self))
        rendered_flag = self._str_format_flag(raw_parameter.keyflag())
        return "%s: %s%s" % (key, rendered_value, rendered_flag)
class SequenceParameter(Parameter):
    """Parameter type for a Configuration class that holds a sequence (i.e. list) of python
    primitive values.
    """
    _type = tuple
    def __init__(self, element_type, default=(), aliases=(), validation=None,
                 string_delimiter=','):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element in
                the sequence.
            default (Iterable[str]): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
            string_delimiter (str): Separator used when the raw value arrives as one
                delimited string (e.g. from an environment variable).
        """
        self._element_type = element_type
        self.string_delimiter = string_delimiter
        super(SequenceParameter, self).__init__(default, aliases, validation)
    def collect_errors(self, instance, value, source="<<merged>>"):
        # Validate the container type first, then each element's type.
        errors = super(SequenceParameter, self).collect_errors(instance, value)
        element_type = self._element_type
        for idx, element in enumerate(value):
            if not isinstance(element, element_type):
                errors.append(InvalidElementTypeError(self.name, element, source,
                                                      type(element), element_type, idx))
        return errors
    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)
        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker, parameter_obj):
            return tuple(line
                         for line, flag in zip(match.value(parameter_obj),
                                               match.valueflags(parameter_obj))
                         if flag is marker)
        top_lines = concat(get_marked_lines(m, ParameterFlag.top, self) for m in relevant_matches)
        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom, self) for m in
                              reversed(relevant_matches))
        # now, concat all lines, while reversing the matches
        # reverse because elements closer to the end of search path take precedence
        all_lines = concat(m.value(self) for m in reversed(relevant_matches))
        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))
        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        #       for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))
        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))
    def repr_raw(self, raw_parameter):
        # Render as a yaml block sequence, one "  - value  #!flag" line each.
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for q, value in enumerate(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self)[q]
            lines.append("  - %s%s" % (self._str_format_value(value),
                                       self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class MapParameter(Parameter):
    """Parameter type for a Configuration class that holds a map (i.e. dict) of python
    primitive values.
    """
    _type = dict
    def __init__(self, element_type, default=None, aliases=(), validation=None):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element.
            default (Mapping): The parameter's default value. If None, will be an empty dict.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        super(MapParameter, self).__init__(default or dict(), aliases, validation)
    def collect_errors(self, instance, value, source="<<merged>>"):
        # Validate the mapping type, then every value's type (keys not checked).
        errors = super(MapParameter, self).collect_errors(instance, value)
        if isinstance(value, Mapping):
            element_type = self._element_type
            errors.extend(InvalidElementTypeError(self.name, val, source, type(val),
                                                  element_type, key)
                          for key, val in iteritems(value) if not isinstance(val, element_type))
        return errors
    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)
        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self))
                                    if key_is_important(match, k))
                               for match in relevant_matches)
        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self) for m in relevant_matches),
                             reversed(important_maps)))
    def repr_raw(self, raw_parameter):
        # Render as a yaml block map, one "  key: value  #!flag" line each.
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for valuekey, value in iteritems(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self).get(valuekey)
            lines.append("  %s: %s%s" % (valuekey, self._str_format_value(value),
                                         self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class ConfigurationType(type):
    """metaclass for Configuration"""
    def __init__(cls, name, bases, attr):
        super(ConfigurationType, cls).__init__(name, bases, attr)
        # Give every Parameter descriptor its attribute name, and record the
        # full set of parameter names on the class.
        declared = ((attname, attval) for attname, attval in iteritems(cls.__dict__)
                    if isinstance(attval, Parameter))
        cls.parameter_names = tuple(attval._set_name(attname) for attname, attval in declared)
@with_metaclass(ConfigurationType)
class Configuration(object):
    """Lazily-evaluated, multi-source application configuration.

    Subclasses declare Parameter descriptors as class attributes; values are
    merged across yaml files, environment variables, and argparse arguments,
    with later sources taking precedence.
    """
    def __init__(self, search_path=(), app_name=None, argparse_args=None):
        # raw_data maps source name -> {key: RawParameter}; insertion order
        # matters because it defines merge precedence.
        self.raw_data = odict()
        self._cache = dict()
        self._validation_errors = defaultdict(list)
        if search_path:
            self._add_search_path(search_path)
        if app_name is not None:
            self._add_env_vars(app_name)
        if argparse_args is not None:
            self._add_argparse_args(argparse_args)
    def _add_search_path(self, search_path):
        # Load yaml config files found along search_path.
        return self._add_raw_data(load_file_configs(search_path))
    def _add_env_vars(self, app_name):
        # Register environment variables prefixed with "<APP_NAME>_".
        self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name)
        self._cache = dict()  # invalidate memoized parameter values
        return self
    def _add_argparse_args(self, argparse_args):
        # NULL-valued args mean "not given" and must not shadow other sources.
        self._argparse_args = AttrDict((k, v) for k, v, in iteritems(vars(argparse_args))
                                       if v is not NULL)
        source = ArgParseRawParameter.source
        self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args)
        self._cache = dict()
        return self
    def _add_raw_data(self, raw_data):
        self.raw_data.update(raw_data)
        self._cache = dict()
        return self
    def check_source(self, source):
        """Type-check and validate every declared parameter for one source.

        Returns:
            tuple: ({key: typed value}, [validation errors]).
        """
        # this method ends up duplicating much of the logic of Parameter.__get__
        # I haven't yet found a way to make it more DRY though
        typed_values = {}
        validation_errors = []
        raw_parameters = self.raw_data[source]
        for key in self.parameter_names:
            parameter = self.__class__.__dict__[key]
            match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters)
            if multikey_error:
                validation_errors.append(multikey_error)
            if match is not None:
                try:
                    typed_value = typify_data_structure(match.value(parameter),
                                                        parameter._element_type)
                except TypeCoercionError as e:
                    validation_errors.append(CustomValidationError(match.key, e.value,
                                                                   match.source, text_type(e)))
                else:
                    collected_errors = parameter.collect_errors(self, typed_value, match.source)
                    if collected_errors:
                        validation_errors.extend(collected_errors)
                    else:
                        typed_values[match.key] = typed_value  # parameter.repr_raw(match)
            else:
                # this situation will happen if there is a multikey_error and none of the
                # matched keys is the primary key
                pass
        return typed_values, validation_errors
    def validate_all(self):
        """Raise (possibly aggregated) errors for every invalid parameter."""
        validation_errors = list(chain.from_iterable(self.check_source(source)[1]
                                                     for source in self.raw_data))
        raise_errors(validation_errors)
    def collect_all(self):
        """Return per-source typed values, raising if any source is invalid."""
        typed_values = odict()
        validation_errors = odict()
        for source in self.raw_data:
            typed_values[source], validation_errors[source] = self.check_source(source)
        raise_errors(tuple(chain.from_iterable(itervalues(validation_errors))))
        return odict((k, v) for k, v in iteritems(typed_values) if v)
# ---- end of first copy; a duplicate copy of conda/common/configuration.py follows ----
# conda/common/configuration.py
# -*- coding: utf-8 -*-
"""
A generalized application configuration utility.
Features include:
- lazy eval
- merges configuration files
- parameter type validation, with custom validation
- parameter aliases
Easily extensible to other source formats, e.g. json and ini
Limitations:
- at the moment only supports a "flat" config structure; no nested data structures
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
from collections import Mapping, Set, defaultdict
from enum import Enum
from glob import glob
from itertools import chain
from logging import getLogger
from os import environ, stat
from os.path import join, basename
from stat import S_IFDIR, S_IFMT, S_IFREG
try:
from cytoolz.dicttoolz import merge
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concat, concatv, unique
except ImportError:
from .._vendor.toolz.dicttoolz import merge
from .._vendor.toolz.functoolz import excepts
from .._vendor.toolz.itertoolz import concat, concatv, unique
try:
from ruamel_yaml.comments import CommentedSeq, CommentedMap
except ImportError: # pragma: no cover
from ruamel.yaml.comments import CommentedSeq, CommentedMap # pragma: no cover
from .. import CondaError, CondaMultiError
from .._vendor.auxlib.collection import first, frozendict, last, AttrDict
from .._vendor.auxlib.exceptions import ThisShouldNeverHappenError
from .._vendor.auxlib.path import expand
from .._vendor.auxlib.type_coercion import typify_data_structure, TypeCoercionError
from ..base.constants import EMPTY_MAP, NULL
from .compat import (isiterable, iteritems, odict, primitive_types, text_type,
with_metaclass, string_types, itervalues)
from .yaml import yaml_load
__all__ = ["Configuration", "PrimitiveParameter",
"SequenceParameter", "MapParameter"]
log = getLogger(__name__)
def pretty_list(iterable, padding='  '):  # TODO: move elsewhere in conda.common
    """Render *iterable* as indented, yaml-style '- item' lines; a scalar is
    treated as a one-element list."""
    if not isiterable(iterable):
        iterable = [iterable]
    return '\n'.join("%s- %s" % (padding, item) for item in iterable)
def pretty_map(dictionary, padding='  '):
    """Render *dictionary* as indented 'key: value' lines, one per entry."""
    return '\n'.join("%s%s: %s" % (padding, key, value) for key, value in iteritems(dictionary))
class ConfigurationError(CondaError):
    """Base class for all configuration-related errors raised by this module."""
    pass
class ValidationError(ConfigurationError):
    """Raised when a configuration parameter's value fails validation.

    Args:
        parameter_name (str): Name of the offending parameter.
        parameter_value: The invalid value.
        source (str): Where the parameter was declared (file path, 'envvars', 'cmd_line').
        msg (str): Optional pre-built message forwarded to the base error.
    """
    def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs):
        self.parameter_name = parameter_name
        self.parameter_value = parameter_value
        self.source = source
        # BUG FIX: was `super(ConfigurationError, self)`, which names the wrong
        # class and skips ConfigurationError in the MRO; super() must be given
        # the class it is called from.
        super(ValidationError, self).__init__(msg, **kwargs)
    def __str__(self):
        return ("Parameter %s = %r declared in %s is invalid."
                % (self.parameter_name, self.parameter_value, self.source))
class MultipleKeysError(ValidationError):
    """Raised when one source declares the same parameter under several aliases."""
    def __init__(self, source, keys, preferred_key):
        self.source = source
        self.keys = keys
        # BUG FIX: a newline was missing after the key list; pretty_list() does
        # not end with '\n', so "Must declare ..." was glued onto the last key.
        msg = ("Multiple aliased keys in file %s:\n"
               "%s\n"
               "Must declare only one. Prefer '%s'" % (source, pretty_list(keys), preferred_key))
        super(MultipleKeysError, self).__init__(preferred_key, None, source, msg=msg)
class InvalidTypeError(ValidationError):
    """Raised when a parameter's merged value has the wrong python type."""
    def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None):
        # wrong_type: the type actually observed; valid_types: what was expected.
        self.wrong_type = wrong_type
        self.valid_types = valid_types
        if msg is None:
            msg = ("Parameter %s = %r declared in %s has type %s.\n"
                   "Valid types: %s." % (parameter_name, parameter_value,
                                         source, wrong_type, pretty_list(valid_types)))
        super(InvalidTypeError, self).__init__(parameter_name, parameter_value, source, msg=msg)
class InvalidElementTypeError(InvalidTypeError):
    """Raised when one element of a sequence/map parameter has the wrong type.

    index_or_key identifies the offending element: an int index for sequences,
    a mapping key otherwise.
    """
    def __init__(self, parameter_name, parameter_value, source, wrong_type,
                 valid_types, index_or_key):
        qualifier = "at index" if isinstance(index_or_key, int) else "for key"
        msg = ("Parameter %s declared in %s has invalid element %r %s %s.\n"
               "Valid element types:\n"
               "%s." % (parameter_name, source, parameter_value, qualifier,
                        index_or_key, pretty_list(valid_types)))
        super(InvalidElementTypeError, self).__init__(parameter_name, parameter_value, source,
                                                      wrong_type, valid_types, msg=msg)
class CustomValidationError(ValidationError):
    """Raised when a user-supplied validation callable rejects a value.

    custom_message is the string returned by the validation function (or a
    coercion error message) explaining why the value is invalid.
    """
    def __init__(self, parameter_name, parameter_value, source, custom_message):
        msg = ("Parameter %s = %r declared in %s is invalid.\n"
               "%s" % (parameter_name, parameter_value, source, custom_message))
        super(CustomValidationError, self).__init__(parameter_name, parameter_value, source,
                                                    msg=msg)
class MultiValidationError(CondaMultiError, ConfigurationError):
    """Aggregates several validation errors so all can be reported at once."""
    def __init__(self, errors, *args, **kwargs):
        super(MultiValidationError, self).__init__(errors, *args, **kwargs)
def raise_errors(errors):
    """Raise collected validation errors, or return True when there are none.

    A single error is raised as-is; several are bundled into one
    MultiValidationError.
    """
    if errors:
        if len(errors) == 1:
            raise errors[0]
        raise MultiValidationError(errors)
    return True
class ParameterFlag(Enum):
    """Markers that annotate configuration keys/values in source files.

    ``final`` locks a key's value against later sources; ``top`` / ``bottom``
    pin a sequence element to the front / back of the merged result.
    """
    final = 'final'
    top = "top"
    bottom = "bottom"
    def __str__(self):
        return "%s" % self.value
    @classmethod
    def from_name(cls, name):
        """Look up a member by its attribute name (e.g. 'final')."""
        return cls[name]
    @classmethod
    def from_value(cls, value):
        """Look up a member by its string value."""
        return cls(value)
    @classmethod
    def from_string(cls, string):
        """Parse a raw marker such as '#!final'; return None when unrecognized."""
        try:
            return cls.from_value(string.strip('!#'))
        except (ValueError, AttributeError):
            # ValueError: not a known flag; AttributeError: string was None
            return None
# TODO: move elsewhere, probably auxlib
# TODO: need to add order to at least frozendict, and preferably frozenset
def make_immutable(value):
    """Return a read-only counterpart of *value*: mappings become frozendict,
    sets become frozenset, other iterables become tuples; anything else passes
    through unchanged."""
    if isinstance(value, Mapping):
        return frozendict(value)
    elif isinstance(value, Set):
        return frozenset(value)
    elif isiterable(value):
        return tuple(value)
    else:
        return value
@with_metaclass(ABCMeta)
class RawParameter(object):
    """Abstract base: one parameter as read, unprocessed, from a single source."""
    def __init__(self, source, key, raw_value):
        self.source = source
        self.key = key
        self._raw_value = raw_value
    def __repr__(self):
        return text_type(vars(self))
    @abstractmethod
    def value(self, parameter_obj):
        # Return the usable value for the given Parameter descriptor.
        raise NotImplementedError()
    @abstractmethod
    def keyflag(self):
        # Return the ParameterFlag attached to this key, or None.
        raise NotImplementedError()
    @abstractmethod
    def valueflags(self, parameter_obj):
        # Return per-element flags for sequence/map values.
        raise NotImplementedError()
    @classmethod
    def make_raw_parameters(cls, source, from_map):
        # Wrap every entry of from_map in an instance of cls.
        if from_map:
            return dict((key, cls(source, key, from_map[key])) for key in from_map)
        return EMPTY_MAP
class EnvRawParameter(RawParameter):
    """A raw parameter sourced from an environment variable."""
    source = 'envvars'
    def value(self, parameter_obj):
        # Sequence-valued parameters expose string_delimiter; split on it.
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            raw_value = self._raw_value
            if string_delimiter in raw_value:
                value = raw_value.split(string_delimiter)
            else:
                value = [raw_value]
            return tuple(v.strip() for v in value)
        else:
            # Primitive parameter: drop a trailing "!important" marker, if any.
            return self.__important_split_value[0].strip()
    def keyflag(self):
        # "!important" anywhere in the env var value marks the key as final.
        return ParameterFlag.final if len(self.__important_split_value) >= 2 else None
    def valueflags(self, parameter_obj):
        if hasattr(parameter_obj, 'string_delimiter'):
            string_delimiter = getattr(parameter_obj, 'string_delimiter')
            # TODO: add stripping of !important, !top, and !bottom
            # Env vars carry no per-element flags yet; emit one '' per element.
            return tuple('' for _ in self._raw_value.split(string_delimiter))
        else:
            return self.__important_split_value[0].strip()
    @property
    def __important_split_value(self):
        # Splitting "value!important" yields a list of length >= 2 iff marked.
        return self._raw_value.split("!important")
    @classmethod
    def make_raw_parameters(cls, appname):
        # Collect env vars prefixed "APPNAME_", stripping the prefix and
        # lowercasing the remainder to form the parameter key.
        keystart = "{0}_".format(appname.upper())
        raw_env = dict((k.replace(keystart, '', 1).lower(), v)
                       for k, v in iteritems(environ) if k.startswith(keystart))
        return super(EnvRawParameter, cls).make_raw_parameters(EnvRawParameter.source, raw_env)
class ArgParseRawParameter(RawParameter):
    """A raw parameter sourced from parsed command-line arguments."""
    source = 'cmd_line'
    def value(self, parameter_obj):
        # argparse may hand over mutable containers; freeze them for caching.
        return make_immutable(self._raw_value)
    def keyflag(self):
        # Command-line parameters carry no key flags.
        return None
    def valueflags(self, parameter_obj):
        # ...nor per-element flags.
        return None
    @classmethod
    def make_raw_parameters(cls, args_from_argparse):
        return super(ArgParseRawParameter, cls).make_raw_parameters(ArgParseRawParameter.source,
                                                                    args_from_argparse)
class YamlRawParameter(RawParameter):
    """A raw parameter sourced from a yaml file, with flag markers read from
    yaml comments (e.g. '#!final') via ruamel's round-trip loader."""
    # this class should encapsulate all direct use of ruamel.yaml in this module
    def __init__(self, source, key, raw_value, keycomment):
        self._keycomment = keycomment
        super(YamlRawParameter, self).__init__(source, key, raw_value)
    def value(self, parameter_obj):
        self.__process(parameter_obj)
        return self._value
    def keyflag(self):
        return ParameterFlag.from_string(self._keycomment)
    def valueflags(self, parameter_obj):
        self.__process(parameter_obj)
        return self._valueflags
    def __process(self, parameter_obj):
        # Lazily convert the ruamel node into plain immutable values plus
        # parsed flags; results are memoized on self._value / self._valueflags.
        if hasattr(self, '_value'):
            return
        elif isinstance(self._raw_value, CommentedSeq):
            valuecomments = self._get_yaml_list_comments(self._raw_value)
            self._valueflags = tuple(ParameterFlag.from_string(s) for s in valuecomments)
            self._value = tuple(self._raw_value)
        elif isinstance(self._raw_value, CommentedMap):
            valuecomments = self._get_yaml_map_comments(self._raw_value)
            self._valueflags = dict((k, ParameterFlag.from_string(v))
                                    for k, v in iteritems(valuecomments) if v is not None)
            self._value = frozendict(self._raw_value)
        elif isinstance(self._raw_value, primitive_types):
            self._valueflags = None
            self._value = self._raw_value
        else:
            raise ThisShouldNeverHappenError()  # pragma: no cover
    @staticmethod
    def _get_yaml_key_comment(commented_dict, key):
        # ruamel stores end-of-line comments at ca.items[key][2].
        try:
            return commented_dict.ca.items[key][2].value.strip()
        except (AttributeError, KeyError):
            return None
    @staticmethod
    def _get_yaml_list_comments(value):
        # One entry per list element; None where no comment is attached.
        items = value.ca.items
        raw_comment_lines = tuple(excepts((AttributeError, KeyError, TypeError),
                                          lambda q: items.get(q)[0].value.strip() or None,
                                          lambda _: None  # default value on exception
                                          )(q)
                                  for q in range(len(value)))
        return raw_comment_lines
    @staticmethod
    def _get_yaml_map_comments(rawvalue):
        # One entry per map key; None where no comment is attached.
        return dict((key, excepts(KeyError,
                                  lambda k: rawvalue.ca.items[k][2].value.strip() or None,
                                  lambda _: None  # default value on exception
                                  )(key))
                    for key in rawvalue)
    @classmethod
    def make_raw_parameters(cls, source, from_map):
        if from_map:
            return dict((key, cls(source, key, from_map[key],
                                  cls._get_yaml_key_comment(from_map, key)))
                        for key in from_map)
        return EMPTY_MAP
    @classmethod
    def make_raw_parameters_from_file(cls, filepath):
        with open(filepath, 'r') as fh:
            ruamel_yaml = yaml_load(fh)
        return cls.make_raw_parameters(filepath, ruamel_yaml) or EMPTY_MAP
def load_file_configs(search_path):
    """Load raw parameters from every yaml config file along *search_path*.

    Returns:
        OrderedDict: filepath -> {key: YamlRawParameter}, in search-path order.
    """
    # returns an ordered map of filepath and dict of raw parameter objects
    def _file_yaml_loader(fullpath):
        # Accepts *.yml, *.yaml, and any condarc-style filename (.condarc, condarc.yml).
        assert fullpath.endswith((".yml", ".yaml")) or "condarc" in basename(fullpath), fullpath
        yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath)
    def _dir_yaml_loader(fullpath):
        # NOTE(review): directories are scanned for *.yml only (not *.yaml) — confirm intended
        for filepath in glob(join(fullpath, "*.yml")):
            yield filepath, YamlRawParameter.make_raw_parameters_from_file(filepath)
    # map a stat result to a file loader or a directory loader
    _loader = {
        S_IFREG: _file_yaml_loader,
        S_IFDIR: _dir_yaml_loader,
    }
    def _get_st_mode(path):
        # stat the path for file type, or None if path doesn't exist
        try:
            return S_IFMT(stat(path).st_mode)
        except OSError:
            return None
    expanded_paths = tuple(expand(path) for path in search_path)
    stat_paths = (_get_st_mode(path) for path in expanded_paths)
    load_paths = (_loader[st_mode](path)
                  for path, st_mode in zip(expanded_paths, stat_paths)
                  if st_mode is not None)
    raw_data = odict(kv for kv in chain.from_iterable(load_paths))
    return raw_data
@with_metaclass(ABCMeta)
class Parameter(object):
    """Abstract descriptor for a single configuration parameter.

    Subclasses set ``_type``/``_element_type`` and implement ``_merge`` to
    combine matches gathered from multiple raw-data sources.
    """
    _type = None          # python type the merged value must be an instance of
    _element_type = None  # type of contained elements, used for typification

    def __init__(self, default, aliases=(), validation=None):
        """
        Args:
            default: Value used when no source provides this parameter.
            aliases (Iterable[str]): Alternate names accepted in raw sources.
            validation (callable): Optional validator; returns False or an
                error string for an invalid value.
        """
        self._name = None
        self._names = None
        self.default = default
        self.aliases = aliases
        self._validation = validation

    def _set_name(self, name):
        # this is an explicit method, and not a descriptor/setter
        # it's meant to be called by the Configuration metaclass
        self._name = name
        self._names = frozenset(x for x in chain(self.aliases, (name, )))
        return name

    @property
    def name(self):
        # primary parameter name; only valid after _set_name has been called
        if self._name is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._name

    @property
    def names(self):
        # frozenset of the primary name plus all aliases
        if self._names is None:
            # The Configuration metaclass should call the `_set_name` method.
            raise ThisShouldNeverHappenError()  # pragma: no cover
        return self._names

    def _raw_parameters_from_single_source(self, raw_parameters):
        """Return a (match, error) pair for one source's raw parameters.

        While supporting parameter name aliases, we enforce that only one
        definition is given per data source: multiple matching keys produce a
        MultipleKeysError, keeping the primary-key match when it is present.
        """
        keys = self.names & frozenset(raw_parameters.keys())
        matches = {key: raw_parameters[key] for key in keys}
        numkeys = len(keys)
        if numkeys == 0:
            return None, None
        elif numkeys == 1:
            return next(itervalues(matches)), None
        elif self.name in keys:
            return matches[self.name], MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                                         keys, self.name)
        else:
            return None, MultipleKeysError(raw_parameters[next(iter(keys))].source,
                                           keys, self.name)

    def _get_all_matches(self, instance):
        # a match is a raw parameter instance; collect one (at most) per source
        matches = []
        multikey_exceptions = []
        for filepath, raw_parameters in iteritems(instance.raw_data):
            match, error = self._raw_parameters_from_single_source(raw_parameters)
            if match is not None:
                matches.append(match)
            if error:
                multikey_exceptions.append(error)
        return matches, multikey_exceptions

    @abstractmethod
    def _merge(self, matches):
        # subclasses combine the raw-parameter matches into one merged value
        raise NotImplementedError()

    def __get__(self, instance, instance_type):
        # strategy is "extract and merge," which is actually just map and reduce
        # extract matches from each source in SEARCH_PATH
        # then merge matches together
        if self.name in instance._cache:
            return instance._cache[self.name]
        matches, errors = self._get_all_matches(instance)
        try:
            result = typify_data_structure(self._merge(matches) if matches else self.default,
                                           self._element_type)
        except TypeCoercionError as e:
            errors.append(CustomValidationError(self.name, e.value, "<<merged>>", text_type(e)))
        else:
            errors.extend(self.collect_errors(instance, result))
        # raise_errors is a no-op for an empty error list; otherwise it raises
        raise_errors(errors)
        instance._cache[self.name] = result
        return result

    def collect_errors(self, instance, value, source="<<merged>>"):
        """Validate a Parameter value.

        Args:
            instance (Configuration): The instance object to which the Parameter descriptor is
                attached.
            value: The value to be validated.
            source (str): Label identifying where the value came from, used in
                the error objects.

        Returns:
            list: Error objects; empty when the value is valid.
        """
        errors = []
        if not isinstance(value, self._type):
            errors.append(InvalidTypeError(self.name, value, source, type(value),
                                           self._type))
        elif self._validation is not None:
            result = self._validation(value)
            if result is False:
                errors.append(ValidationError(self.name, value, source))
            elif isinstance(result, string_types):
                errors.append(CustomValidationError(self.name, value, source, result))
        return errors

    def _match_key_is_important(self, raw_parameter):
        # a key flagged ParameterFlag.final ends consideration of later sources
        return raw_parameter.keyflag() is ParameterFlag.final

    def _first_important_matches(self, matches):
        # keep matches up to and including the first one flagged important;
        # when none is flagged, all matches are kept
        idx = first(enumerate(matches), lambda x: self._match_key_is_important(x[1]),
                    apply=lambda x: x[0])
        return matches if idx is None else matches[:idx+1]

    @staticmethod
    def _str_format_flag(flag):
        # render a ParameterFlag as a yaml comment suffix, e.g. " #!final"
        return " #!%s" % flag if flag is not None else ''

    @staticmethod
    def _str_format_value(value):
        if value is None:
            return 'None'
        return value

    @classmethod
    def repr_raw(cls, raw_parameter):
        # subclasses render a raw parameter back to yaml-ish text
        raise NotImplementedError()
class PrimitiveParameter(Parameter):
    """Parameter type for a Configuration class that holds a single python
    primitive value.

    The python primitive types are str, int, float, complex, bool, and
    NoneType. In addition, python 2 has long and unicode types.
    """

    def __init__(self, default, aliases=(), validation=None, parameter_type=None):
        """
        Args:
            default (Any): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean
                indicating validity, or alternately return a string describing an
                invalid value.
            parameter_type (type or Tuple[type]): Type-validation of parameter's value.
                If None, type(default) is used.
        """
        if parameter_type is None:
            parameter_type = type(default)
        self._type = parameter_type
        self._element_type = parameter_type
        super(PrimitiveParameter, self).__init__(default, aliases, validation)

    def _merge(self, matches):
        # precedence: the first match whose key is flagged important wins
        # outright; otherwise the last non-None match (closest to the end of
        # the search path) wins
        for match in matches:
            if self._match_key_is_important(match):
                return match.value(self)
        for match in reversed(matches):
            if match is not None:
                return match.value(self)
        raise ThisShouldNeverHappenError()  # pragma: no cover

    def repr_raw(self, raw_parameter):
        # render a single "key: value #!flag" yaml-ish line
        rendered_value = self._str_format_value(raw_parameter.value(self))
        rendered_flag = self._str_format_flag(raw_parameter.keyflag())
        return "%s: %s%s" % (raw_parameter.key, rendered_value, rendered_flag)
class SequenceParameter(Parameter):
    """Parameter type for a Configuration class that holds a sequence (i.e. list) of python
    primitive values.
    """
    _type = tuple

    def __init__(self, element_type, default=(), aliases=(), validation=None,
                 string_delimiter=','):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element in
                the sequence.
            default (Iterable[str]): The parameter's default value.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
            string_delimiter (str): Separator used when the raw value arrives as a
                single delimited string.
        """
        self._element_type = element_type
        self.string_delimiter = string_delimiter
        super(SequenceParameter, self).__init__(default, aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        # sequence-level type check from the base class, plus a per-element check
        errors = super(SequenceParameter, self).collect_errors(instance, value)
        element_type = self._element_type
        for idx, element in enumerate(value):
            if not isinstance(element, element_type):
                errors.append(InvalidElementTypeError(self.name, element, source,
                                                      type(element), element_type, idx))
        return errors

    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # get individual lines from important_matches that were marked important
        # these will be prepended to the final result
        def get_marked_lines(match, marker, parameter_obj):
            return tuple(line
                         for line, flag in zip(match.value(parameter_obj),
                                               match.valueflags(parameter_obj))
                         if flag is marker)
        top_lines = concat(get_marked_lines(m, ParameterFlag.top, self) for m in relevant_matches)

        # also get lines that were marked as bottom, but reverse the match order so that lines
        # coming earlier will ultimately be last
        bottom_lines = concat(get_marked_lines(m, ParameterFlag.bottom, self) for m in
                              reversed(relevant_matches))

        # now, concat all lines, while reversing the matches
        # reverse because elements closer to the end of search path take precedence
        all_lines = concat(m.value(self) for m in reversed(relevant_matches))

        # stack top_lines + all_lines, then de-dupe
        top_deduped = tuple(unique(concatv(top_lines, all_lines)))

        # take the top-deduped lines, reverse them, and concat with reversed bottom_lines
        # this gives us the reverse of the order we want, but almost there
        # NOTE: for a line value marked both top and bottom, the bottom marker will win out
        #       for the top marker to win out, we'd need one additional de-dupe step
        bottom_deduped = unique(concatv(reversed(tuple(bottom_lines)), reversed(top_deduped)))

        # just reverse, and we're good to go
        return tuple(reversed(tuple(bottom_deduped)))

    def repr_raw(self, raw_parameter):
        # render as a yaml list, preserving any per-line flags
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for q, value in enumerate(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self)[q]
            lines.append(" - %s%s" % (self._str_format_value(value),
                                      self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class MapParameter(Parameter):
    """Parameter type for a Configuration class that holds a map (i.e. dict) of python
    primitive values.
    """
    _type = dict

    def __init__(self, element_type, default=None, aliases=(), validation=None):
        """
        Args:
            element_type (type or Iterable[type]): The generic type of each element.
            default (Mapping): The parameter's default value. If None, will be an empty dict.
            aliases (Iterable[str]): Alternate names for the parameter.
            validation (callable): Given a parameter value as input, return a boolean indicating
                validity, or alternately return a string describing an invalid value.
        """
        self._element_type = element_type
        super(MapParameter, self).__init__(default or dict(), aliases, validation)

    def collect_errors(self, instance, value, source="<<merged>>"):
        # dict-level type check from the base class, plus a per-value check
        errors = super(MapParameter, self).collect_errors(instance, value)
        if isinstance(value, Mapping):
            element_type = self._element_type
            errors.extend(InvalidElementTypeError(self.name, val, source, type(val),
                                                  element_type, key)
                          for key, val in iteritems(value) if not isinstance(val, element_type))
        return errors

    def _merge(self, matches):
        # get matches up to and including first important_match
        # but if no important_match, then all matches are important_matches
        relevant_matches = self._first_important_matches(matches)

        # mapkeys with important matches
        def key_is_important(match, key):
            return match.valueflags(self).get(key) is ParameterFlag.final
        important_maps = tuple(dict((k, v)
                                    for k, v in iteritems(match.value(self))
                                    if key_is_important(match, k))
                               for match in relevant_matches)

        # dump all matches in a dict
        # then overwrite with important matches
        return merge(concatv((m.value(self) for m in relevant_matches),
                             reversed(important_maps)))

    def repr_raw(self, raw_parameter):
        # render as a yaml map, preserving any per-key flags
        lines = list()
        lines.append("%s:%s" % (raw_parameter.key,
                                self._str_format_flag(raw_parameter.keyflag())))
        for valuekey, value in iteritems(raw_parameter.value(self)):
            valueflag = raw_parameter.valueflags(self).get(valuekey)
            lines.append(" %s: %s%s" % (valuekey, self._str_format_value(value),
                                        self._str_format_flag(valueflag)))
        return '\n'.join(lines)
class ConfigurationType(type):
    """Metaclass for Configuration: wires each Parameter descriptor up with
    the attribute name it was bound to."""

    def __init__(cls, name, bases, attr):
        super(ConfigurationType, cls).__init__(name, bases, attr)
        # tell every Parameter descriptor its attribute name, collecting the
        # names so Configuration methods can iterate declared parameters
        names = []
        for attribute_name, attribute in iteritems(cls.__dict__):
            if isinstance(attribute, Parameter):
                names.append(attribute._set_name(attribute_name))
        cls.parameter_names = tuple(names)
@with_metaclass(ConfigurationType)
class Configuration(object):
    """Aggregates raw configuration data from files, environment variables,
    and argparse, exposing merged values through Parameter descriptors."""

    def __init__(self, search_path=(), app_name=None, argparse_args=None):
        self.raw_data = odict()  # source -> dict of raw parameter objects
        self._cache = dict()     # memoized merged parameter values
        self._validation_errors = defaultdict(list)
        if search_path:
            self._add_search_path(search_path)
        if app_name is not None:
            self._add_env_vars(app_name)
        if argparse_args is not None:
            self._add_argparse_args(argparse_args)

    def _add_search_path(self, search_path):
        # load yaml config files found along search_path
        return self._add_raw_data(load_file_configs(search_path))

    def _add_env_vars(self, app_name):
        # pull parameters from environment variables for app_name
        self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name)
        self._cache = dict()
        return self

    def _add_argparse_args(self, argparse_args):
        # NULL marks "not given on the command line"; those args are dropped
        self._argparse_args = AttrDict((k, v) for k, v, in iteritems(vars(argparse_args))
                                       if v is not NULL)
        source = ArgParseRawParameter.source
        self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args)
        self._cache = dict()
        return self

    def _add_raw_data(self, raw_data):
        # any new raw data invalidates previously merged values
        self.raw_data.update(raw_data)
        self._cache = dict()
        return self

    def check_source(self, source):
        """Typify and validate every parameter defined by a single source.

        Returns:
            tuple: (dict of key -> typed value, list of validation errors).
        """
        # this method ends up duplicating much of the logic of Parameter.__get__
        # I haven't yet found a way to make it more DRY though
        typed_values = {}
        validation_errors = []
        raw_parameters = self.raw_data[source]
        for key in self.parameter_names:
            parameter = self.__class__.__dict__[key]
            match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters)
            if multikey_error:
                validation_errors.append(multikey_error)
            if match is not None:
                try:
                    typed_value = typify_data_structure(match.value(parameter),
                                                        parameter._element_type)
                except TypeCoercionError as e:
                    validation_errors.append(CustomValidationError(match.key, e.value,
                                                                   match.source, text_type(e)))
                else:
                    collected_errors = parameter.collect_errors(self, typed_value, match.source)
                    if collected_errors:
                        validation_errors.extend(collected_errors)
                    else:
                        typed_values[match.key] = typed_value  # parameter.repr_raw(match)
            else:
                # this situation will happen if there is a multikey_error and none of the
                # matched keys is the primary key
                pass
        return typed_values, validation_errors

    def validate_all(self):
        # aggregate validation errors across every loaded source and raise
        validation_errors = list(chain.from_iterable(self.check_source(source)[1]
                                                     for source in self.raw_data))
        raise_errors(validation_errors)

    def collect_all(self):
        # validate every source (raising on any error) and return the typed
        # values per source, omitting sources that contributed nothing
        typed_values = odict()
        validation_errors = odict()
        for source in self.raw_data:
            typed_values[source], validation_errors[source] = self.check_source(source)
        raise_errors(tuple(chain.from_iterable(itervalues(validation_errors))))
        return odict((k, v) for k, v in iteritems(typed_values) if v)
| conda/common/configuration.py
--- a/conda/common/configuration.py
+++ b/conda/common/configuration.py
@@ -23,7 +23,7 @@
from itertools import chain
from logging import getLogger
from os import environ, stat
-from os.path import join
+from os.path import join, basename
from stat import S_IFDIR, S_IFMT, S_IFREG
try:
@@ -343,7 +343,7 @@ def load_file_configs(search_path):
# returns an ordered map of filepath and dict of raw parameter objects
def _file_yaml_loader(fullpath):
- assert fullpath.endswith(".yml") or fullpath.endswith("condarc"), fullpath
+ assert fullpath.endswith((".yml", ".yaml")) or "condarc" in basename(fullpath), fullpath
yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath)
def _dir_yaml_loader(fullpath): |
Removal of handle_proxy_407 breaks conda-build
Elevating as issue from https://github.com/conda/conda/commit/b993816b39a48d807e7a9659246266f6035c3dcd#commitcomment-19452072 so that it does not get lost.
Removal of handle_proxy_407 breaks conda-build (it is used in pypi skeleton code for some reason.) Is there an alternative that I can use instead? Please provide example code.
| conda/exports.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from functools import partial
from warnings import warn
from conda import compat, plan
compat = compat
plan = plan
from conda.api import get_index # NOQA
get_index = get_index
from conda.cli.common import (Completer, InstalledPackages, add_parser_channels, add_parser_prefix, # NOQA
specs_from_args, spec_from_line, specs_from_url) # NOQA
Completer, InstalledPackages = Completer, InstalledPackages
add_parser_channels, add_parser_prefix = add_parser_channels, add_parser_prefix
specs_from_args, spec_from_line = specs_from_args, spec_from_line
specs_from_url = specs_from_url
from conda.cli.conda_argparse import ArgumentParser # NOQA
ArgumentParser = ArgumentParser
from conda.compat import (PY3, StringIO, configparser, input, iteritems, lchmod, string_types, # NOQA
text_type, TemporaryDirectory) # NOQA
PY3, StringIO, configparser, input = PY3, StringIO, configparser, input
iteritems, lchmod, string_types = iteritems, lchmod, string_types
text_type, TemporaryDirectory = text_type, TemporaryDirectory
from conda.connection import CondaSession # NOQA
CondaSession = CondaSession
from conda.fetch import TmpDownload, download, fetch_index # NOQA
TmpDownload, download, fetch_index = TmpDownload, download, fetch_index
handle_proxy_407 = lambda x, y: warn("handle_proxy_407 is deprecated. "
"Now handled by CondaSession.")
from conda.install import (delete_trash, is_linked, linked, linked_data, move_to_trash, # NOQA
prefix_placeholder, rm_rf, symlink_conda, rm_fetched, package_cache) # NOQA
delete_trash, is_linked, linked = delete_trash, is_linked, linked
linked_data, move_to_trash = linked_data, move_to_trash
prefix_placeholder, rm_rf, symlink_conda = prefix_placeholder, rm_rf, symlink_conda
rm_fetched, package_cache = rm_fetched, package_cache
from conda.lock import Locked # NOQA
Locked = Locked
from conda.misc import untracked, walk_prefix # NOQA
untracked, walk_prefix = untracked, walk_prefix
from conda.resolve import MatchSpec, NoPackagesFound, Resolve, Unsatisfiable, normalized_version # NOQA
MatchSpec, NoPackagesFound, Resolve = MatchSpec, NoPackagesFound, Resolve
Unsatisfiable, normalized_version = Unsatisfiable, normalized_version
from conda.signature import KEYS, KEYS_DIR, hash_file, verify # NOQA
KEYS, KEYS_DIR = KEYS, KEYS_DIR
hash_file, verify = hash_file, verify
from conda.utils import (human_bytes, hashsum_file, md5_file, memoized, unix_path_to_win, # NOQA
win_path_to_unix, url_path) # NOQA
human_bytes, hashsum_file, md5_file = human_bytes, hashsum_file, md5_file
memoized, unix_path_to_win = memoized, unix_path_to_win
win_path_to_unix, url_path = win_path_to_unix, url_path
from conda.config import sys_rc_path # NOQA
sys_rc_path = sys_rc_path
from conda.version import VersionOrder # NOQA
VersionOrder = VersionOrder
import conda.base.context # NOQA
import conda.exceptions # NOQA
from conda.base.context import get_prefix as context_get_prefix, non_x86_linux_machines # NOQA
non_x86_linux_machines = non_x86_linux_machines
from conda.base.constants import DEFAULT_CHANNELS # NOQA
get_prefix = partial(context_get_prefix, conda.base.context.context)
get_default_urls = lambda: DEFAULT_CHANNELS
arch_name = conda.base.context.context.arch_name
binstar_upload = conda.base.context.context.binstar_upload
bits = conda.base.context.context.bits
default_prefix = conda.base.context.context.default_prefix
default_python = conda.base.context.context.default_python
envs_dirs = conda.base.context.context.envs_dirs
pkgs_dirs = conda.base.context.context.pkgs_dirs
platform = conda.base.context.context.platform
root_dir = conda.base.context.context.root_dir
root_writable = conda.base.context.context.root_writable
subdir = conda.base.context.context.subdir
from conda.models.channel import get_conda_build_local_url # NOQA
get_rc_urls = lambda: list(conda.base.context.context.channels)
get_local_urls = lambda: list(get_conda_build_local_url()) or []
load_condarc = lambda fn: conda.base.context.reset_context([fn])
PaddingError = conda.exceptions.PaddingError
| conda/exports.py
--- /dev/null
+++ b/conda/exports.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from functools import partial
+from warnings import warn
+
+from conda import compat, plan
+compat = compat
+plan = plan
+
+from conda.api import get_index # NOQA
+get_index = get_index
+
+from conda.cli.common import (Completer, InstalledPackages, add_parser_channels, add_parser_prefix, # NOQA
+ specs_from_args, spec_from_line, specs_from_url) # NOQA
+Completer, InstalledPackages = Completer, InstalledPackages
+add_parser_channels, add_parser_prefix = add_parser_channels, add_parser_prefix
+specs_from_args, spec_from_line = specs_from_args, spec_from_line
+specs_from_url = specs_from_url
+
+from conda.cli.conda_argparse import ArgumentParser # NOQA
+ArgumentParser = ArgumentParser
+
+from conda.compat import (PY3, StringIO, configparser, input, iteritems, lchmod, string_types, # NOQA
+ text_type, TemporaryDirectory) # NOQA
+PY3, StringIO, configparser, input = PY3, StringIO, configparser, input
+iteritems, lchmod, string_types = iteritems, lchmod, string_types
+text_type, TemporaryDirectory = text_type, TemporaryDirectory
+
+from conda.connection import CondaSession # NOQA
+CondaSession = CondaSession
+
+from conda.fetch import TmpDownload, download, fetch_index # NOQA
+TmpDownload, download, fetch_index = TmpDownload, download, fetch_index
+handle_proxy_407 = lambda x, y: warn("handle_proxy_407 is deprecated. "
+ "Now handled by CondaSession.")
+
+from conda.install import (delete_trash, is_linked, linked, linked_data, move_to_trash, # NOQA
+ prefix_placeholder, rm_rf, symlink_conda, rm_fetched, package_cache) # NOQA
+delete_trash, is_linked, linked = delete_trash, is_linked, linked
+linked_data, move_to_trash = linked_data, move_to_trash
+prefix_placeholder, rm_rf, symlink_conda = prefix_placeholder, rm_rf, symlink_conda
+rm_fetched, package_cache = rm_fetched, package_cache
+
+from conda.lock import Locked # NOQA
+Locked = Locked
+
+from conda.misc import untracked, walk_prefix # NOQA
+untracked, walk_prefix = untracked, walk_prefix
+
+from conda.resolve import MatchSpec, NoPackagesFound, Resolve, Unsatisfiable, normalized_version # NOQA
+MatchSpec, NoPackagesFound, Resolve = MatchSpec, NoPackagesFound, Resolve
+Unsatisfiable, normalized_version = Unsatisfiable, normalized_version
+
+from conda.signature import KEYS, KEYS_DIR, hash_file, verify # NOQA
+KEYS, KEYS_DIR = KEYS, KEYS_DIR
+hash_file, verify = hash_file, verify
+
+from conda.utils import (human_bytes, hashsum_file, md5_file, memoized, unix_path_to_win, # NOQA
+ win_path_to_unix, url_path) # NOQA
+human_bytes, hashsum_file, md5_file = human_bytes, hashsum_file, md5_file
+memoized, unix_path_to_win = memoized, unix_path_to_win
+win_path_to_unix, url_path = win_path_to_unix, url_path
+
+from conda.config import sys_rc_path # NOQA
+sys_rc_path = sys_rc_path
+
+from conda.version import VersionOrder # NOQA
+VersionOrder = VersionOrder
+
+
+import conda.base.context # NOQA
+import conda.exceptions # NOQA
+from conda.base.context import get_prefix as context_get_prefix, non_x86_linux_machines # NOQA
+non_x86_linux_machines = non_x86_linux_machines
+
+from conda.base.constants import DEFAULT_CHANNELS # NOQA
+get_prefix = partial(context_get_prefix, conda.base.context.context)
+get_default_urls = lambda: DEFAULT_CHANNELS
+
+arch_name = conda.base.context.context.arch_name
+binstar_upload = conda.base.context.context.binstar_upload
+bits = conda.base.context.context.bits
+default_prefix = conda.base.context.context.default_prefix
+default_python = conda.base.context.context.default_python
+envs_dirs = conda.base.context.context.envs_dirs
+pkgs_dirs = conda.base.context.context.pkgs_dirs
+platform = conda.base.context.context.platform
+root_dir = conda.base.context.context.root_dir
+root_writable = conda.base.context.context.root_writable
+subdir = conda.base.context.context.subdir
+from conda.models.channel import get_conda_build_local_url # NOQA
+get_rc_urls = lambda: list(conda.base.context.context.channels)
+get_local_urls = lambda: list(get_conda_build_local_url()) or []
+load_condarc = lambda fn: conda.base.context.reset_context([fn])
+PaddingError = conda.exceptions.PaddingError | |
error while searching for `notebook`
I recently installed `ipykernel`, after which, commands
`ipython notebook` produced "ImportError: No module named 'notebook'", and
`jupyter notebook` produced "jupyter: 'notebook' is not a Jupyter command".
So, I ran `conda search notebook` and got the following message:
> Current conda install:
```
platform : linux-64
conda version : 4.2.9
conda is private : False
conda-env version : 4.2.9
conda-build version : 1.14.1
python version : 3.4.5.final.0
requests version : 2.9.1
root environment : /usr1/eclark/anaconda3 (writable)
default environment : /usr1/eclark/anaconda3
envs directories : /usr1/eclark/anaconda3/envs
package cache : /usr1/eclark/anaconda3/pkgs
channel URLs : https://conda.anaconda.org/anaconda/linux-64/
https://conda.anaconda.org/anaconda/noarch/
https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : /nfs/jet/home/eclark/.condarc
offline mode : False
```
`$ /usr1/eclark/anaconda3/bin/conda search notebook`
```
Traceback (most recent call last):
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/exceptions.py", line 473, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/cli/main_search.py", line 126, in execute
execute_search(args, parser)
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/cli/main_search.py", line 268, in execute_search
Channel(pkg.channel).canonical_name,
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/models/channel.py", line 44, in __call__
elif value.endswith('.tar.bz2'):
TypeError: endswith first arg must be bytes or a tuple of bytes, not str
```
error while searching for `notebook`
I recently installed `ipykernel`, after which, commands
`ipython notebook` produced "ImportError: No module named 'notebook'", and
`jupyter notebook` produced "jupyter: 'notebook' is not a Jupyter command".
So, I ran `conda search notebook` and got the following message:
> Current conda install:
```
platform : linux-64
conda version : 4.2.9
conda is private : False
conda-env version : 4.2.9
conda-build version : 1.14.1
python version : 3.4.5.final.0
requests version : 2.9.1
root environment : /usr1/eclark/anaconda3 (writable)
default environment : /usr1/eclark/anaconda3
envs directories : /usr1/eclark/anaconda3/envs
package cache : /usr1/eclark/anaconda3/pkgs
channel URLs : https://conda.anaconda.org/anaconda/linux-64/
https://conda.anaconda.org/anaconda/noarch/
https://repo.continuum.io/pkgs/free/linux-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/linux-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : /nfs/jet/home/eclark/.condarc
offline mode : False
```
`$ /usr1/eclark/anaconda3/bin/conda search notebook`
```
Traceback (most recent call last):
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/exceptions.py", line 473, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/cli/main_search.py", line 126, in execute
execute_search(args, parser)
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/cli/main_search.py", line 268, in execute_search
Channel(pkg.channel).canonical_name,
File "/usr1/eclark/anaconda3/lib/python3.4/site-packages/conda/models/channel.py", line 44, in __call__
elif value.endswith('.tar.bz2'):
TypeError: endswith first arg must be bytes or a tuple of bytes, not str
```
| conda/models/channel.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from itertools import chain
from logging import getLogger
from requests.packages.urllib3.util import Url
from ..base.constants import DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN
from ..base.context import context
from ..common.compat import iteritems, odict, with_metaclass
from ..common.url import (has_scheme, is_url, is_windows_path, join_url, on_win, path_to_url,
split_conda_url_easy_parts, split_scheme_auth_token, urlparse)
try:
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concatv, topk
except ImportError:
from .._vendor.toolz.functoolz import excepts # NOQA
from .._vendor.toolz.itertoolz import concatv, topk # NOQA
log = getLogger(__name__)
# backward compatibility for conda-build
def get_conda_build_local_url():
    """Return a 1-tuple containing the configured local build root."""
    return (context.local_build_root,)
"""
scheme <> auth <> location <> token <> channel <> subchannel <> platform <> package_filename
channel <> subchannel <> namespace <> package_name
"""
def tokenized_startswith(test_iterable, startswith_iterable):
    """Return True when the leading tokens of ``test_iterable`` equal the
    tokens of ``startswith_iterable``, comparing pairwise up to the length of
    the shorter iterable (so an empty prefix always matches)."""
    for test_token, prefix_token in zip(test_iterable, startswith_iterable):
        if test_token != prefix_token:
            return False
    return True
def tokenized_conda_url_startswith(test_url, startswith_url):
    """Return True when ``test_url`` has the same host and port as
    ``startswith_url`` and its path starts with ``startswith_url``'s path,
    compared segment by segment."""
    parsed_test = urlparse(test_url)
    parsed_prefix = urlparse(startswith_url)
    if parsed_test.host != parsed_prefix.host or parsed_test.port != parsed_prefix.port:
        return False

    def _path_tokens(parsed):
        # normalize an empty path to '/' before splitting into segments
        return (parsed.path.strip('/') or '/').split('/')

    return tokenized_startswith(_path_tokens(parsed_test), _path_tokens(parsed_prefix))
def _get_channel_for_name(channel_name):
    """Resolve a channel name (possibly containing subchannel path segments)
    to a Channel, consulting context.custom_channels first and falling back
    to the configured channel_alias."""
    def _get_channel_for_name_helper(name):
        # try the name, then progressively shorter prefixes, against custom_channels
        if name in context.custom_channels:
            return context.custom_channels[name]
        else:
            test_name = name.rsplit('/', 1)[0]  # progressively strip off path segments
            if test_name == name:
                return None
            return _get_channel_for_name_helper(test_name)

    channel = _get_channel_for_name_helper(channel_name)

    if channel is not None:
        # stripping off path threw information away from channel_name (i.e. any potential subname)
        # channel.name *should still be* channel_name
        channel.name = channel_name
        return channel
    else:
        ca = context.channel_alias
        return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                       name=channel_name)
def _read_channel_configuration(scheme, host, port, path):
    """Reconcile a parsed url with configured channel settings.

    Tries, in order: migrated custom channels, migrated channel aliases,
    custom channels, the channel alias, bare file-style urls, and finally a
    host:port fallback.

    Returns:
        tuple: (location, name, scheme, auth, token); elements may be None.
    """
    # return location, name, scheme, auth, token
    test_url = Url(host=host, port=port, path=path).url.rstrip('/')

    # Step 1. migrated_custom_channels matches
    for name, location in sorted(context.migrated_custom_channels.items(), reverse=True,
                                 key=lambda x: len(x[0])):
        location, _scheme, _auth, _token = split_scheme_auth_token(location)
        if tokenized_conda_url_startswith(test_url, join_url(location, name)):
            # translate location to new location, with new credentials
            subname = test_url.replace(join_url(location, name), '', 1).strip('/')
            channel_name = join_url(name, subname)
            channel = _get_channel_for_name(channel_name)
            return channel.location, channel_name, channel.scheme, channel.auth, channel.token

    # Step 2. migrated_channel_aliases matches
    for migrated_alias in context.migrated_channel_aliases:
        if test_url.startswith(migrated_alias.location):
            name = test_url.replace(migrated_alias.location, '', 1).strip('/')
            ca = context.channel_alias
            return ca.location, name, ca.scheme, ca.auth, ca.token

    # Step 3. custom_channels matches
    for name, channel in sorted(context.custom_channels.items(), reverse=True,
                                key=lambda x: len(x[0])):
        that_test_url = join_url(channel.location, channel.name)
        if test_url.startswith(that_test_url):
            subname = test_url.replace(that_test_url, '', 1).strip('/')
            return (channel.location, join_url(channel.name, subname), scheme,
                    channel.auth, channel.token)

    # Step 4. channel_alias match
    ca = context.channel_alias
    if ca.location and test_url.startswith(ca.location):
        name = test_url.replace(ca.location, '', 1).strip('/') or None
        return ca.location, name, scheme, ca.auth, ca.token

    # Step 5. not-otherwise-specified file://-type urls
    if host is None:
        # this should probably only happen with a file:// type url
        assert port is None
        location, name = test_url.rsplit('/', 1)
        if not location:
            location = '/'
        _scheme, _auth, _token = 'file', None, None
        return location, name, _scheme, _auth, _token

    # Step 6. fall through to host:port as channel_location and path as channel_name
    return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or None,
            scheme or None, None, None)
def parse_conda_channel_url(url):
    """Split a conda url into its parts, reconcile them with configured
    channel settings, and return the resulting Channel."""
    (scheme, auth, token, platform, package_filename,
     host, port, path, query) = split_conda_url_easy_parts(url)

    # recombine host, port, path to get a channel_name and channel_location
    (channel_location, channel_name, configured_scheme, configured_auth,
     configured_token) = _read_channel_configuration(scheme, host, port, path)

    # if we came out with no channel_location or channel_name, we need to figure it out
    # from host, port, path
    assert channel_location is not None or channel_name is not None

    # values parsed directly from the url take precedence over configured ones
    return Channel(configured_scheme or 'https',
                   auth or configured_auth,
                   channel_location,
                   token or configured_token,
                   channel_name,
                   platform,
                   package_filename)
class ChannelType(type):
    """
    This metaclass does basic caching and enables static constructor method usage with a
    single arg.
    """

    def __call__(cls, *args, **kwargs):
        # a single positional argument is a "value" to interpret and cache
        if len(args) == 1 and not kwargs:
            value = args[0]
            if isinstance(value, Channel):
                # already a Channel; pass through untouched
                return value
            elif value in Channel._cache_:
                return Channel._cache_[value]
            else:
                c = Channel.from_value(value)
                Channel._cache_[value] = c
                return c
        else:
            # normal construction path with explicit field arguments
            return super(ChannelType, cls).__call__(*args, **kwargs)
@with_metaclass(ChannelType)
class Channel(object):
    """A conda channel, decomposed into scheme / auth / location / token /
    name / platform / package_filename.

    Construction with a single value (``Channel("conda-forge")``,
    ``Channel(url)``) is routed through the ``ChannelType`` metaclass and
    memoized in ``_cache_``.
    """

    # Metaclass-level construction cache: maps raw input value -> Channel.
    _cache_ = dict()

    @staticmethod
    def _reset_state():
        # Drop the construction cache (e.g. after configuration changes).
        Channel._cache_ = dict()

    def __init__(self, scheme=None, auth=None, location=None, token=None, name=None,
                 platform=None, package_filename=None):
        self.scheme = scheme
        self.auth = auth
        self.location = location
        self.token = token
        self.name = name
        self.platform = platform
        self.package_filename = package_filename

    @property
    def channel_location(self):
        # Alias kept for backward compatibility.
        return self.location

    @property
    def channel_name(self):
        # Alias kept for backward compatibility.
        return self.name

    @staticmethod
    def from_url(url):
        """Build a Channel by parsing a full URL."""
        return parse_conda_channel_url(url)

    @staticmethod
    def from_channel_name(channel_name):
        """Build a Channel from a bare channel name, honoring custom_channels."""
        return _get_channel_for_name(channel_name)

    @staticmethod
    def from_value(value):
        """Coerce *value* (None, URL, path, tarball path, or channel name)
        into a Channel.
        """
        if value is None:
            return Channel(name="<unknown>")
        # BUGFIX: callers sometimes hand us bytes (e.g. values read from
        # package metadata); decode to text first so has_scheme() and the
        # str methods below behave consistently.
        if hasattr(value, 'decode'):
            value = value.decode('utf-8')
        if has_scheme(value):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        elif value.startswith(('./', '..', '~', '/')) or is_windows_path(value):
            return Channel.from_url(path_to_url(value))
        elif value.endswith('.tar.bz2'):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        else:
            # at this point assume we don't have a bare (non-scheme) url
            # e.g. this would be bad: repo.continuum.io/pkgs/free
            if value in context.custom_multichannels:
                return MultiChannel(value, context.custom_multichannels[value])
            else:
                return Channel.from_channel_name(value)

    @staticmethod
    def make_simple_channel(channel_alias, channel_url, name=None):
        """Build a Channel from *channel_url*, resolving location/name against
        *channel_alias* when the url has no scheme of its own.
        """
        ca = channel_alias
        test_url, scheme, auth, token = split_scheme_auth_token(channel_url)
        if name and scheme:
            return Channel(scheme=scheme, auth=auth, location=test_url, token=token,
                           name=name.strip('/'))
        if scheme:
            if ca.location and test_url.startswith(ca.location):
                location, name = ca.location, test_url.replace(ca.location, '', 1)
            else:
                url_parts = urlparse(test_url)
                location, name = Url(host=url_parts.host, port=url_parts.port).url, url_parts.path
            return Channel(scheme=scheme, auth=auth, location=location, token=token,
                           name=name.strip('/'))
        else:
            # No scheme: inherit everything from the channel alias.
            return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                           name=name and name.strip('/') or channel_url.strip('/'))

    @property
    def canonical_name(self):
        """The short display name: a multichannel name, a custom channel name,
        the bare name when hosted at the channel alias, else the full base url.
        """
        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    return multiname
        for that_name in context.custom_channels:
            if tokenized_startswith(self.name.split('/'), that_name.split('/')):
                return self.name
        if any(c.location == self.location
               for c in concatv((context.channel_alias,), context.migrated_channel_aliases)):
            return self.name
        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        return "%s://%s/%s" % (self.scheme, self.location, self.name)

    def urls(self, with_credentials=False, platform=None):
        """Full repodata urls for this channel: one per platform, always
        including 'noarch'.
        """
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        base = join_url(*base)

        def _platforms():
            p = platform or self.platform or context.subdir
            return (p, 'noarch') if p != 'noarch' else ('noarch',)

        bases = (join_url(base, p) for p in _platforms())

        if with_credentials and self.auth:
            return ["%s://%s@%s" % (self.scheme, self.auth, b) for b in bases]
        else:
            return ["%s://%s" % (self.scheme, b) for b in bases]

    def url(self, with_credentials=False):
        """Single url for this channel, including platform and (if set)
        package filename.
        """
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        if self.platform:
            base.append(self.platform)
            if self.package_filename:
                base.append(self.package_filename)
        else:
            base.append(context.subdir)
        base = join_url(*base)
        if with_credentials and self.auth:
            return "%s://%s@%s" % (self.scheme, self.auth, base)
        else:
            return "%s://%s" % (self.scheme, base)

    @property
    def base_url(self):
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))

    def __str__(self):
        return self.base_url

    def __repr__(self):
        # Mask credentials so reprs are safe to log.
        return ("Channel(scheme=%s, auth=%s, location=%s, token=%s, name=%s, platform=%s, "
                "package_filename=%s)" % (self.scheme,
                                          self.auth and "%s:<PASSWORD>" % self.auth.split(':')[0],
                                          self.location,
                                          self.token and "<TOKEN>",
                                          self.name,
                                          self.platform,
                                          self.package_filename))

    def __eq__(self, other):
        # Equality (and hash) consider location + name only.
        if isinstance(other, Channel):
            return self.location == other.location and self.name == other.name
        else:
            return False

    def __hash__(self):
        return hash((self.location, self.name))

    def __nonzero__(self):
        return any((self.location, self.name))

    def __bool__(self):
        return self.__nonzero__()

    @property
    def url_channel_wtf(self):
        # Legacy accessor kept for conda-build compatibility.
        return self.base_url, self.canonical_name
class MultiChannel(Channel):
    """A named group of channels (e.g. 'defaults'), presenting the combined
    urls of its members while having no single location/url of its own.
    """

    def __init__(self, name, channels):
        self.name = name
        self.location = None
        # Member channels, in priority order.
        self._channels = channels
        self.scheme = None
        self.auth = None
        self.token = None
        self.platform = None
        self.package_filename = None

    @property
    def channel_location(self):
        return self.location

    @property
    def canonical_name(self):
        return self.name

    def urls(self, with_credentials=False, platform=None):
        if platform and platform != context.subdir and self.name == 'defaults':
            # necessary shenanigan because different platforms have different default channels
            urls = DEFAULT_CHANNELS_WIN if 'win' in platform else DEFAULT_CHANNELS_UNIX
            ca = context.channel_alias
            _channels = tuple(Channel.make_simple_channel(ca, v) for v in urls)
        else:
            _channels = self._channels
        # Flatten each member channel's url list, preserving order.
        return list(chain.from_iterable(c.urls(with_credentials, platform) for c in _channels))

    @property
    def base_url(self):
        # A multichannel has no single base url.
        return None

    def url(self, with_credentials=False):
        # A multichannel has no single url.
        return None
def prioritize_channels(channels, with_credentials=True, platform=None):
    """Map each expanded channel url to ``(canonical_name, priority)``.

    Priority is the index of the channel in *channels*; the first channel to
    produce a url wins.  Example entry:
    ('https://conda.anaconda.org/conda-forge/osx-64/', ('conda-forge', 1))
    """
    prioritized = odict()
    for priority, spec in enumerate(channels):
        chan = Channel(spec)
        chan_name = chan.canonical_name
        for chan_url in chan.urls(with_credentials, platform):
            if chan_url not in prioritized:
                prioritized[chan_url] = chan_name, priority
    return prioritized
def offline_keep(url):
    """Return True when *url* may be used under the current offline setting.

    Everything is allowed online; offline, only non-urls and file:/ urls pass.
    """
    if not context.offline:
        return True
    return not is_url(url) or url.startswith('file:/')
<|code_end|>
| conda/models/channel.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from itertools import chain
from logging import getLogger
from requests.packages.urllib3.util import Url
from ..base.constants import DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN, UTF8
from ..base.context import context
from ..common.compat import iteritems, odict, with_metaclass
from ..common.url import (has_scheme, is_url, is_windows_path, join_url, on_win, path_to_url,
split_conda_url_easy_parts, split_scheme_auth_token, urlparse)
try:
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concatv, topk
except ImportError:
from .._vendor.toolz.functoolz import excepts # NOQA
from .._vendor.toolz.itertoolz import concatv, topk # NOQA
log = getLogger(__name__)
# backward compatibility for conda-build
def get_conda_build_local_url():
    """Return a 1-tuple containing the local build root (conda-build compat shim)."""
    return context.local_build_root,
"""
scheme <> auth <> location <> token <> channel <> subchannel <> platform <> package_filename
channel <> subchannel <> namespace <> package_name
"""
def tokenized_startswith(test_iterable, startswith_iterable):
    """True when *test_iterable* begins with the tokens of *startswith_iterable*.

    Comparison stops at the shorter of the two iterables (zip semantics), so a
    prefix longer than the test sequence still matches on the overlap.
    """
    for token, prefix_token in zip(test_iterable, startswith_iterable):
        if token != prefix_token:
            return False
    return True
def tokenized_conda_url_startswith(test_url, startswith_url):
    """True when *test_url* is under *startswith_url*: same host/port, and the
    path of *startswith_url* is a whole-segment prefix of *test_url*'s path.
    """
    test_url, startswith_url = urlparse(test_url), urlparse(startswith_url)
    if test_url.host != startswith_url.host or test_url.port != startswith_url.port:
        return False
    # Normalize empty paths to '/' so root urls compare sensibly.
    norm_url_path = lambda url: url.path.strip('/') or '/'
    return tokenized_startswith(norm_url_path(test_url).split('/'),
                                norm_url_path(startswith_url).split('/'))
def _get_channel_for_name(channel_name):
    """Resolve *channel_name* against ``context.custom_channels``, falling back
    to the channel alias when no custom channel matches.

    NOTE(review): on a custom-channel hit, this mutates and returns the shared
    context channel object (sets ``channel.name``) — confirm callers tolerate
    that shared mutation.
    """
    def _get_channel_for_name_helper(name):
        # Walk up the name path ('a/b/c' -> 'a/b' -> 'a') looking for a match.
        if name in context.custom_channels:
            return context.custom_channels[name]
        else:
            test_name = name.rsplit('/', 1)[0]  # progressively strip off path segments
            if test_name == name:
                return None
            return _get_channel_for_name_helper(test_name)

    channel = _get_channel_for_name_helper(channel_name)

    if channel is not None:
        # stripping off path threw information away from channel_name (i.e. any potential subname)
        # channel.name *should still be* channel_name
        channel.name = channel_name
        return channel
    else:
        ca = context.channel_alias
        return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                       name=channel_name)
def _read_channel_configuration(scheme, host, port, path):
    """Map a parsed url onto configured channel settings.

    Returns a 5-tuple ``(location, name, scheme, auth, token)``.  Matching is
    attempted in a fixed precedence order (steps 1-6 below); the first match
    wins.
    """
    # return location, name, scheme, auth, token
    test_url = Url(host=host, port=port, path=path).url.rstrip('/')

    # Step 1. migrated_custom_channels matches
    for name, location in sorted(context.migrated_custom_channels.items(), reverse=True,
                                 key=lambda x: len(x[0])):
        location, _scheme, _auth, _token = split_scheme_auth_token(location)
        if tokenized_conda_url_startswith(test_url, join_url(location, name)):
            # translate location to new location, with new credentials
            subname = test_url.replace(join_url(location, name), '', 1).strip('/')
            channel_name = join_url(name, subname)
            channel = _get_channel_for_name(channel_name)
            return channel.location, channel_name, channel.scheme, channel.auth, channel.token

    # Step 2. migrated_channel_aliases matches
    for migrated_alias in context.migrated_channel_aliases:
        if test_url.startswith(migrated_alias.location):
            name = test_url.replace(migrated_alias.location, '', 1).strip('/')
            ca = context.channel_alias
            return ca.location, name, ca.scheme, ca.auth, ca.token

    # Step 3. custom_channels matches
    # Longest names first so the most specific custom channel wins.
    for name, channel in sorted(context.custom_channels.items(), reverse=True,
                                key=lambda x: len(x[0])):
        that_test_url = join_url(channel.location, channel.name)
        if test_url.startswith(that_test_url):
            subname = test_url.replace(that_test_url, '', 1).strip('/')
            return (channel.location, join_url(channel.name, subname), scheme,
                    channel.auth, channel.token)

    # Step 4. channel_alias match
    ca = context.channel_alias
    if ca.location and test_url.startswith(ca.location):
        name = test_url.replace(ca.location, '', 1).strip('/') or None
        return ca.location, name, scheme, ca.auth, ca.token

    # Step 5. not-otherwise-specified file://-type urls
    if host is None:
        # this should probably only happen with a file:// type url
        assert port is None
        location, name = test_url.rsplit('/', 1)
        if not location:
            location = '/'
        _scheme, _auth, _token = 'file', None, None
        return location, name, _scheme, _auth, _token

    # Step 6. fall through to host:port as channel_location and path as channel_name
    return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or None,
            scheme or None, None, None)
def parse_conda_channel_url(url):
    """Parse a conda channel URL into a fully-populated ``Channel``.

    Credentials parsed directly out of the url take precedence over the
    configured ones returned by ``_read_channel_configuration``.
    """
    (scheme, auth, token, platform, package_filename,
     host, port, path, query) = split_conda_url_easy_parts(url)
    # recombine host, port, path to get a channel_name and channel_location
    (channel_location, channel_name, configured_scheme, configured_auth,
     configured_token) = _read_channel_configuration(scheme, host, port, path)
    # if we came out with no channel_location or channel_name, we need to figure it out
    # from host, port, path
    assert channel_location is not None or channel_name is not None
    return Channel(configured_scheme or 'https',
                   auth or configured_auth,
                   channel_location,
                   token or configured_token,
                   channel_name,
                   platform,
                   package_filename)
class ChannelType(type):
    """Metaclass providing value-based construction with memoization.

    ``Channel(x)`` with a single positional argument coerces *x* into a
    Channel and caches the result; any other call signature falls through to
    the regular constructor.
    """

    def __call__(cls, *args, **kwargs):
        if len(args) != 1 or kwargs:
            # Regular keyword/positional construction path.
            return super(ChannelType, cls).__call__(*args, **kwargs)
        value = args[0]
        if isinstance(value, Channel):
            # Already constructed; hand it straight back.
            return value
        if value in Channel._cache_:
            return Channel._cache_[value]
        made = Channel.from_value(value)
        Channel._cache_[value] = made
        return made
@with_metaclass(ChannelType)
class Channel(object):
    """A conda channel, decomposed into scheme / auth / location / token /
    name / platform / package_filename.

    Construction with a single value is routed through ``ChannelType`` and
    memoized in ``_cache_``.
    """

    # Metaclass-level construction cache: maps raw input value -> Channel.
    _cache_ = dict()

    @staticmethod
    def _reset_state():
        # Drop the construction cache (e.g. after configuration changes).
        Channel._cache_ = dict()

    def __init__(self, scheme=None, auth=None, location=None, token=None, name=None,
                 platform=None, package_filename=None):
        self.scheme = scheme
        self.auth = auth
        self.location = location
        self.token = token
        self.name = name
        self.platform = platform
        self.package_filename = package_filename

    @property
    def channel_location(self):
        # Alias kept for backward compatibility.
        return self.location

    @property
    def channel_name(self):
        # Alias kept for backward compatibility.
        return self.name

    @staticmethod
    def from_url(url):
        """Build a Channel by parsing a full URL."""
        return parse_conda_channel_url(url)

    @staticmethod
    def from_channel_name(channel_name):
        """Build a Channel from a bare channel name, honoring custom_channels."""
        return _get_channel_for_name(channel_name)

    @staticmethod
    def from_value(value):
        """Coerce *value* (None, bytes, URL, path, tarball path, or channel
        name) into a Channel.
        """
        if value is None:
            return Channel(name="<unknown>")
        # bytes input is decoded first so the string tests below work.
        if hasattr(value, 'decode'):
            value = value.decode(UTF8)
        if has_scheme(value):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        elif value.startswith(('./', '..', '~', '/')) or is_windows_path(value):
            return Channel.from_url(path_to_url(value))
        elif value.endswith('.tar.bz2'):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        else:
            # at this point assume we don't have a bare (non-scheme) url
            # e.g. this would be bad: repo.continuum.io/pkgs/free
            if value in context.custom_multichannels:
                return MultiChannel(value, context.custom_multichannels[value])
            else:
                return Channel.from_channel_name(value)

    @staticmethod
    def make_simple_channel(channel_alias, channel_url, name=None):
        """Build a Channel from *channel_url*, resolving location/name against
        *channel_alias* when the url has no scheme of its own.
        """
        ca = channel_alias
        test_url, scheme, auth, token = split_scheme_auth_token(channel_url)
        if name and scheme:
            return Channel(scheme=scheme, auth=auth, location=test_url, token=token,
                           name=name.strip('/'))
        if scheme:
            if ca.location and test_url.startswith(ca.location):
                location, name = ca.location, test_url.replace(ca.location, '', 1)
            else:
                url_parts = urlparse(test_url)
                location, name = Url(host=url_parts.host, port=url_parts.port).url, url_parts.path
            return Channel(scheme=scheme, auth=auth, location=location, token=token,
                           name=name.strip('/'))
        else:
            # No scheme: inherit everything from the channel alias.
            return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                           name=name and name.strip('/') or channel_url.strip('/'))

    @property
    def canonical_name(self):
        """The short display name: a multichannel name, a custom channel name,
        the bare name when hosted at the channel alias, else the full base url.
        """
        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    return multiname
        for that_name in context.custom_channels:
            if tokenized_startswith(self.name.split('/'), that_name.split('/')):
                return self.name
        if any(c.location == self.location
               for c in concatv((context.channel_alias,), context.migrated_channel_aliases)):
            return self.name
        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        return "%s://%s/%s" % (self.scheme, self.location, self.name)

    def urls(self, with_credentials=False, platform=None):
        """Full repodata urls for this channel: one per platform, always
        including 'noarch'.
        """
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        base = join_url(*base)

        def _platforms():
            p = platform or self.platform or context.subdir
            return (p, 'noarch') if p != 'noarch' else ('noarch',)

        bases = (join_url(base, p) for p in _platforms())

        if with_credentials and self.auth:
            return ["%s://%s@%s" % (self.scheme, self.auth, b) for b in bases]
        else:
            return ["%s://%s" % (self.scheme, b) for b in bases]

    def url(self, with_credentials=False):
        """Single url for this channel, including platform and (if set)
        package filename.
        """
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        if self.platform:
            base.append(self.platform)
            if self.package_filename:
                base.append(self.package_filename)
        else:
            base.append(context.subdir)
        base = join_url(*base)
        if with_credentials and self.auth:
            return "%s://%s@%s" % (self.scheme, self.auth, base)
        else:
            return "%s://%s" % (self.scheme, base)

    @property
    def base_url(self):
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))

    def __str__(self):
        return self.base_url

    def __repr__(self):
        # Mask credentials so reprs are safe to log.
        return ("Channel(scheme=%s, auth=%s, location=%s, token=%s, name=%s, platform=%s, "
                "package_filename=%s)" % (self.scheme,
                                          self.auth and "%s:<PASSWORD>" % self.auth.split(':')[0],
                                          self.location,
                                          self.token and "<TOKEN>",
                                          self.name,
                                          self.platform,
                                          self.package_filename))

    def __eq__(self, other):
        # Equality (and hash) consider location + name only.
        if isinstance(other, Channel):
            return self.location == other.location and self.name == other.name
        else:
            return False

    def __hash__(self):
        return hash((self.location, self.name))

    def __nonzero__(self):
        return any((self.location, self.name))

    def __bool__(self):
        return self.__nonzero__()

    @property
    def url_channel_wtf(self):
        # Legacy accessor kept for conda-build compatibility.
        return self.base_url, self.canonical_name
class MultiChannel(Channel):
    """A named group of channels (e.g. 'defaults'), presenting the combined
    urls of its members while having no single location/url of its own.
    """

    def __init__(self, name, channels):
        self.name = name
        self.location = None
        # Member channels, in priority order.
        self._channels = channels
        self.scheme = None
        self.auth = None
        self.token = None
        self.platform = None
        self.package_filename = None

    @property
    def channel_location(self):
        return self.location

    @property
    def canonical_name(self):
        return self.name

    def urls(self, with_credentials=False, platform=None):
        if platform and platform != context.subdir and self.name == 'defaults':
            # necessary shenanigan because different platforms have different default channels
            urls = DEFAULT_CHANNELS_WIN if 'win' in platform else DEFAULT_CHANNELS_UNIX
            ca = context.channel_alias
            _channels = tuple(Channel.make_simple_channel(ca, v) for v in urls)
        else:
            _channels = self._channels
        # Flatten each member channel's url list, preserving order.
        return list(chain.from_iterable(c.urls(with_credentials, platform) for c in _channels))

    @property
    def base_url(self):
        # A multichannel has no single base url.
        return None

    def url(self, with_credentials=False):
        # A multichannel has no single url.
        return None
def prioritize_channels(channels, with_credentials=True, platform=None):
    """Map each expanded channel url to ``(canonical_name, priority)``, where
    priority is the channel's index in *channels*; first url occurrence wins.
    """
    # ('https://conda.anaconda.org/conda-forge/osx-64/', ('conda-forge', 1))
    result = odict()
    for q, chn in enumerate(channels):
        channel = Channel(chn)
        for url in channel.urls(with_credentials, platform):
            if url in result:
                continue
            result[url] = channel.canonical_name, q
    return result
def offline_keep(url):
    """Return True if *url* is usable under context.offline (non-urls and
    file:/ urls always pass)."""
    return not context.offline or not is_url(url) or url.startswith('file:/')
| conda/models/channel.py
--- a/conda/models/channel.py
+++ b/conda/models/channel.py
@@ -5,7 +5,7 @@
from logging import getLogger
from requests.packages.urllib3.util import Url
-from ..base.constants import DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN
+from ..base.constants import DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN, UTF8
from ..base.context import context
from ..common.compat import iteritems, odict, with_metaclass
from ..common.url import (has_scheme, is_url, is_windows_path, join_url, on_win, path_to_url,
@@ -203,7 +203,9 @@ def from_channel_name(channel_name):
def from_value(value):
if value is None:
return Channel(name="<unknown>")
- elif has_scheme(value):
+ if hasattr(value, 'decode'):
+ value = value.decode(UTF8)
+ if has_scheme(value):
if value.startswith('file:') and on_win:
value = value.replace('\\', '/')
return Channel.from_url(value) |
Installing ompython-2.0.7
Current conda install:
```
platform : osx-64
conda version : 4.2.7
conda is private : False
conda-env version : 4.2.7
conda-build version : not installed
python version : 2.7.12.final.0
requests version : 2.11.1
root environment : /Library/Frameworks/Python.framework/Versions/2.7 (writable)
default environment : /Library/Frameworks/Python.framework/Versions/2.7
envs directories : /Library/Frameworks/Python.framework/Versions/2.7/envs
package cache : /Library/Frameworks/Python.framework/Versions/2.7/pkgs
channel URLs : https://repo.continuum.io/pkgs/free/osx-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/osx-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : None
offline mode : False
```
`$ /Library/Frameworks/Python.framework/Versions/2.7/bin/conda install -c mutirri ompython=2.0.7`
```
Traceback (most recent call last):
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/exceptions.py", line 472, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/cli/main_install.py", line 80, in execute
install(args, parser, 'install')
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/cli/install.py", line 420, in install
raise CondaRuntimeError('RuntimeError: %s' % e)
CondaRuntimeError: Runtime error: RuntimeError: Invalid mode: <conda.install.FileMode object at 0x10846b110>
```
Installing ompython-2.0.7
Current conda install:
```
platform : osx-64
conda version : 4.2.7
conda is private : False
conda-env version : 4.2.7
conda-build version : not installed
python version : 2.7.12.final.0
requests version : 2.11.1
root environment : /Library/Frameworks/Python.framework/Versions/2.7 (writable)
default environment : /Library/Frameworks/Python.framework/Versions/2.7
envs directories : /Library/Frameworks/Python.framework/Versions/2.7/envs
package cache : /Library/Frameworks/Python.framework/Versions/2.7/pkgs
channel URLs : https://repo.continuum.io/pkgs/free/osx-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/osx-64/
https://repo.continuum.io/pkgs/pro/noarch/
config file : None
offline mode : False
```
`$ /Library/Frameworks/Python.framework/Versions/2.7/bin/conda install -c mutirri ompython=2.0.7`
```
Traceback (most recent call last):
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/exceptions.py", line 472, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/cli/main.py", line 144, in _main
exit_code = args.func(args, p)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/cli/main_install.py", line 80, in execute
install(args, parser, 'install')
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/conda/cli/install.py", line 420, in install
raise CondaRuntimeError('RuntimeError: %s' % e)
CondaRuntimeError: Runtime error: RuntimeError: Invalid mode: <conda.install.FileMode object at 0x10846b110>
```
| conda/install.py
<|code_start|>
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import struct
import subprocess
import sys
import tarfile
import traceback
from collections import namedtuple
from enum import Enum
from itertools import chain
from os.path import (abspath, basename, dirname, exists, isdir, isfile, islink, join, normcase,
normpath)
from . import CondaError
from .base.constants import UTF8
from .base.context import context
from .common.disk import exp_backoff_fn, rm_rf
from .common.url import path_to_url
from .exceptions import CondaOSError, LinkError, PaddingError
from .lock import DirectoryLock, FileLock
from .models.channel import Channel
from .utils import on_win
# conda-build compatibility
from .common.disk import delete_trash, move_to_trash, move_path_to_trash # NOQA
if on_win:
    import ctypes
    from ctypes import wintypes

    # Bind the Win32 primitives used to emulate os.link/os.symlink on Windows.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        # CreateSymbolicLinkW is unavailable on Windows XP; probe at import.
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        CreateSymbolicLink = None
def win_hard_link(src, dst):
    "Equivalent to os.link, using the win32 CreateHardLink call."
    # CreateHardLink returns FALSE (0) on failure; surface that as a conda error.
    if not CreateHardLink(dst, src, None):
        raise CondaOSError('win32 hard link failed')
def win_soft_link(src, dst):
    "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
    if CreateSymbolicLink is None:
        # Symlink API missing (e.g. Windows XP); callers must fall back.
        raise CondaOSError('win32 soft link not supported')
    # Third argument flags a directory symlink when src is a directory.
    if not CreateSymbolicLink(dst, src, isdir(src)):
        raise CondaOSError('win32 soft link failed')
def win_conda_bat_redirect(src, dst, shell):
    """Special function for Windows XP where the `CreateSymbolicLink`
    function is not available.

    Simply creates a `.bat` file at `dst` which calls `src` together with
    all command line arguments.

    Works of course only with callable files, e.g. `.bat` or `.exe` files.
    """
    from conda.utils import shells
    try:
        os.makedirs(os.path.dirname(dst))
    except OSError as exc:  # Python >2.5
        # Directory already existing is fine; anything else propagates.
        if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
            pass
        else:
            raise

    # bat file redirect
    if not os.path.isfile(dst + '.bat'):
        with open(dst + '.bat', 'w') as f:
            f.write('@echo off\ncall "%s" %%*\n' % src)

    # TODO: probably need one here for powershell at some point

    # This one is for bash/cygwin/msys
    # set default shell to bash.exe when not provided, as that's most common
    if not shell:
        shell = "bash.exe"

    # technically these are "links" - but islink doesn't work on win
    if not os.path.isfile(dst):
        with open(dst, "w") as f:
            f.write("#!/usr/bin/env bash \n")
            if src.endswith("conda"):
                f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
            else:
                f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2    # copy R bits to X
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')

# Matches a shebang line, capturing (whole line, interpreter path, options).
# Backslash-escaped spaces are allowed inside the interpreter path.
SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))')
class FileMode(Enum):
    """How a file's embedded prefix is replaced: as plain text or binary data."""
    text = 'text'
    binary = 'binary'

    def __str__(self):
        # str(FileMode.text) == 'text'; used for display and value round-trips.
        return "%s" % self.value
# Link-type codes accepted by _link(); link_name_map gives display names.
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Create *dst* from *src* via hard link, soft link, or copy.

    Dispatches on *linktype* (LINK_HARD/LINK_SOFT/LINK_COPY), using the win32
    wrappers on Windows.  Raises CondaError for an unknown linktype.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise CondaError("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
    """shutil.rmtree onerror callback: clear the read-only bit and retry *func*."""
    os.chmod(path, stat.S_IWRITE)
    func(path)
def warn_failed_remove(function, path, exc_info):
    """rmtree onerror callback that logs (rather than raises) a failed removal,
    naming the reason when the errno is recognized."""
    err_no = exc_info[1].errno
    if err_no == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err_no == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def yield_lines(path):
    """Generator function for lines in file. Empty generator if path does not exist.

    Args:
        path (str): path to file

    Returns:
        iterator: each non-empty line in file, not starting with '#'
    """
    try:
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                yield line
    except (IOError, OSError) as e:
        if e.errno == errno.ENOENT:
            # BUGFIX: `raise StopIteration` inside a generator becomes a
            # RuntimeError under PEP 479 (Python 3.7+); a bare return ends
            # iteration cleanly with the same "empty generator" semantics.
            return
        else:
            raise
# The sentinel path baked into packages at build time; replaced with the real
# install prefix at link time.
PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')

# backwards compatibility for conda-build
prefix_placeholder = PREFIX_PLACEHOLDER
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode)

    A line in `has_prefix` contains one of
      * filepath
      * placeholder mode filepath

    mode values are one of
      * text
      * binary
    """
    ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath'))

    def parse_line(line):
        # placeholder, filemode, filepath
        # posix=False keeps backslashes (Windows paths) intact; quotes are
        # stripped manually afterwards.
        parts = tuple(x.strip('"\'') for x in shlex.split(line, posix=False))
        if len(parts) == 1:
            # Bare filepath: assume the default placeholder and text mode.
            return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0])
        elif len(parts) == 3:
            return ParseResult(parts[0], FileMode(parts[1]), parts[2])
        else:
            raise RuntimeError("Invalid has_prefix file at path: %s" % path)

    parsed_lines = (parse_line(line) for line in yield_lines(path))
    return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
class _PaddingError(Exception):
    """Raised by binary_replace() when the new prefix is longer than the
    placeholder, so in-place null-padded replacement is impossible."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.

    The total length of `data` is preserved (asserted below), which is what
    makes the replacement safe inside compiled binaries.
    """
    if on_win:
        # on Windows, the only binaries handled are pyzzer-style launchers;
        # everything else is returned unchanged.
        if has_pyzzer_entry_point(data):
            return replace_pyzzer_entry_point_shebang(data, a, b)
        # currently we should skip replacement on Windows for things we don't understand.
        else:
            return data

    def replace(match):
        # Pad each null-terminated run so its overall length is unchanged.
        occurances = match.group().count(a)
        padding = (len(a) - len(b))*occurances
        if padding < 0:
            raise _PaddingError
        return match.group().replace(a, b) + b'\0' * padding

    original_data_len = len(data)
    # Match the placeholder plus everything up to the next NUL terminator.
    pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    data = pat.sub(replace, data)
    assert len(data) == original_data_len

    return data
def replace_long_shebang(mode, data):
    """Rewrite a shebang longer than the 127-byte kernel limit to use
    ``#!/usr/bin/env <name>`` instead of the full interpreter path.

    Only acts on text-mode data; binary data is returned unchanged.
    NOTE(review): the `is` identity test below fails for a value-equal
    FileMode coming from a re-imported copy of this module — confirm callers
    always pass this module's FileMode members.
    """
    if mode is FileMode.text:
        shebang_match = SHEBANG_REGEX.match(data)
        if shebang_match:
            whole_shebang, executable, options = shebang_match.groups()
            if len(whole_shebang) > 127:
                executable_name = executable.decode(UTF8).split('/')[-1]
                new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode(UTF8))
                data = data.replace(whole_shebang, new_shebang.encode(UTF8))
    else:
        # TODO: binary shebangs exist; figure this out in the future if text works well
        log.debug("TODO: binary shebangs exist; figure this out in the future if text works well")
    return data
def has_pyzzer_entry_point(data):
    """True when *data* contains a zip end-of-central-directory record
    (``PK\\x05\\x06``), the signature of a pyzzer-style launcher archive."""
    return data.rfind(b'PK\x05\x06') != -1
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
    """Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
    which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
    We need to change the shebang.
    https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112  # NOQA
    """
    # Copyright (c) 2013 Vinay Sajip.
    #
    # Permission is hereby granted, free of charge, to any person obtaining a copy
    # of this software and associated documentation files (the "Software"), to deal
    # in the Software without restriction, including without limitation the rights
    # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    # copies of the Software, and to permit persons to whom the Software is
    # furnished to do so, subject to the following conditions:
    #
    # The above copyright notice and this permission notice shall be included in
    # all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    # THE SOFTWARE.
    launcher = shebang = None
    # Locate the zip end-of-central-directory record from the end of the file.
    pos = all_data.rfind(b'PK\x05\x06')
    if pos >= 0:
        end_cdr = all_data[pos + 12:pos + 20]
        cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
        # arc_pos is where the embedded zip archive starts.
        arc_pos = pos - cdr_size - cdr_offset
        data = all_data[arc_pos:]
        if arc_pos > 0:
            # The shebang sits between the native launcher and the archive.
            pos = all_data.rfind(b'#!', 0, arc_pos)
            if pos >= 0:
                shebang = all_data[pos:arc_pos]
                if pos > 0:
                    launcher = all_data[:pos]

        if data and shebang and launcher:
            if hasattr(placeholder, 'encode'):
                placeholder = placeholder.encode('utf-8')
            if hasattr(new_prefix, 'encode'):
                new_prefix = new_prefix.encode('utf-8')
            # Only the shebang segment is rewritten; launcher and archive are
            # reassembled unchanged.
            shebang = shebang.replace(placeholder, new_prefix)
            all_data = b"".join([launcher, shebang, data])
    return all_data
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* inside *data*.

    Args:
        mode (FileMode): FileMode.text for plain substitution; FileMode.binary
            for NUL-padded substitution that preserves the data length.
        data (bytes): raw file contents.
        placeholder (str): prefix recorded at build time.
        new_prefix (str): prefix of the destination environment.

    Returns:
        bytes: contents with the placeholder replaced.

    Raises:
        RuntimeError: if *mode* is not a recognized FileMode member.
    """
    # Use == for both comparisons (the original mixed `is` and `==`;
    # they are equivalent for Enum members, but should be consistent —
    # the duplicate definition of this function elsewhere uses ==).
    if mode == FileMode.text:
        data = data.replace(placeholder.encode(UTF8), new_prefix.encode(UTF8))
    elif mode == FileMode.binary:
        # binary_replace pads with NULs so the file size is unchanged
        data = binary_replace(data, placeholder.encode(UTF8), new_prefix.encode(UTF8))
    else:
        raise RuntimeError("Invalid mode: %r" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
    """Rewrite the embedded build-time *placeholder* prefix in the file at
    *path* to *new_prefix*, preserving the file's permission bits.

    The file is rewritten in place only when the substitution actually
    changes its contents.
    """
    if on_win and mode is FileMode.text:
        # force all prefix replacements to forward slashes to simplify need to escape backslashes
        # replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')
    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()
    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        # a longer prefix can push the shebang past 127 bytes; fall back
        # to /usr/bin/env in that case (see replace_long_shebang)
        data = replace_long_shebang(mode, data)
    if data == original_data:
        return  # nothing changed; avoid touching mtime/permissions
    st = os.lstat(path)
    # exp_backoff_fn presumably retries the open to ride out transient
    # file locks -- see conda.common.disk
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    # restore the original permission bits on the rewritten file
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Normalize *dist* into a ``(channel, package-dist)`` pair.

    Strips a trailing ``[features]`` qualifier and a ``.tar.bz2``
    extension; a ``channel::`` prefix selects the channel, otherwise
    ``'defaults'`` is used.
    """
    dname = str(dist)
    # drop a trailing "[features]" qualifier, if present
    if dname.endswith(']'):
        dname = dname.split('[', 1)[0]
    # drop the package-archive extension
    if dname.endswith('.tar.bz2'):
        dname = dname[:-len('.tar.bz2')]
    # a "channel::name" prefix overrides the default channel
    channel, sep, rest = dname.partition('::')
    if sep:
        return channel, rest
    return 'defaults', dname
def dist2quad(dist):
    """Split *dist* into its (name, version, build string, channel) quad."""
    channel, dist = dist2pair(dist)
    # pad so dists with missing version/build fields still unpack cleanly
    parts = dist.rsplit('-', 2) + ['', '']
    return (str(parts[0]), str(parts[1]), str(parts[2]), str(channel))
def dist2name(dist):
    """Return just the package name portion of *dist*."""
    return dist2quad(dist)[0]
def name_dist(dist):
    """Alias for dist2name(), kept for backward compatibility."""
    return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
    """Return the channel-less dist name with *suffix* appended."""
    return dist2pair(dist)[1] + suffix
def dist2dirname(dist):
    """Return the extracted-directory name for *dist* (no extension)."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.

    Merges the package's info/index.json with *extra_info*, writes the
    result to <prefix>/conda-meta/<dist>.json, and refreshes the in-memory
    linked-data cache when one exists for *prefix*.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if not meta.get('url'):
        # fall back to the URL recorded in the package cache
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # keep the in-memory cache consistent with what was just written
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.

    Menu creation is best-effort: a missing menuinst module or a failure on
    an individual config file is logged and skipped, never raised.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed environments never get menu entries
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any import failure is logged and ignored
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except Exception:
            # best-effort per file: log and continue with the rest
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure

    The script is looked up at <prefix>/bin/.<name>-<action>.sh
    (Scripts\\.<name>-<action>.bat on Windows); a missing script counts as
    success.  ROOT_PREFIX / PREFIX / PKG_* environment variables are
    exported to the script.
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        return True  # nothing to run is not a failure
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # no COMSPEC: cannot run .bat scripts at all
            return False
    else:
        # BSD platforms may lack bash; use /bin/sh there
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    env = os.environ.copy()
    env[str('ROOT_PREFIX')] = sys.prefix
    env[str('PREFIX')] = str(env_prefix or prefix)
    env[str('PKG_NAME')], env[str('PKG_VERSION')], env[str('PKG_BUILDNUM')], _ = dist2quad(dist)
    if action == 'pre-link':
        # link() calls this with the extracted package dir as *prefix*,
        # so SOURCE_DIR points at the package source for pre-link scripts
        env[str('SOURCE_DIR')] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first URL recorded for *dist* in the package cache, or None."""
    res = package_cache().get(dist, {}).get('urls', (None,))
    return res[0] if res else None
def read_icondata(source_dir):
    """Return the package icon (info/icon.png) as a base64-encoded string,
    or None if the package has no icon or it cannot be read.
    """
    import base64
    try:
        # use a context manager so the file handle is always closed
        # (the original left the handle for the GC to reclaim)
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode(UTF8)
    except IOError:
        return None
def read_no_link(info_dir):
    """Return the set of paths listed in info/no_link and info/no_softlink.

    link() copies these files into environments instead of linking them.
    """
    return set(chain(yield_lines(join(info_dir, 'no_link')),
                     yield_lines(join(info_dir, 'no_softlink'))))
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """Expose the root environment's conda/activate/deactivate scripts
    inside *prefix* (symlinks on Unix, .bat redirect scripts on Windows).
    """
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda entry-point scripts from *root_dir* into *prefix*,
    replacing stale links and tolerating links that are in use.
    """
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                rm_rf(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            # a permission/exists error on a still-present link is benign
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================

def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard-linking from the package cache into *prefix*
    works, by hard-linking one known file and inspecting the result.

    Returns False when the filesystem refuses hard links or silently
    substitutes a symlink; the probe file is always removed.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}  # '<channel>::<dist>' -> dict(files=[...], dirs=[...], urls=[...])
fname_table_ = {}    # package path or its file:// URL -> channel prefix ('' for defaults)
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the cache is initialized
    # split a full URL into directory part and dist filename; a bare
    # filename carries no usable URL
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # an extracted copy only counts when its metadata files are intact
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    # NOTE(review): a no-op `if url: url = url` originally stood here; it
    # looks like a vestigial URL-normalization step.  Removed as dead
    # code -- verify against history if URL cleanup was intended.
    schannel = Channel(url).canonical_name
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # index by both the filesystem path and its file:// URL
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            # record the URL so future runs can attribute the channel
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
      - urls: the URLs used to refer to that package
      - files: the full pathnames to fetched copies of that package
      - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # sentinel entry: add_cached_package() calls back into this function,
    # and a non-empty dict stops the recursion
    package_cache_['@'] = None
    for pdir in context.pkgs_dirs:
        try:
            # urls.txt is appended over time (see add_cached_package), so
            # walk in reverse to see the most recent URLs first
            with open(join(pdir, 'urls.txt')) as fi:
                data = fi.read()
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            # no urls.txt in this cache directory; fall through to a scan
            pass
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix cached for *url*, or None if unknown."""
    package_cache()  # ensure the cache (and fname_table_) is populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).

    Returns:
        tuple: (pkgs directory, conflicting dist name or None)
    """
    rec = package_cache().get(dist)
    if rec:
        # already cached: reuse the directory of the first known copy
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched

def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    # a package counts as fetched when at least one tarball file is cached
    return set(dist for dist, rec in package_cache().items() if rec['files'])
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    # return the first recorded tarball path (implicit None when absent)
    for fn in package_cache().get(dist, {}).get('files', ()):
        return fn
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both lookup keys (path and file:// URL) before deleting
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
        if exists(fname):
            log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
        if exists(fname):
            log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    del package_cache_[dist]
# ------- package cache ----- extracted

def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    # a package counts as extracted when at least one directory is cached
    return set(dist for dist, rec in package_cache().items() if rec['dirs'])
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    # return the first recorded directory path (implicit None when absent)
    for fn in package_cache().get(dist, {}).get('dirs', ()):
        return fn
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
        if exists(fname):
            log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # the tarball is still cached; just forget the extracted copies
        rec['dirs'] = []
    else:
        # nothing left for this package at all
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]
    with FileLock(path):
        # extract into a .tmp dir first, then rename into place so a
        # partially-extracted package is never visible at *path*
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # re-register now that an extracted copy exists
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}  # prefix -> {dist name -> conda-meta record}
def load_linked_data(prefix, dist, rec=None, ignore_channels=False):
    """Load (or install) the conda-meta record for *dist* into the
    linked-data cache for *prefix*.

    Reads <prefix>/conda-meta/<dname>.json when *rec* is not supplied.
    Returns the normalized record, or None if it is missing or invalid.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # ensure the cache entry for this prefix exists before writing to it
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        # the filename must match the dist this record claims to describe
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # NOTE(review): `channel[0] != '<unknown>'` compares a single
        # character against a multi-character string, so it is always
        # True; this was probably meant to be `channel != '<unknown>'`.
        # Confirm intent before changing behavior.
        if not url or (url.startswith('file:') and channel[0] != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = Channel(url).url_channel_wtf
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    if ignore_channels:
        linked_data_[prefix][dname] = rec
    else:
        # key includes the channel prefix unless it is the default channel
        cprefix = '' if schannel == 'defaults' else schannel + '::'
        linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the linked-data cache for *prefix*; when *delete*
    is True also remove its conda-meta .json file from disk.
    """
    recs = linked_data_.get(prefix)
    if recs and dist in recs:
        del recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            rm_rf(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    # walk up the path one component at a time until a cached prefix is hit
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # *path* is a prefix and *dist* names a package inside it
                delete_linked_data(path, dist)
                return True
            else:
                # *path* itself is a cached prefix; drop the whole entry
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            # reached the filesystem root without finding a known prefix
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # linked_data() lazily populates the cache on first access
    return linked_data(prefix).get(dist)
def linked_data(prefix, ignore_channels=False):
    """
    Return a dictionary of the linked packages in prefix.

    The first call for a prefix scans <prefix>/conda-meta/*.json; later
    calls return the cached dictionary.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    load_linked_data(prefix, fn[:-5], ignore_channels=ignore_channels)
    return recs
def linked(prefix, ignore_channels=False):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    # keys of the linked-data cache are the canonical dist names
    return set(linked_data(prefix, ignore_channels=ignore_channels).keys())
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    Runs the package's pre-link script, links/copies every file listed in
    info/files, rewrites embedded prefixes, runs the post-link script, and
    finally writes the conda-meta record.

    Raises:
        LinkError: if the pre-link or post-link script fails.
        CondaOSError: if an individual file cannot be linked.
        PaddingError: if a binary prefix cannot be rewritten in place.
    """
    log.debug("linking package %s with link type %s", dist, linktype)
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r', pkgs_dir, prefix, dist, linktype)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        raise LinkError('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    # for the lock issue
    # may run into lock if prefix not exist
    if not isdir(prefix):
        os.makedirs(prefix)
    with DirectoryLock(prefix), FileLock(source_dir):
        for filepath in files:
            src = join(source_dir, filepath)
            dst = join(prefix, filepath)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.info("file exists, but clobbering: %r" % dst)
                rm_rf(dst)
            # prefix-bearing, no-link, and symlinked files must be copied
            lt = linktype
            if filepath in has_prefix_files or filepath in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                raise CondaOSError('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                                   (src, dst, lt, e))
        # rewrite the build-time placeholder prefix inside each marked file
        for filepath in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[filepath]
            try:
                update_prefix(join(prefix, filepath), prefix, placeholder, mode)
            except _PaddingError:
                raise PaddingError(dist, placeholder, len(placeholder))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if context.shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            raise LinkError("Error: post-link failed for: %s" % dist)
        # assemble and persist the conda-meta record
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
        if isfile(alt_files_path):
            # alt_files_path is a hack for noarch
            meta_dict['files'] = list(yield_lines(alt_files_path))
        else:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the pre-unlink script, removes menu items, deletes every file
    recorded for the package, drops the conda-meta record, and prunes any
    directories left empty.
    """
    with DirectoryLock(prefix):
        log.debug("unlinking package %s", dist)
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor directory (up to the prefix) for pruning
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # remove empty directories, deepest first
        for path in sorted(dst_dirs2, key=len, reverse=True):
            if isdir(path) and not os.listdir(path):
                rm_rf(path)
def messages(prefix):
    """Print the accumulated .messages.txt for *prefix* (if any), then
    delete it.  Messages go to stderr in --json mode so stdout stays
    valid JSON.
    """
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            fh = sys.stderr if context.json else sys.stdout
            fh.write(fi.read())
    except IOError:
        pass  # no messages file is the common case
    finally:
        rm_rf(path)
<|code_end|>
| conda/install.py
# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import struct
import subprocess
import sys
import tarfile
import traceback
from collections import namedtuple
from enum import Enum
from itertools import chain
from os.path import (abspath, basename, dirname, exists, isdir, isfile, islink, join, normcase,
normpath)
from . import CondaError
from .base.constants import UTF8
from .base.context import context
from .common.disk import exp_backoff_fn, rm_rf
from .common.url import path_to_url
from .exceptions import CondaOSError, LinkError, PaddingError
from .lock import DirectoryLock, FileLock
from .models.channel import Channel
from .utils import on_win
# conda-build compatibility
from .common.disk import delete_trash, move_to_trash, move_path_to_trash # NOQA
if on_win:
import ctypes
from ctypes import wintypes
CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
CreateHardLink.restype = wintypes.BOOL
CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.LPVOID]
try:
CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
CreateSymbolicLink.restype = wintypes.BOOL
CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
wintypes.DWORD]
except AttributeError:
CreateSymbolicLink = None
    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        # note the win32 API takes (new_name, existing_name) -- dst first
        if not CreateHardLink(dst, src, None):
            raise CondaOSError('win32 hard link failed')
    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            # left as None by the AttributeError guard above: this Windows
            # version does not expose CreateSymbolicLinkW
            raise CondaOSError('win32 soft link not supported')
        # the third argument flags a directory symlink
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise CondaOSError('win32 soft link failed')
    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            # tolerate an already-existing directory, re-raise anything else
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise
        # bat file redirect
        if not os.path.isfile(dst + '.bat'):
            with open(dst + '.bat', 'w') as f:
                f.write('@echo off\ncall "%s" %%*\n' % src)
        # TODO: probably need one here for powershell at some point
        # This one is for bash/cygwin/msys
        # set default shell to bash.exe when not provided, as that's most common
        if not shell:
            shell = "bash.exe"
        # technically these are "links" - but islink doesn't work on win
        if not os.path.isfile(dst):
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X (292 == 0o444)
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))')
class FileMode(Enum):
    """How a file's embedded prefix is rewritten: plain text substitution
    or length-preserving binary substitution (see replace_prefix)."""
    text = 'text'
    binary = 'binary'

    def __str__(self):
        return "%s" % self.value
# link types accepted by _link() / link()
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3
# human-readable names recorded in conda-meta 'link' entries
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize *src* at *dst* using the requested link type
    (hard link, soft link, or copy), with win32 fallbacks.

    Raises:
        CondaError: for an unrecognized *linktype*.
    """
    if linktype == LINK_HARD:
        if on_win:
            win_hard_link(src, dst)
        else:
            os.link(src, dst)
    elif linktype == LINK_SOFT:
        if on_win:
            win_soft_link(src, dst)
        else:
            os.symlink(src, dst)
    elif linktype == LINK_COPY:
        # copy relative symlinks as symlinks
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise CondaError("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """onerror callback for shutil.rmtree that logs (rather than raises)
    the common removal failures."""
    if exc_info[1].errno == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif exc_info[1].errno == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def yield_lines(path):
    """Generator function for lines in file. Empty generator if path does not exist.

    Args:
        path (str): path to file

    Returns:
        iterator: each line in file, not starting with '#'
    """
    try:
        with open(path) as fh:
            for line in fh:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                yield line
    except (IOError, OSError) as e:
        if e.errno == errno.ENOENT:
            # A missing file just means an empty generator.  PEP 479: a
            # `raise StopIteration` inside a generator (as the original
            # did) becomes a RuntimeError on Python 3.7+; `return` is the
            # correct way to end the generator.
            return
        else:
            raise
PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
# this is intentionally split into parts,
# such that running this program on itself
# will leave it unchanged
'anaconda3')
# backwards compatibility for conda-build
prefix_placeholder = PREFIX_PLACEHOLDER
def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filepaths to tuples(placeholder, FileMode)

    A line in `has_prefix` contains one of
      * filepath
      * placeholder mode filepath

    mode values are one of
      * text
      * binary
    """
    ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath'))

    def parse_line(line):
        # placeholder, filemode, filepath
        # shlex keeps quoted fields (e.g. paths with spaces) together;
        # the surrounding quotes are stripped afterwards
        parts = tuple(x.strip('"\'') for x in shlex.split(line, posix=False))
        if len(parts) == 1:
            # bare filepath: assume the standard placeholder and text mode
            return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0])
        elif len(parts) == 3:
            return ParseResult(parts[0], FileMode(parts[1]), parts[2])
        else:
            raise RuntimeError("Invalid has_prefix file at path: %s" % path)

    parsed_lines = (parse_line(line) for line in yield_lines(path))
    return {pr.filepath: (pr.placeholder, pr.filemode) for pr in parsed_lines}
class _PaddingError(Exception):
    # internal signal from binary_replace(): the new prefix is longer than
    # the placeholder, so a length-preserving replacement is impossible
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.

    Raises:
        _PaddingError: if `b` is longer than `a`, i.e. NUL padding cannot
            keep the overall data length unchanged.
    """
    if on_win:
        # on Windows only pyzzer-style entry points are rewritten;
        # anything else is returned unchanged
        if has_pyzzer_entry_point(data):
            return replace_pyzzer_entry_point_shebang(data, a, b)
        # currently we should skip replacement on Windows for things we don't understand.
        else:
            return data

    def replace(match):
        occurances = match.group().count(a)
        padding = (len(a) - len(b))*occurances
        if padding < 0:
            raise _PaddingError
        return match.group().replace(a, b) + b'\0' * padding

    original_data_len = len(data)
    # match the placeholder plus everything up to the next NUL terminator
    pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
    data = pat.sub(replace, data)
    # the NUL padding must keep the overall length identical
    assert len(data) == original_data_len
    return data
def replace_long_shebang(mode, data):
    """Rewrite a text file's shebang to `#!/usr/bin/env <name>` when the
    full interpreter path is longer than 127 bytes."""
    if mode == FileMode.text:
        shebang_match = SHEBANG_REGEX.match(data)
        if shebang_match:
            whole_shebang, executable, options = shebang_match.groups()
            if len(whole_shebang) > 127:
                # keep only the executable's basename; env resolves it
                executable_name = executable.decode(UTF8).split('/')[-1]
                new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode(UTF8))
                data = data.replace(whole_shebang, new_shebang.encode(UTF8))
    else:
        # TODO: binary shebangs exist; figure this out in the future if text works well
        log.debug("TODO: binary shebangs exist; figure this out in the future if text works well")
    return data
def has_pyzzer_entry_point(data):
    """Return True if *data* contains a ZIP end-of-central-directory
    record, i.e. looks like a pyzzer/distlib entry-point executable."""
    # b'PK\x05\x06' is the ZIP "end of central directory" signature
    return data.rfind(b'PK\x05\x06') != -1
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
    """Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
    which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
    We need to change the shebang.
    https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112 # NOQA
    """
    # Copyright (c) 2013 Vinay Sajip.
    #
    # Permission is hereby granted, free of charge, to any person obtaining a copy
    # of this software and associated documentation files (the "Software"), to deal
    # in the Software without restriction, including without limitation the rights
    # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    # copies of the Software, and to permit persons to whom the Software is
    # furnished to do so, subject to the following conditions:
    #
    # The above copyright notice and this permission notice shall be included in
    # all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    # THE SOFTWARE.
    launcher = shebang = None
    # locate the ZIP end-of-central-directory record
    pos = all_data.rfind(b'PK\x05\x06')
    if pos >= 0:
        end_cdr = all_data[pos + 12:pos + 20]
        cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
        # start of the embedded zip = EOCD position minus central
        # directory size and offset
        arc_pos = pos - cdr_size - cdr_offset
        data = all_data[arc_pos:]
        if arc_pos > 0:
            # the shebang sits between the launcher and the zip archive
            pos = all_data.rfind(b'#!', 0, arc_pos)
            if pos >= 0:
                shebang = all_data[pos:arc_pos]
                if pos > 0:
                    launcher = all_data[:pos]
        if data and shebang and launcher:
            if hasattr(placeholder, 'encode'):
                placeholder = placeholder.encode('utf-8')
            if hasattr(new_prefix, 'encode'):
                new_prefix = new_prefix.encode('utf-8')
            shebang = shebang.replace(placeholder, new_prefix)
            # reassemble launcher + rewritten shebang + original archive
            all_data = b"".join([launcher, shebang, data])
    return all_data
def replace_prefix(mode, data, placeholder, new_prefix):
if mode == FileMode.text:
data = data.replace(placeholder.encode(UTF8), new_prefix.encode(UTF8))
elif mode == FileMode.binary:
data = binary_replace(data, placeholder.encode(UTF8), new_prefix.encode(UTF8))
else:
raise RuntimeError("Invalid mode: %r" % mode)
return data
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
if on_win and mode == FileMode.text:
# force all prefix replacements to forward slashes to simplify need to escape backslashes
# replace with unix-style path separators
new_prefix = new_prefix.replace('\\', '/')
path = os.path.realpath(path)
with open(path, 'rb') as fi:
original_data = data = fi.read()
data = replace_prefix(mode, data, placeholder, new_prefix)
if not on_win:
data = replace_long_shebang(mode, data)
if data == original_data:
return
st = os.lstat(path)
with exp_backoff_fn(open, path, 'wb') as fo:
fo.write(data)
os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
dist = str(dist)
if dist.endswith(']'):
dist = dist.split('[', 1)[0]
if dist.endswith('.tar.bz2'):
dist = dist[:-8]
parts = dist.split('::', 1)
return 'defaults' if len(parts) < 2 else parts[0], parts[-1]
def dist2quad(dist):
channel, dist = dist2pair(dist)
parts = dist.rsplit('-', 2) + ['', '']
return (str(parts[0]), str(parts[1]), str(parts[2]), str(channel))
def dist2name(dist):
return dist2quad(dist)[0]
def name_dist(dist):
return dist2name(dist)
def dist2filename(dist, suffix='.tar.bz2'):
return dist2pair(dist)[1] + suffix
def dist2dirname(dist):
return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
"""
Create the conda metadata, in a given prefix, for a given package.
"""
# read info/index.json first
with open(join(info_dir, 'index.json')) as fi:
meta = json.load(fi)
# add extra info, add to our intenral cache
meta.update(extra_info)
if not meta.get('url'):
meta['url'] = read_url(dist)
# write into <env>/conda-meta/<dist>.json
meta_dir = join(prefix, 'conda-meta')
if not isdir(meta_dir):
os.makedirs(meta_dir)
with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
json.dump(meta, fo, indent=2, sort_keys=True)
if prefix in linked_data_:
load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if (f.lower().startswith('menu/') and
                      f.lower().endswith('.json'))]
    if not menu_files:
        return
    elif basename(abspath(prefix)).startswith('_'):
        # underscore-prefixed environments are treated as private/internal
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:  # menuinst is optional; any import failure is non-fatal
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:  # a broken menu entry should not abort the whole install
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
        name_dist(dist),
        action,
        'bat' if on_win else 'sh'))
    if not isfile(path):
        # package ships no script for this action: trivially successful
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            # cannot locate cmd.exe -- treat as a script failure
            return False
    else:
        # BSDs may not ship bash; fall back to /bin/sh there
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # expose the standard conda script environment variables
    env = os.environ.copy()
    env[str('ROOT_PREFIX')] = sys.prefix
    env[str('PREFIX')] = str(env_prefix or prefix)
    env[str('PKG_NAME')], env[str('PKG_VERSION')], env[str('PKG_BUILDNUM')], _ = dist2quad(dist)
    if action == 'pre-link':
        env[str('SOURCE_DIR')] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """First recorded URL for *dist* in the package cache, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    if not urls:
        return None
    return urls[0]
def read_icondata(source_dir):
    """Return the base64-encoded contents of ``info/icon.png`` under
    *source_dir*, or None when the icon is missing or unreadable."""
    import base64
    try:
        # context manager guarantees the handle is closed; the original
        # used a bare open(...).read() and leaked the file object to the GC
        with open(join(source_dir, 'info', 'icon.png'), 'rb') as fi:
            data = fi.read()
        return base64.b64encode(data).decode(UTF8)
    except IOError:
        pass
    return None
def read_no_link(info_dir):
    """Union of filenames listed in info/no_link and info/no_softlink."""
    names = set(yield_lines(join(info_dir, 'no_link')))
    names.update(yield_lines(join(info_dir, 'no_softlink')))
    return names
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """Link the conda/activate/deactivate entry points from *root_dir*
    into *prefix* so the environment can be activated like the root one."""
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        # Windows: write .bat redirect scripts instead of real symlinks
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Create (or refresh) the conda entry-point links in *prefix*/*where*
    pointing at the copies under *root_dir*, using *symlink_fn*."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)
    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                rm_rf(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            # tolerate permission-style failures only when a link already exists
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST))):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================
def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard links work between *pkgs_dir* and *prefix* by
    linking a known file into the prefix; the probe file is always removed.

    Returns True only when a genuine hard link (not a symlink) was created.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # ensure the cache tables are populated
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # a bare filename was passed; there is no channel URL to record
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # an extracted copy only counts if its info/ metadata is intact
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        # neither a tarball nor an extracted copy present: nothing to record
        return
    # (a no-op "if url: url = url" statement was removed here)
    schannel = Channel(url).canonical_name
    prefix = '' if schannel == 'defaults' else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # record the channel prefix under both the path and the file:// URL keys
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            # the cache dir may be read-only; recording the URL is best-effort
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion: add_cached_package() calls back into package_cache()
    package_cache_['@'] = None
    # (a leftover "# import pdb; pdb.set_trace()" debug line was removed here)
    for pdir in context.pkgs_dirs:
        try:
            data = open(join(pdir, 'urls.txt')).read()
            # newest URLs appear last in the file; walk them first
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Channel prefix recorded for *url* in the cache filename table, or None."""
    package_cache()  # make sure the tables have been built
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # already cached: reuse its current directory, no conflict possible
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # on pass 1 (p truthy) every dir conflicts, so report the
                # conflicting cached entry alongside the chosen directory
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return next(iter(files), None)
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both the path key and the file:// URL key from the table
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    # finally drop the cache record itself
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return next(iter(dirs), None)
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # the tarball is still cached; keep the record but clear the dirs
        rec['dirs'] = []
    else:
        # nothing cached at all any more; drop the record entirely
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]
    with FileLock(path):
        # extract into a temp dir, then rename, so a partially extracted
        # tree never masquerades as a complete package
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # refresh the cache record with the newly extracted directory
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}
def load_linked_data(prefix, dist, rec=None, ignore_channels=False):
    """Load (or accept as *rec*) the conda-meta record for *dist* inside
    *prefix*, normalize its url/channel fields, and store it in the
    in-memory linked-data cache.  Returns the record, or None when the
    metadata file is missing or inconsistent."""
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # caller supplied the record; just make sure the prefix cache exists
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        # filename must agree with the metadata file's own name
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # NOTE(review): channel[0] is a single character, so the comparison
        # with '<unknown>' is always True -- possibly intended to be
        # channel != '<unknown>'; confirm before changing.
        if not url or (url.startswith('file:') and channel[0] != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = Channel(url).url_channel_wtf
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    if ignore_channels:
        linked_data_[prefix][dname] = rec
    else:
        # cache key carries the channel prefix except for 'defaults'
        cprefix = '' if schannel == 'defaults' else schannel + '::'
        linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked-data cache for *prefix*;
    when *delete* is true, also remove its conda-meta JSON file on disk."""
    prefix_recs = linked_data_.get(prefix)
    if prefix_recs and dist in prefix_recs:
        del prefix_recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            rm_rf(meta_path)
def delete_linked_data_any(path):
    '''Here, path may be a complete prefix or a dist inside a prefix'''
    dist = ''
    # walk upward one path component at a time until we either hit a
    # cached prefix or run out of components
    while True:
        if path in linked_data_:
            if dist:
                # path resolved to a prefix; the split-off tail is the dist
                delete_linked_data(path, dist)
                return True
            else:
                # the original path itself is a prefix: drop its whole cache
                del linked_data_[path]
                return True
        path, dist = os.path.split(path)
        if not dist:
            return False
def load_meta(prefix, dist):
    """
    Return the install meta-data for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix, ignore_channels=False):
    """
    Return a dictionary of the linked packages in prefix.
    """
    # Manually memoized so it can be updated
    recs = linked_data_.get(prefix)
    if recs is None:
        recs = linked_data_[prefix] = {}
        meta_dir = join(prefix, 'conda-meta')
        if isdir(meta_dir):
            for fn in os.listdir(meta_dir):
                if fn.endswith('.json'):
                    # strip the '.json' suffix to recover the dist name
                    load_linked_data(prefix, fn[:-5], ignore_channels=ignore_channels)
    return recs
def linked(prefix, ignore_channels=False):
    """
    Return the set of canonical names of linked packages in prefix.
    """
    recs = linked_data(prefix, ignore_channels=ignore_channels)
    return set(recs)
def is_linked(prefix, dist):
    """
    Return the install metadata for a linked package in a prefix, or None
    if the package is not linked in the prefix.
    """
    # FIXME Functions that begin with `is_` should return True/False
    return load_meta(prefix, dist)
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).
    """
    log.debug("linking package %s with link type %s", dist, linktype)
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r', pkgs_dir, prefix, dist, linktype)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        raise LinkError('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    # for the lock issue
    # may run into lock if prefix not exist
    if not isdir(prefix):
        os.makedirs(prefix)
    with DirectoryLock(prefix), FileLock(source_dir):
        for filepath in files:
            src = join(source_dir, filepath)
            dst = join(prefix, filepath)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.info("file exists, but clobbering: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # files with embedded prefixes, no-link entries, and symlinks
            # must be copied rather than hard/soft linked
            if filepath in has_prefix_files or filepath in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                raise CondaOSError('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                                   (src, dst, lt, e))
        # rewrite the recorded build-time prefix in each prefix-bearing file
        for filepath in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[filepath]
            try:
                update_prefix(join(prefix, filepath), prefix, placeholder, mode)
            except _PaddingError:
                raise PaddingError(dist, placeholder, len(placeholder))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if context.shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            raise LinkError("Error: post-link failed for: %s" % dist)
        # assemble and persist the conda-meta record for this package
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
        if isfile(alt_files_path):
            # alt_files_path is a hack for noarch
            meta_dict['files'] = list(yield_lines(alt_files_path))
        else:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.
    """
    with DirectoryLock(prefix):
        log.debug("unlinking package %s", dist)
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # collect every ancestor directory (within the prefix) of the
        # removed files so empty ones can be cleaned up
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # remove empty directories
        for path in sorted(dst_dirs2, key=len, reverse=True):
            if isdir(path) and not os.listdir(path):
                rm_rf(path)
def messages(prefix):
    """Print the .messages.txt left in *prefix* by package scripts, then
    delete it.  Read errors are ignored; the file is removed regardless."""
    msg_path = join(prefix, '.messages.txt')
    try:
        with open(msg_path) as f:
            stream = sys.stderr if context.json else sys.stdout
            stream.write(f.read())
    except IOError:
        pass
    finally:
        rm_rf(msg_path)
| conda/install.py
--- a/conda/install.py
+++ b/conda/install.py
@@ -281,7 +281,7 @@ def replace(match):
def replace_long_shebang(mode, data):
- if mode is FileMode.text:
+ if mode == FileMode.text:
shebang_match = SHEBANG_REGEX.match(data)
if shebang_match:
whole_shebang, executable, options = shebang_match.groups()
@@ -350,7 +350,7 @@ def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
def replace_prefix(mode, data, placeholder, new_prefix):
- if mode is FileMode.text:
+ if mode == FileMode.text:
data = data.replace(placeholder.encode(UTF8), new_prefix.encode(UTF8))
elif mode == FileMode.binary:
data = binary_replace(data, placeholder.encode(UTF8), new_prefix.encode(UTF8))
@@ -360,7 +360,7 @@ def replace_prefix(mode, data, placeholder, new_prefix):
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
- if on_win and mode is FileMode.text:
+ if on_win and mode == FileMode.text:
# force all prefix replacements to forward slashes to simplify need to escape backslashes
# replace with unix-style path separators
new_prefix = new_prefix.replace('\\', '/') |
--mkdir no longer works
After upgrading to `conda=4.2`:
```
C:\> conda install --mkdir -n name python=3.5
CondaEnvironmentNotFoundError: Could not find environment: name .
You can list all discoverable environments with `conda info --envs`.
```
| conda/cli/install.py
<|code_start|>
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import errno
import logging
import os
import shutil
import tarfile
import tempfile
from difflib import get_close_matches
from os.path import abspath, basename, exists, isdir, join
from .. import text_type
from .._vendor.auxlib.ish import dals
from ..api import get_index
from ..base.constants import ROOT_ENV_NAME
from ..base.context import check_write, context
from ..cli import common
from ..cli.find_commands import find_executable
from ..exceptions import (CondaAssertionError, CondaEnvironmentNotFoundError,
CondaFileNotFoundError, CondaIOError, CondaImportError, CondaOSError,
CondaRuntimeError, CondaSystemExit, CondaValueError,
DirectoryNotFoundError, DryRunExit, LockError, NoPackagesFoundError,
PackageNotFoundError, TooManyArgumentsError, UnsatisfiableError)
from ..install import is_linked, linked as install_linked, name_dist
from ..misc import append_env, clone_env, explicit, touch_nonadmin
from ..plan import (add_defaults_to_specs, display_actions, execute_actions, get_pinned_specs,
install_actions, is_root_prefix, nothing_to_do, revert_actions)
from ..resolve import Resolve
from ..utils import on_win
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Install every ``.tar.bz2`` conda package found inside the tarball
    at *tar_path* into *prefix*.

    Raises CondaFileNotFoundError when *tar_path* does not exist.
    """
    if not exists(tar_path):
        raise CondaFileNotFoundError(tar_path)
    tmp_dir = tempfile.mkdtemp()
    try:
        # context manager closes the archive even when extraction fails;
        # the original leaked the handle (and the temp dir) on error
        with tarfile.open(tar_path, 'r') as t:
            t.extractall(path=tmp_dir)
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            for fn in files:
                if fn.endswith('.tar.bz2'):
                    paths.append(join(root, fn))
        explicit(paths, prefix, verbose=verbose)
    finally:
        # always clean up the scratch directory
        shutil.rmtree(tmp_dir)
def check_prefix(prefix, json=False):
    """Validate a prospective environment prefix; raises CondaValueError
    (carrying *json*) when the name or the location is unusable."""
    name = basename(prefix)
    error = None
    if name.startswith('.'):
        error = "environment name cannot start with '.': %s" % name
    if name == ROOT_ENV_NAME:
        error = "'%s' is a reserved environment name" % name
    if exists(prefix):
        # an existing directory without conda-meta may be adopted as-is
        if isdir(prefix) and 'conda-meta' not in os.listdir(prefix):
            return None
        error = "prefix already exists: %s" % prefix
    if error is not None:
        raise CondaValueError(error, json)
def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None):
    """Clone an existing environment (given as a path or a name in
    *src_arg*) into *dst_prefix*, optionally emitting a JSON summary."""
    if os.sep in src_arg:
        # a path was supplied; it must exist on disk
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            raise DirectoryNotFoundError(src_arg, 'no such directory: %s' % src_arg, json)
    else:
        # a name was supplied; let context resolve it to a prefix
        src_prefix = context.clone_src
    if not json:
        print("Source: %s" % src_prefix)
        print("Destination: %s" % dst_prefix)
    with common.json_progress_bars(json=json and not quiet):
        actions, untracked_files = clone_env(src_prefix, dst_prefix,
                                             verbose=not json,
                                             quiet=quiet,
                                             index_args=index_args)
    if json:
        common.stdout_json_success(
            actions=actions,
            untracked_files=list(untracked_files),
            src_prefix=src_prefix,
            dst_prefix=dst_prefix
        )
def print_activate(arg):
    """Return the post-create help text telling the user how to activate
    and deactivate the environment named/located at *arg*."""
    if on_win:
        message = dals("""
        #
        # To activate this environment, use:
        # > activate %s
        #
        # To deactivate this environment, use:
        # > deactivate %s
        #
        # * for power-users using bash, you must source
        #
        """)
    else:
        message = dals("""
        #
        # To activate this environment, use:
        # > source activate %s
        #
        # To deactivate this environment, use:
        # > source deactivate %s
        #
        """)
    return message % (arg, arg)
def get_revision(arg, json=False):
    """Parse *arg* as an integer revision number.

    Raises CondaValueError when *arg* is not an integer.  (The original
    constructed the exception but forgot the ``raise``, silently
    returning None for invalid input.)
    """
    try:
        return int(arg)
    except ValueError:
        raise CondaValueError("expected revision number, not: '%s'" % arg, json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Fix for the `--mkdir` regression: resolving the prefix through
    ``context.prefix_w_legacy_search`` raises CondaEnvironmentNotFoundError
    for a not-yet-existing environment, so `conda install --mkdir -n name`
    failed before the mkdir handling below was ever reached.  When --mkdir
    is requested we now resolve via ``context.prefix`` (as `create` does),
    which never requires the directory to exist.
    """
    context.validate_all()
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    if newenv or getattr(args, 'mkdir', False):
        prefix = context.prefix
    else:
        prefix = context.prefix_w_legacy_search
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and is_root_prefix(prefix):
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in root env")
    if isupdate and not (args.file or args.all or args.packages):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)
    linked = install_linked(prefix)
    lnames = {name_dist(d) for d in linked}
    if isupdate and not args.all:
        # every named package must already be installed to be updatable
        for name in args.packages:
            common.arg2spec(name, json=context.json, update=True)
            if name not in lnames:
                raise PackageNotFoundError(name, "Package '%s' is not installed in %s" %
                                           (name, prefix))
    if newenv and not args.no_default_packages:
        default_packages = list(context.create_default_packages)
        # Override defaults if they are specified at the command line
        for default_pkg in context.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []
    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': args.channel or (),
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }
    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=context.json))
        if '@EXPLICIT' in specs:
            # explicit spec files bypass the solver entirely
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    elif getattr(args, 'all', False):
        if not linked:
            raise PackageNotFoundError('', "There are no packages installed in the "
                                           "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=context.json))
    if isinstall and args.revision:
        # validate early so a bad revision number fails before the solve
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args.packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            explicit(args.packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")
    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not context.quiet)
            return
    if newenv and args.clone:
        package_diff = set(args.packages) - set(default_packages)
        if package_diff:
            raise TooManyArgumentsError(0, len(package_diff), list(package_diff),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args)
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
        return
    index = get_index(channel_urls=index_args['channel_urls'], prepend=index_args['prepend'],
                      platform=None, use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'], unknown=index_args['unknown'],
                      prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)
    add_defaults_to_specs(r, linked, specs, update=isupdate)
    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name]
            channel_inst = [m['channel'] for m in installed_metadata if m['name'] == name]
            try:
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
                assert len(channel_inst) == 1, name
            except AssertionError as e:
                raise CondaAssertionError(text_type(e))
            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue
            latest = pkgs[-1]
            if all([latest.version == vers_inst[0],
                    latest.build_number == build_inst[0],
                    latest.channel == channel_inst[0]]):
                args.packages.remove(name)
        if not args.packages:
            from .main_list import print_packages
            if not context.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return
    if args.force:
        args.no_deps = True
    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None
    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                raise CondaOSError("Error: could not create directory: %s" % prefix)
        else:
            raise CondaEnvironmentNotFoundError(prefix)
    try:
        if isinstall and args.revision:
            actions = revert_actions(prefix, get_revision(args.revision), index)
        else:
            with common.json_progress_bars(json=context.json and not context.quiet):
                actions = install_actions(prefix, index, specs,
                                          force=args.force,
                                          only_names=only_names,
                                          pinned=args.pinned,
                                          always_copy=context.always_copy,
                                          minimal_hint=args.alt_hint,
                                          update_deps=context.update_dependencies)
    except NoPackagesFoundError as e:
        error_message = [e.args[0]]
        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not context.json:
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)
            return install(args, parser, command=command)
        else:
            # suggest close name matches for the packages that were not found
            packages = {index[fn]['name'] for fn in index}
            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message.append("\n\nClose matches found; did you mean one of these?\n")
                error_message.append("\n %s: %s" % (pkg, ', '.join(close)))
                nfound += 1
            error_message.append('\n\nYou can search for packages on anaconda.org with')
            error_message.append('\n\n anaconda search -t conda %s' % pkg)
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message.append('\n\n(and similarly for the other packages)')
            if not find_executable('anaconda', include_others=False):
                error_message.append('\n\nYou may need to install the anaconda-client')
                error_message.append(' command line client with')
                error_message.append('\n\n conda install anaconda-client')
            pinned_specs = get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message.append("\n\nNote that you have pinned specs in %s:" % path)
                error_message.append("\n\n %r" % pinned_specs)
            error_message = ''.join(error_message)
            raise PackageNotFoundError('', error_message)
    except (UnsatisfiableError, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        if e.args and 'could not import' in e.args[0]:
            raise CondaImportError(text_type(e))
        raise
    if nothing_to_do(actions) and not newenv:
        from .main_list import print_packages
        if not context.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    elif newenv:
        # needed in the case of creating an empty env
        from ..instructions import LINK, UNLINK, SYMLINK_CONDA
        if not actions[LINK] and not actions[UNLINK]:
            actions[SYMLINK_CONDA] = [context.root_dir]
    if not context.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        display_actions(actions, index, show_channel_urls=context.show_channel_urls)
    if command in {'install', 'update'}:
        check_write(command, prefix)
    if not context.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        raise DryRunExit()
    with common.json_progress_bars(json=context.json and not context.quiet):
        try:
            execute_actions(actions, index, verbose=not context.quiet)
            if not (command == 'update' and args.all):
                try:
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise CondaIOError("Can't write the history file", e)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                raise LockError('Already locked: %s' % text_type(e))
            else:
                raise CondaRuntimeError('RuntimeError: %s' % e)
        except SystemExit as e:
            raise CondaSystemExit('Exiting', e)
    if newenv:
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
    if context.json:
        common.stdout_json_success(actions=actions)
<|code_end|>
| conda/cli/install.py
# (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import errno
import logging
import os
import shutil
import tarfile
import tempfile
from difflib import get_close_matches
from os.path import abspath, basename, exists, isdir, join
from .. import text_type
from .._vendor.auxlib.ish import dals
from ..api import get_index
from ..base.constants import ROOT_ENV_NAME
from ..base.context import check_write, context
from ..cli import common
from ..cli.find_commands import find_executable
from ..exceptions import (CondaAssertionError, CondaEnvironmentNotFoundError,
CondaFileNotFoundError, CondaIOError, CondaImportError, CondaOSError,
CondaRuntimeError, CondaSystemExit, CondaValueError,
DirectoryNotFoundError, DryRunExit, LockError, NoPackagesFoundError,
PackageNotFoundError, TooManyArgumentsError, UnsatisfiableError)
from ..install import is_linked, linked as install_linked, name_dist
from ..misc import append_env, clone_env, explicit, touch_nonadmin
from ..plan import (add_defaults_to_specs, display_actions, execute_actions, get_pinned_specs,
install_actions, is_root_prefix, nothing_to_do, revert_actions)
from ..resolve import Resolve
from ..utils import on_win
log = logging.getLogger(__name__)
def install_tar(prefix, tar_path, verbose=False):
    """Install all conda packages (``*.tar.bz2``) bundled inside a plain tarball.

    The tarball at ``tar_path`` is extracted into a temporary directory, every
    ``.tar.bz2`` file found at any depth is installed into ``prefix`` via
    ``explicit()``, and the scratch directory is removed even on failure.

    Raises:
        CondaFileNotFoundError: if ``tar_path`` does not exist.
    """
    if not exists(tar_path):
        raise CondaFileNotFoundError(tar_path)
    tmp_dir = tempfile.mkdtemp()
    try:
        # Close the tarfile deterministically; previously it was left open if
        # extractall() raised, and tmp_dir leaked if any step below failed.
        with tarfile.open(tar_path, 'r') as t:
            t.extractall(path=tmp_dir)
        paths = []
        for root, dirs, files in os.walk(tmp_dir):
            paths.extend(join(root, fn) for fn in files if fn.endswith('.tar.bz2'))
        explicit(paths, prefix, verbose=verbose)
    finally:
        # Guarantee cleanup of the scratch directory on success and failure.
        shutil.rmtree(tmp_dir)
def check_prefix(prefix, json=False):
    """Validate a prospective environment prefix; raise CondaValueError on problems.

    Later checks take precedence: a reserved/dotted name is reported unless the
    prefix already exists, in which case the existence error wins (except for a
    directory with no conda-meta, which is silently accepted).
    """
    env_name = basename(prefix)
    problem = None
    if env_name.startswith('.'):
        problem = "environment name cannot start with '.': %s" % env_name
    if env_name == ROOT_ENV_NAME:
        problem = "'%s' is a reserved environment name" % env_name
    if exists(prefix):
        # A pre-existing directory without conda metadata is fine to reuse.
        if isdir(prefix) and 'conda-meta' not in os.listdir(prefix):
            return None
        problem = "prefix already exists: %s" % prefix
    if problem:
        raise CondaValueError(problem, json)
def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None):
    """Clone an existing environment (given by path or by name) into dst_prefix."""
    # A path separator means the caller supplied a filesystem path; otherwise
    # the argument is an environment name resolved through the context.
    if os.sep not in src_arg:
        src_prefix = context.clone_src
    else:
        src_prefix = abspath(src_arg)
        if not isdir(src_prefix):
            raise DirectoryNotFoundError(src_arg, 'no such directory: %s' % src_arg, json)

    if not json:
        print("Source: %s" % src_prefix)
        print("Destination: %s" % dst_prefix)

    show_json_bars = json and not quiet
    with common.json_progress_bars(json=show_json_bars):
        actions, untracked_files = clone_env(
            src_prefix, dst_prefix, verbose=not json, quiet=quiet, index_args=index_args)

    if json:
        common.stdout_json_success(actions=actions,
                                   untracked_files=list(untracked_files),
                                   src_prefix=src_prefix,
                                   dst_prefix=dst_prefix)
def print_activate(arg):
    """Return the post-create banner telling the user how to (de)activate *arg*.

    *arg* is an environment name or prefix path and is interpolated twice into
    a platform-specific template (plain ``activate`` on Windows, ``source
    activate`` elsewhere).
    """
    if on_win:
        message = dals("""
        #
        # To activate this environment, use:
        # > activate %s
        #
        # To deactivate this environment, use:
        # > deactivate %s
        #
        # * for power-users using bash, you must source
        #
        """)
    else:
        message = dals("""
        #
        # To activate this environment, use:
        # > source activate %s
        #
        # To deactivate this environment, use:
        # > source deactivate %s
        #
        """)
    return message % (arg, arg)
def get_revision(arg, json=False):
    """Parse *arg* as an integer revision number.

    Raises:
        CondaValueError: when *arg* is not an integer.  (Bug fix: the original
        code constructed the exception but never raised it, so callers
        silently received ``None`` for a malformed revision.)
    """
    try:
        return int(arg)
    except ValueError:
        raise CondaValueError("expected revision number, not: '%s'" % arg, json)
def install(args, parser, command='install'):
    """
    conda install, conda update, and conda create

    Shared implementation for three commands; *command* selects behavior
    ('create' makes a new env, 'update' refreshes installed packages,
    'install' adds new ones).  Flow: validate arguments; collect package
    specs from --file / CLI args / linked packages; short-circuit for the
    explicit-spec, .tar.bz2, plain-tar, and --clone modes; otherwise build
    an action plan against the channel index, confirm it, and execute it.
    """
    context.validate_all()
    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        common.ensure_name_or_prefix(args, command)
    # With --mkdir (or for create) take the prefix as given instead of
    # searching existing envs dirs for it.
    prefix = context.prefix if newenv or args.mkdir else context.prefix_w_legacy_search
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and is_root_prefix(prefix):
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in root env")
    if isupdate and not (args.file or args.all or args.packages):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    # Currently linked (installed) packages in the target prefix.
    linked = install_linked(prefix)
    lnames = {name_dist(d) for d in linked}

    if isupdate and not args.all:
        # 'conda update <pkg>' only makes sense for packages already installed.
        for name in args.packages:
            common.arg2spec(name, json=context.json, update=True)
            if name not in lnames:
                raise PackageNotFoundError(name, "Package '%s' is not installed in %s" %
                                           (name, prefix))

    if newenv and not args.no_default_packages:
        default_packages = list(context.create_default_packages)
        # Override defaults if they are specified at the command line
        for default_pkg in context.create_default_packages:
            if any(pkg.split('=')[0] == default_pkg for pkg in args.packages):
                default_packages.remove(default_pkg)
        args.packages.extend(default_packages)
    else:
        default_packages = []

    common.ensure_use_local(args)
    common.ensure_override_channels_requires_channel(args)
    # Arguments forwarded to index fetching and to clone/explicit helpers.
    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': args.channel or (),
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    specs = []
    if args.file:
        for fpath in args.file:
            specs.extend(common.specs_from_url(fpath, json=context.json))
        # A spec file produced by 'conda list --explicit' bypasses the solver.
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    elif getattr(args, 'all', False):
        if not linked:
            raise PackageNotFoundError('', "There are no packages installed in the "
                                       "prefix %s" % prefix)
        specs.extend(nm for nm in lnames)
    specs.extend(common.specs_from_args(args.packages, json=context.json))

    if isinstall and args.revision:
        # Validate the revision argument early; the value is re-read later.
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args.packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    # Package filenames on the command line: all-or-nothing.
    num_cp = sum(s.endswith('.tar.bz2') for s in args.packages)
    if num_cp:
        if num_cp == len(args.packages):
            explicit(args.packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    # handle tar file containing conda packages
    if len(args.packages) == 1:
        tar_path = args.packages[0]
        if tar_path.endswith('.tar'):
            install_tar(prefix, tar_path, verbose=not context.quiet)
            return

    if newenv and args.clone:
        # --clone accepts no extra package arguments beyond the defaults.
        package_diff = set(args.packages) - set(default_packages)
        if package_diff:
            raise TooManyArgumentsError(0, len(package_diff), list(package_diff),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args)
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))
        return

    # Fetch the channel index and prepare the resolver.
    index = get_index(channel_urls=index_args['channel_urls'], prepend=index_args['prepend'],
                      platform=None, use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'], unknown=index_args['unknown'],
                      prefix=prefix)
    r = Resolve(index)
    ospecs = list(specs)  # keep the pre-augmented specs for later reporting
    add_defaults_to_specs(r, linked, specs, update=isupdate)

    # Don't update packages that are already up-to-date
    if isupdate and not (args.all or args.force):
        orig_packages = args.packages[:]
        installed_metadata = [is_linked(prefix, dist) for dist in linked]
        for name in orig_packages:
            vers_inst = [m['version'] for m in installed_metadata if m['name'] == name]
            build_inst = [m['build_number'] for m in installed_metadata if m['name'] == name]
            channel_inst = [m['channel'] for m in installed_metadata if m['name'] == name]
            try:
                # Exactly one installed record per requested name is expected.
                assert len(vers_inst) == 1, name
                assert len(build_inst) == 1, name
                assert len(channel_inst) == 1, name
            except AssertionError as e:
                raise CondaAssertionError(text_type(e))

            pkgs = sorted(r.get_pkgs(name))
            if not pkgs:
                # Shouldn't happen?
                continue

            latest = pkgs[-1]
            if all([latest.version == vers_inst[0],
                    latest.build_number == build_inst[0],
                    latest.channel == channel_inst[0]]):
                # Already at the newest available build; drop from the request.
                args.packages.remove(name)
        if not args.packages:
            from .main_list import print_packages

            if not context.json:
                regex = '^(%s)$' % '|'.join(orig_packages)
                print('# All requested packages already installed.')
                print_packages(prefix, regex)
            else:
                common.stdout_json_success(
                    message='All requested packages already installed.')
            return

    if args.force:
        args.no_deps = True

    if args.no_deps:
        only_names = set(s.split()[0] for s in ospecs)
    else:
        only_names = None

    if not isdir(prefix) and not newenv:
        if args.mkdir:
            try:
                os.makedirs(prefix)
            except OSError:
                raise CondaOSError("Error: could not create directory: %s" % prefix)
        else:
            raise CondaEnvironmentNotFoundError(prefix)

    try:
        if isinstall and args.revision:
            actions = revert_actions(prefix, get_revision(args.revision), index)
        else:
            with common.json_progress_bars(json=context.json and not context.quiet):
                actions = install_actions(prefix, index, specs,
                                          force=args.force,
                                          only_names=only_names,
                                          pinned=args.pinned,
                                          always_copy=context.always_copy,
                                          minimal_hint=args.alt_hint,
                                          update_deps=context.update_dependencies)
    except NoPackagesFoundError as e:
        error_message = [e.args[0]]

        if isupdate and args.all:
            # Packages not found here just means they were installed but
            # cannot be found any more. Just skip them.
            if not context.json:
                # NOTE(review): error_message is a list here, so this prints
                # its repr — possibly intended to be e.args[0]; confirm.
                print("Warning: %s, skipping" % error_message)
            else:
                # Not sure what to do here
                pass
            args._skip = getattr(args, '_skip', ['anaconda'])
            for pkg in e.pkgs:
                p = pkg.split()[0]
                if p in args._skip:
                    # Avoid infinite recursion. This can happen if a spec
                    # comes from elsewhere, like --file
                    raise
                args._skip.append(p)

            # Retry with the unfindable packages recorded in args._skip.
            return install(args, parser, command=command)
        else:
            packages = {index[fn]['name'] for fn in index}

            # Suggest close name matches for each not-found package.
            nfound = 0
            for pkg in sorted(e.pkgs):
                pkg = pkg.split()[0]
                if pkg in packages:
                    continue
                close = get_close_matches(pkg, packages, cutoff=0.7)
                if not close:
                    continue
                if nfound == 0:
                    error_message.append("\n\nClose matches found; did you mean one of these?\n")
                error_message.append("\n %s: %s" % (pkg, ', '.join(close)))
                nfound += 1
            # NOTE(review): 'pkg' is reused from the loop above; assumes
            # e.pkgs is non-empty — confirm against NoPackagesFoundError.
            error_message.append('\n\nYou can search for packages on anaconda.org with')
            error_message.append('\n\n anaconda search -t conda %s' % pkg)
            if len(e.pkgs) > 1:
                # Note this currently only happens with dependencies not found
                error_message.append('\n\n(and similarly for the other packages)')

            if not find_executable('anaconda', include_others=False):
                error_message.append('\n\nYou may need to install the anaconda-client')
                error_message.append(' command line client with')
                error_message.append('\n\n conda install anaconda-client')

            pinned_specs = get_pinned_specs(prefix)
            if pinned_specs:
                path = join(prefix, 'conda-meta', 'pinned')
                error_message.append("\n\nNote that you have pinned specs in %s:" % path)
                error_message.append("\n\n %r" % pinned_specs)

            error_message = ''.join(error_message)
            raise PackageNotFoundError('', error_message)
    except (UnsatisfiableError, SystemExit) as e:
        # Unsatisfiable package specifications/no such revision/import error
        if e.args and 'could not import' in e.args[0]:
            raise CondaImportError(text_type(e))
        raise

    if nothing_to_do(actions) and not newenv:
        from .main_list import print_packages

        if not context.json:
            regex = '^(%s)$' % '|'.join(s.split()[0] for s in ospecs)
            print('\n# All requested packages already installed.')
            print_packages(prefix, regex)
        else:
            common.stdout_json_success(
                message='All requested packages already installed.')
        return
    elif newenv:
        # needed in the case of creating an empty env
        from ..instructions import LINK, UNLINK, SYMLINK_CONDA
        if not actions[LINK] and not actions[UNLINK]:
            actions[SYMLINK_CONDA] = [context.root_dir]

    if not context.json:
        print()
        print("Package plan for installation in environment %s:" % prefix)
        display_actions(actions, index, show_channel_urls=context.show_channel_urls)

    if command in {'install', 'update'}:
        check_write(command, prefix)

    if not context.json:
        common.confirm_yn(args)
    elif args.dry_run:
        common.stdout_json_success(actions=actions, dry_run=True)
        raise DryRunExit()

    with common.json_progress_bars(json=context.json and not context.quiet):
        try:
            execute_actions(actions, index, verbose=not context.quiet)
            if not (command == 'update' and args.all):
                try:
                    # Record the request in the env history for later reverts.
                    with open(join(prefix, 'conda-meta', 'history'), 'a') as f:
                        f.write('# %s specs: %s\n' % (command, specs))
                except IOError as e:
                    if e.errno == errno.EACCES:
                        log.debug("Can't write the history file")
                    else:
                        raise CondaIOError("Can't write the history file", e)
        except RuntimeError as e:
            if len(e.args) > 0 and "LOCKERROR" in e.args[0]:
                raise LockError('Already locked: %s' % text_type(e))
            else:
                raise CondaRuntimeError('RuntimeError: %s' % e)
        except SystemExit as e:
            raise CondaSystemExit('Exiting', e)

    if newenv:
        append_env(prefix)
        touch_nonadmin(prefix)
        if not context.json:
            print(print_activate(args.name if args.name else prefix))

    if context.json:
        common.stdout_json_success(actions=actions)
| conda/cli/install.py
--- a/conda/cli/install.py
+++ b/conda/cli/install.py
@@ -142,7 +142,7 @@ def install(args, parser, command='install'):
isinstall = bool(command == 'install')
if newenv:
common.ensure_name_or_prefix(args, command)
- prefix = context.prefix if newenv else context.prefix_w_legacy_search
+ prefix = context.prefix if newenv or args.mkdir else context.prefix_w_legacy_search
if newenv:
check_prefix(prefix, json=context.json)
if context.force_32bit and is_root_prefix(prefix): |
Conda 4.2.x attempts to create lock files in readonly file-based channels and does not respect CONDA_ENVS_PATH
I am getting warnings and eventually an error after upgrading from 4.1.11 to 4.2.7 relating to lock files and packages attempting to write to shared read-only directories in our continuous build.
I modified `/shared/path/to/miniconda/lib/python2.7/site-packages/conda/lock.py` to find out where conda 4.2.x was putting lock files and found the following:
`WARNING conda.lock:touch(55): Failed to create lock, do not run conda in parallel processes [errno 13] /shared/path/to/vendor/conda/linux-64.pid24915.conda_lock`
`WARNING conda.lock:touch(55): Failed to create lock, do not run conda in parallel processes [errno 13] /shared/path/to/miniconda/pkgs/python-dateutil-2.4.2-py27_0.tar.bz2.pid24915.conda_lock`
`/shared/path/to` is read-only because it is used by multiple build-agents concurrently.
Additionally, I encountered the following error:
`CondaRuntimeError: Runtime error: RuntimeError: Runtime error: Could not open u'/shared/path/to/miniconda/pkgs/python-dateutil-2.4.2-py27_0.tar.bz2.part' for writing ([Errno 13] Permission denied: u'/shared/path/to/miniconda/pkgs/python-dateutil-2.4.2-py27_0.tar.bz2.part').`
In conda <= 4.1.11 we were able to set the `CONDA_ENVS_PATH` to have a shared installation of conda, but with separate package caches/lock files per build agent on our continuous build server, in order to avoid concurrent use of the package cache.
Two questions:
- How do we disable lock files being placed in read-only shared file-based channels.
- Does `CONDA_ENVS_PATH` no longer override the package cache/lock file directory from the root?
Conda 4.2.x attempts to create lock files in readonly file-based channels and does not respect CONDA_ENVS_PATH
I am getting warnings and eventually an error after upgrading from 4.1.11 to 4.2.7 relating to lock files and packages attempting to write to shared read-only directories in our continuous build.
I modified `/shared/path/to/miniconda/lib/python2.7/site-packages/conda/lock.py` to find out where conda 4.2.x was putting lock files and found the following:
`WARNING conda.lock:touch(55): Failed to create lock, do not run conda in parallel processes [errno 13] /shared/path/to/vendor/conda/linux-64.pid24915.conda_lock`
`WARNING conda.lock:touch(55): Failed to create lock, do not run conda in parallel processes [errno 13] /shared/path/to/miniconda/pkgs/python-dateutil-2.4.2-py27_0.tar.bz2.pid24915.conda_lock`
`/shared/path/to` is read-only because it is used by multiple build-agents concurrently.
Additionally, I encountered the following error:
`CondaRuntimeError: Runtime error: RuntimeError: Runtime error: Could not open u'/shared/path/to/miniconda/pkgs/python-dateutil-2.4.2-py27_0.tar.bz2.part' for writing ([Errno 13] Permission denied: u'/shared/path/to/miniconda/pkgs/python-dateutil-2.4.2-py27_0.tar.bz2.part').`
In conda <= 4.1.11 we were able to set the `CONDA_ENVS_PATH` to have a shared installation of conda, but with separate package caches/lock files per build agent on our continuous build server, in order to avoid concurrent use of the package cache.
Two questions:
- How do we disable lock files being placed in read-only shared file-based channels.
- Does `CONDA_ENVS_PATH` no longer override the package cache/lock file directory from the root?
| conda/base/context.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
import sys
from collections import Sequence
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.decorators import memoizedproperty
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.path import expand
from ..common.compat import iteritems, odict
from ..common.configuration import (Configuration, LoadError, MapParameter, PrimitiveParameter,
SequenceParameter)
from ..common.disk import try_write
from ..common.url import has_scheme, path_to_url, split_scheme_auth_token, urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
try:
from cytoolz.itertoolz import concat, concatv
except ImportError:
from .._vendor.toolz.itertoolz import concat, concatv
log = getLogger(__name__)
try:
import cio_test # NOQA
except ImportError:
log.info("No cio_test package found.")
# Map sys.platform values to conda's canonical platform names.
_platform_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
}
# platform.machine() values on Linux that are not x86; these get their own
# subdir names (e.g. 'linux-armv7l') instead of bit-width-based ones.
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
# Pointer width (bits) -> x86 architecture name.
_arch_names = {
    32: 'x86',
    64: 'x86_64',
}
def channel_alias_validation(value):
    """Configuration validator: a non-empty channel_alias must carry a scheme.

    Returns True when valid, or an error-message string otherwise.
    """
    if not value or has_scheme(value):
        return True
    return "channel_alias value '%s' must have scheme/protocol." % value
class Context(Configuration):
    """Global conda configuration.

    The *Parameter class attributes below declaratively define every
    recognized setting; the Configuration base class resolves their values
    from the sources on SEARCH_PATH plus argparse args.  Properties compute
    derived runtime values (platform, directories, Channel objects).
    """

    add_pip_as_python_dependency = PrimitiveParameter(True)
    allow_softlinks = PrimitiveParameter(True)
    auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
    changeps1 = PrimitiveParameter(True)
    create_default_packages = SequenceParameter(string_types)
    disallow = SequenceParameter(string_types)
    force_32bit = PrimitiveParameter(False)
    track_features = SequenceParameter(string_types)
    use_pip = PrimitiveParameter(True)
    _root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))

    # connection details
    ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
    client_tls_cert = PrimitiveParameter('', aliases=('client_cert',))
    client_tls_cert_key = PrimitiveParameter('', aliases=('client_cert_key',))
    proxy_servers = MapParameter(string_types)

    add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
    _channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS,
                                        aliases=('channel_alias',),
                                        validation=channel_alias_validation)

    # channels
    channels = SequenceParameter(string_types, default=('defaults',))
    _migrated_channel_aliases = SequenceParameter(string_types,
                                                  aliases=('migrated_channel_aliases',))  # TODO: also take a list of strings # NOQA
    _default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS,
                                          aliases=('default_channels',))
    _custom_channels = MapParameter(string_types, aliases=('custom_channels',))
    migrated_custom_channels = MapParameter(string_types)  # TODO: also take a list of strings
    _custom_multichannels = MapParameter(Sequence, aliases=('custom_multichannels',))

    # command line
    always_copy = PrimitiveParameter(False, aliases=('copy',))
    always_yes = PrimitiveParameter(False, aliases=('yes',))
    channel_priority = PrimitiveParameter(True)
    debug = PrimitiveParameter(False)
    json = PrimitiveParameter(False)
    offline = PrimitiveParameter(False)
    quiet = PrimitiveParameter(False)
    shortcuts = PrimitiveParameter(True)
    show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
    update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
    verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)

    # conda_build
    bld_path = PrimitiveParameter('')
    binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
                                        parameter_type=(bool, NoneType))

    @property
    def default_python(self):
        # 'major.minor' of the running interpreter, e.g. '2.7'
        ver = sys.version_info
        return '%d.%d' % (ver.major, ver.minor)

    @property
    def arch_name(self):
        # machine name for non-x86 Linux, otherwise the x86 name for our bits
        m = machine()
        if self.platform == 'linux' and m in non_x86_linux_machines:
            return m
        else:
            return _arch_names[self.bits]

    @property
    def platform(self):
        # canonical platform name ('linux', 'osx', 'win'); 'unknown' otherwise
        return _platform_map.get(sys.platform, 'unknown')

    @property
    def subdir(self):
        # repodata subdirectory name, e.g. 'linux-64', 'win-32', 'linux-armv7l'
        m = machine()
        if m in non_x86_linux_machines:
            return 'linux-%s' % m
        else:
            return '%s-%d' % (self.platform, self.bits)

    @property
    def bits(self):
        # interpreter pointer width, unless 32-bit mode is forced
        if self.force_32bit:
            return 32
        else:
            return 8 * tuple.__itemsize__

    @property
    def local_build_root(self):
        # TODO: import from conda_build, and fall back to something incredibly simple
        if self.bld_path:
            return expand(self.bld_path)
        elif self.root_writable:
            return join(self.conda_prefix, 'conda-bld')
        else:
            return expand('~/conda-bld')

    @property
    def root_dir(self):
        # root_dir is an alias for root_prefix, we prefer the name "root_prefix"
        # because it is more consistent with other names
        return abspath(expanduser(self._root_dir))

    @property
    def root_writable(self):
        # probes the filesystem on every access (no caching)
        return try_write(self.root_dir)

    _envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs',))

    @property
    def envs_dirs(self):
        # configured envs dirs first, then the root envs dir; when root is not
        # writable, ~/.conda/envs is inserted ahead of the root envs dir
        return tuple(abspath(expanduser(p))
                     for p in concatv(self._envs_dirs,
                                      (join(self.root_dir, 'envs'), )
                                      if self.root_writable
                                      else ('~/.conda/envs', join(self.root_dir, 'envs'))))

    @property
    def pkgs_dirs(self):
        # one package cache directory per envs dir
        return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]

    @property
    def default_prefix(self):
        # CONDA_DEFAULT_ENV may be unset/'root' (-> root dir), a path, or a
        # name looked up across envs_dirs (first envs dir wins if not found)
        _default_env = os.getenv('CONDA_DEFAULT_ENV')
        if _default_env in (None, ROOT_ENV_NAME):
            return self.root_dir
        elif os.sep in _default_env:
            return abspath(_default_env)
        else:
            for envs_dir in self.envs_dirs:
                default_prefix = join(envs_dir, _default_env)
                if isdir(default_prefix):
                    return default_prefix
            return join(self.envs_dirs[0], _default_env)

    @property
    def prefix(self):
        # resolve without searching existing envs dirs for a named env
        return get_prefix(self, self._argparse_args, False)

    @property
    def prefix_w_legacy_search(self):
        # resolve, searching existing envs dirs (and CWD) for a named env
        return get_prefix(self, self._argparse_args, True)

    @property
    def clone_src(self):
        assert self._argparse_args.clone is not None
        return locate_prefix_by_name(self, self._argparse_args.clone)

    @property
    def conda_in_root(self):
        return not conda_in_private_env()

    @property
    def conda_private(self):
        return conda_in_private_env()

    @property
    def root_prefix(self):
        # when conda lives in a private 'envs/_conda' env, root is two levels up
        return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix

    @property
    def conda_prefix(self):
        return sys.prefix

    @memoizedproperty
    def channel_alias(self):
        from ..models.channel import Channel
        location, scheme, auth, token = split_scheme_auth_token(self._channel_alias)
        return Channel(scheme=scheme, auth=auth, location=location, token=token)

    @property
    def migrated_channel_aliases(self):
        from ..models.channel import Channel
        return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token)
                     for location, scheme, auth, token in
                     (split_scheme_auth_token(c) for c in self._migrated_channel_aliases))

    @memoizedproperty
    def default_channels(self):
        # the format for 'default_channels' is a list of strings that either
        #   - start with a scheme
        #   - are meant to be prepended with channel_alias
        from ..models.channel import Channel
        return tuple(Channel.make_simple_channel(self.channel_alias, v)
                     for v in self._default_channels)

    @memoizedproperty
    def local_build_root_channel(self):
        from ..models.channel import Channel
        url_parts = urlparse(path_to_url(self.local_build_root))
        location, name = url_parts.path.rsplit('/', 1)
        if not location:
            location = '/'
        assert name == 'conda-bld'
        return Channel(scheme=url_parts.scheme, location=location, name=name)

    @memoizedproperty
    def custom_multichannels(self):
        from ..models.channel import Channel
        default_custom_multichannels = {
            'defaults': self.default_channels,
            'local': (self.local_build_root_channel,),
        }
        # user-configured multichannels are applied after (and can override)
        # the built-in 'defaults' and 'local' entries
        all_channels = default_custom_multichannels, self._custom_multichannels
        return odict((name, tuple(Channel(v) for v in c))
                     for name, c in concat(map(iteritems, all_channels)))

    @memoizedproperty
    def custom_channels(self):
        from ..models.channel import Channel
        custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
                           for name, url in iteritems(self._custom_channels))
        # later sources win in the odict: defaults, then local, then custom
        all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
        all_channels = (ch for ch in concat(all_sources))
        return odict((x.name, x) for x in all_channels)
def conda_in_private_env():
    """Return True when conda lives in its own private '_conda' env under 'envs'."""
    prefix_name = basename(sys.prefix)
    parent_name = basename(dirname(sys.prefix))
    return prefix_name == '_conda' and parent_name == 'envs'
def reset_context(search_path=SEARCH_PATH, argparse_args=None):
    """Re-initialize the global ``context`` singleton in place.

    Calling ``__init__`` on the existing instance keeps every module that
    already imported ``context`` pointing at the refreshed object.  Channel
    state derived from the old configuration is reset as well.
    """
    context.__init__(search_path, conda, argparse_args)
    from ..models.channel import Channel
    Channel._reset_state()
    return context
def pkgs_dir_from_envs_dir(envs_dir):
    """Map an envs directory to its package-cache directory."""
    root_envs = abspath(join(context.root_dir, 'envs'))
    if abspath(envs_dir) == root_envs:
        # The root envs dir shares the root installation's package cache.
        cache_name = 'pkgs32' if context.force_32bit else 'pkgs'
        return join(context.root_dir, cache_name)
    return join(envs_dir, '.pkgs')
def get_help_dict():
    """Return a dict mapping configuration key names to their help text."""
    # this is a function so that most of the time it's not evaluated and loaded into memory
    return {
        'add_pip_as_python_dependency': dals("""
            """),
        'always_yes': dals("""
            """),
        'always_copy': dals("""
            """),
        'changeps1': dals("""
            """),
        'use_pip': dals("""
            Use pip when listing packages with conda list. Note that this does not affect any
            conda command or functionality other than the output of the command conda list.
            """),
        'binstar_upload': dals("""
            """),
        'allow_softlinks': dals("""
            """),
        'self_update': dals("""
            """),
        'show_channel_urls': dals("""
            # show channel URLs when displaying what is going to be downloaded
            # None means letting conda decide
            """),
        'update_dependencies': dals("""
            """),
        'channel_priority': dals("""
            """),
        'ssl_verify': dals("""
            # ssl_verify can be a boolean value or a filename string
            """),
        # (sic: the 'conjuction' typo below is in the shipped help text)
        'client_tls_cert': dals("""
            # client_tls_cert can be a path pointing to a single file
            # containing the private key and the certificate (e.g. .pem),
            # or use 'client_tls_cert_key' in conjuction with 'client_tls_cert' for
            # individual files
            """),
        'client_tls_cert_key': dals("""
            # used in conjunction with 'client_tls_cert' for a matching key file
            """),
        'track_features': dals("""
            """),
        'channels': dals("""
            """),
        'disallow': dals("""
            # set packages disallowed to be installed
            """),
        'create_default_packages': dals("""
            # packages which are added to a newly created environment by default
            """),
        'envs_dirs': dals("""
            """),
        'default_channels': dals("""
            """),
        'proxy_servers': dals("""
            """),
        'force_32bit': dals("""
            CONDA_FORCE_32BIT should only be used when running conda-build (in order
            to build 32-bit packages on a 64-bit system). We don't want to mention it
            in the documentation, because it can mess up a lot of things.
            """)
    }
def get_prefix(ctx, args, search=True):
    """Resolve the environment prefix to operate in.

    Args:
        ctx: the context of conda
        args: the argparse args from the command line
        search: whether to search existing envs dirs for a named env

    Returns: the prefix

    Raises: CondaEnvironmentNotFoundError if the prefix is invalid
    """
    if not args.name:
        # No name given: honor an explicit --prefix, else fall back to default.
        if args.prefix:
            return abspath(expanduser(args.prefix))
        return ctx.default_prefix

    if '/' in args.name:
        raise CondaValueError("'/' not allowed in environment name: %s" %
                              args.name, getattr(args, 'json', False))
    if args.name == ROOT_ENV_NAME:
        return ctx.root_dir
    return (locate_prefix_by_name(ctx, args.name) if search
            else join(ctx.envs_dirs[0], args.name))
def locate_prefix_by_name(ctx, name):
    """ Find the location of a prefix given a conda env name.

    Args:
        ctx (Context): the context object
        name (str): the name of prefix to find

    Returns:
        str: the location of the prefix found

    Raises:
        CondaEnvironmentNotFoundError: when no prefix is found
        (fix: the docstring previously claimed CondaValueError, which this
        function never raises)
    """
    if name == ROOT_ENV_NAME:
        return ctx.root_dir

    # look for a directory named `name` in all envs_dirs AND in CWD
    # (the previous chain() wrapper around this single tuple was redundant)
    for envs_dir in ctx.envs_dirs + (os.getcwd(),):
        prefix = join(envs_dir, name)
        if isdir(prefix):
            return prefix

    raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
    """Guard against *command* writing inside a non-writable root installation.

    When *prefix* is under root and root is read-only, delegates to the
    ``root_read_only`` help handler (presumably reports the problem and
    aborts — confirm in conda.cli.help); otherwise does nothing.
    """
    if inroot_notwritable(prefix):
        # deferred import to avoid a module-load cycle with conda.cli
        from conda.cli.help import root_read_only
        root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
    """
    return True if the prefix is under root and root is not writeable
    """
    prefix_under_root = abspath(prefix).startswith(context.root_dir)
    return prefix_under_root and not context.root_writable
# Instantiate the module-level singleton at import time.  Configuration
# parse failures are reported straight to stderr because conda's normal
# exception handler is not available this early in the import sequence.
try:
    context = Context(SEARCH_PATH, conda, None)
except LoadError as e:
    print(e, file=sys.stderr)
    # Exception handler isn't loaded so use sys.exit
    sys.exit(1)
<|code_end|>
conda/cli/main_config.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import collections
import json
import os
import sys
from .common import (Completer, add_parser_json, stdout_json_success)
from .. import CondaError
from .._vendor.auxlib.compat import isiterable
from .._vendor.auxlib.type_coercion import boolify
from ..base.context import context
from ..common.configuration import pretty_list, pretty_map
from ..common.yaml import yaml_dump, yaml_load
from ..compat import iteritems, string_types
from ..config import (rc_bool_keys, rc_list_keys, rc_other, rc_string_keys, sys_rc_path,
user_rc_path)
from ..exceptions import CondaKeyError, CondaValueError, CouldntParseError
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class SingleValueKey(Completer):
    """Completion source: scalar-valued config keys plus boolean literals."""

    def _get_items(self):
        boolean_words = ['yes', 'no', 'on', 'off', 'true', 'false']
        return rc_bool_keys + rc_string_keys + boolean_words
class ListKey(Completer):
    """Completion source: configuration keys whose values are lists."""

    def _get_items(self):
        return rc_list_keys
class BoolOrListKey(Completer):
    """Completion source: both boolean and list configuration keys."""

    def __contains__(self, other):
        # Membership delegates to the computed item list so this object can be
        # used directly as an argparse 'choices' value (see configure_parser).
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Wire up the ``conda config`` sub-command parser."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # TODO: use argparse.FileType
    # Target file: exactly one of --system / --file (default: user rc path).
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
                                user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--show",
        action="store_true",
        help="Display all configuration values as calculated and compiled.",
    )
    action.add_argument(
        "--show-sources",
        action="store_true",
        help="Display all identified configuration sources.",
    )
    action.add_argument(
        "--validate",
        action="store_true",
        help="Validate all configuration sources.",
    )
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar='KEY',
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--append",
        nargs=2,
        action="append",
        help="""Add one configuration value to the end of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--prepend", "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for ``conda config``.

    Known user-facing failures are re-raised as CondaError so the CLI
    error handler can render them consistently.
    """
    try:
        execute_config(args, parser)
    except (NotImplementedError, CouldntParseError) as err:
        raise CondaError(err)
def format_dict(d):
    """Render a mapping as YAML-ish display lines, one entry per element."""
    lines = []
    emit = lines.append
    for key, value in iteritems(d):
        if isinstance(value, collections.Mapping):
            if value:
                emit("%s:" % key)
                emit(pretty_map(value))
            else:
                emit("%s: {}" % key)
        elif isiterable(value):
            if value:
                emit("%s:" % key)
                emit(pretty_list(value))
            else:
                emit("%s: []" % key)
        else:
            # %s of None already prints "None"; keep the explicit form.
            emit("%s: %s" % (key, "None" if value is None else value))
    return lines
def execute_config(args, parser):
    """Carry out the requested ``conda config`` action.

    Handles, in order: --show-sources, --show, --validate, then the
    rc-file-mutating actions (--get/--append/--prepend/--set/--remove/
    --remove-key), finally writing the modified rc file back out.
    """
    json_warnings = []
    json_get = {}

    # --show-sources: dump every configuration source that was merged.
    if args.show_sources:
        if context.json:
            print(json.dumps(context.collect_all(), sort_keys=True,
                             indent=2, separators=(',', ': ')))
        else:
            lines = []
            for source, reprs in iteritems(context.collect_all()):
                lines.append("==> %s <==" % source)
                lines.extend(format_dict(reprs))
                lines.append('')
            print('\n'.join(lines))
        return

    # --show: dump the effective (merged) values of the known settings.
    if args.show:
        from collections import OrderedDict
        d = OrderedDict((key, getattr(context, key))
                        for key in sorted(('add_anaconda_token',
                                           'add_pip_as_python_dependency',
                                           'allow_softlinks',
                                           'always_copy',
                                           'always_yes',
                                           'auto_update_conda',
                                           'binstar_upload',
                                           'changeps1',
                                           'channel_alias',
                                           'channel_priority',
                                           'channels',
                                           'client_tls_cert',
                                           'client_tls_cert_key',
                                           'create_default_packages',
                                           'debug',
                                           'default_channels',
                                           'disallow',
                                           'json',
                                           'offline',
                                           'proxy_servers',
                                           'quiet',
                                           'shortcuts',
                                           'show_channel_urls',
                                           'ssl_verify',
                                           'track_features',
                                           'update_dependencies',
                                           'use_pip',
                                           'verbosity',
                                           )))
        if context.json:
            print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': ')))
        else:
            print('\n'.join(format_dict(d)))
        return

    # --validate: only run validation of every configuration source.
    if args.validate:
        context.validate_all()
        return

    # Resolve which rc file the remaining actions read/modify.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        context.validate_all()
        if args.get == []:
            # Bare --get means "every key present in the file".
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not context.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if context.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(reversed(items)):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              " # lowest priority" if q == 0 else " # highest priority")
                    else:
                        print("--add", key, repr(item))

    # prepend, append, add
    for arg, prepend in zip((args.prepend, args.append), (True, False)):
        for key, item in arg:
            if key == 'channels' and key not in rc_config:
                # 'channels' implicitly contains 'defaults' until configured.
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                raise CondaValueError("key must be one of %s, not %r" %
                                      (', '.join(rc_list_keys), key))
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParseError("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "top" if prepend else "bottom")
                # Drop the existing occurrence before re-inserting at the edge.
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not context.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        if key in set_bools:
            rc_config[key] = boolify(item)
        elif key in set_strings:
            assert isinstance(item, string_types)
            rc_config[key] = item
        else:
            raise CondaValueError("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key))

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                raise CondaKeyError(key, "key %r is not in the config file" % key)
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            raise CondaKeyError(key, "%r is not in the %r key of the config file" %
                                (item, key))
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            raise CondaKeyError(key, "key %r is not in the config file" %
                                key)
        del rc_config[key]

    # config.rc_keys
    # Write the mutated config back to the resolved rc file.
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if context.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
<|code_end|>
| conda/base/context.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
import sys
from collections import Sequence
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.decorators import memoizedproperty
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.path import expand
from ..common.compat import iteritems, odict
from ..common.configuration import (Configuration, LoadError, MapParameter, PrimitiveParameter,
SequenceParameter)
from ..common.disk import try_write
from ..common.url import has_scheme, path_to_url, split_scheme_auth_token, urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
try:
from cytoolz.itertoolz import concat, concatv
except ImportError:
from .._vendor.toolz.itertoolz import concat, concatv
log = getLogger(__name__)

# Optional CI probe package; its absence is normal outside the CI environment.
try:
    import cio_test  # NOQA
except ImportError:
    log.info("No cio_test package found.")
# Map sys.platform values onto conda's canonical platform names.
_platform_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
}
# Linux machine names (from platform.machine()) that are not x86-based.
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
# Pointer width (bits) -> architecture name used in subdir strings.
_arch_names = {
    32: 'x86',
    64: 'x86_64',
}
def channel_alias_validation(value):
    """Validate a channel_alias setting.

    Returns True when acceptable (empty, or carries a scheme), otherwise an
    error message string describing the problem.
    """
    if not value:
        return True
    if has_scheme(value):
        return True
    return "channel_alias value '%s' must have scheme/protocol." % value
class Context(Configuration):
    """Singleton holding conda's merged runtime configuration.

    Values are merged from the sources discovered by the Configuration
    machinery (the condarc search path and optional argparse args).
    Parameter descriptors declare the settings; properties derive values.
    """

    add_pip_as_python_dependency = PrimitiveParameter(True)
    allow_softlinks = PrimitiveParameter(True)
    auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
    changeps1 = PrimitiveParameter(True)
    create_default_packages = SequenceParameter(string_types)
    disallow = SequenceParameter(string_types)
    force_32bit = PrimitiveParameter(False)
    track_features = SequenceParameter(string_types)
    use_pip = PrimitiveParameter(True)
    _root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))

    # connection details
    ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
    client_tls_cert = PrimitiveParameter('', aliases=('client_cert',))
    client_tls_cert_key = PrimitiveParameter('', aliases=('client_cert_key',))
    proxy_servers = MapParameter(string_types)

    add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
    _channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS,
                                        aliases=('channel_alias',),
                                        validation=channel_alias_validation)

    # channels
    channels = SequenceParameter(string_types, default=('defaults',))
    _migrated_channel_aliases = SequenceParameter(string_types,
                                                  aliases=('migrated_channel_aliases',))  # TODO: also take a list of strings # NOQA
    _default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS,
                                          aliases=('default_channels',))
    _custom_channels = MapParameter(string_types, aliases=('custom_channels',))
    migrated_custom_channels = MapParameter(string_types)  # TODO: also take a list of strings
    _custom_multichannels = MapParameter(Sequence, aliases=('custom_multichannels',))

    # command line
    always_copy = PrimitiveParameter(False, aliases=('copy',))
    always_yes = PrimitiveParameter(False, aliases=('yes',))
    channel_priority = PrimitiveParameter(True)
    debug = PrimitiveParameter(False)
    json = PrimitiveParameter(False)
    offline = PrimitiveParameter(False)
    quiet = PrimitiveParameter(False)
    shortcuts = PrimitiveParameter(True)
    show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
    update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
    verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)

    # conda_build
    bld_path = PrimitiveParameter('')
    binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
                                        parameter_type=(bool, NoneType))

    _envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs', 'envs_path'),
                                   string_delimiter=os.pathsep)

    @property
    def default_python(self):
        # e.g. '2.7' / '3.5': the interpreter running conda.
        ver = sys.version_info
        return '%d.%d' % (ver.major, ver.minor)

    @property
    def arch_name(self):
        m = machine()
        if self.platform == 'linux' and m in non_x86_linux_machines:
            return m
        else:
            return _arch_names[self.bits]

    @property
    def platform(self):
        return _platform_map.get(sys.platform, 'unknown')

    @property
    def subdir(self):
        # Repository subdir such as 'linux-64', 'win-32', 'linux-armv7l'.
        m = machine()
        if m in non_x86_linux_machines:
            return 'linux-%s' % m
        else:
            return '%s-%d' % (self.platform, self.bits)

    @property
    def bits(self):
        if self.force_32bit:
            return 32
        else:
            # tuple.__itemsize__ is the native pointer width in bytes.
            return 8 * tuple.__itemsize__

    @property
    def local_build_root(self):
        # TODO: import from conda_build, and fall back to something incredibly simple
        if self.bld_path:
            return expand(self.bld_path)
        elif self.root_writable:
            return join(self.conda_prefix, 'conda-bld')
        else:
            return expand('~/conda-bld')

    @property
    def root_dir(self):
        # root_dir is an alias for root_prefix, we prefer the name "root_prefix"
        # because it is more consistent with other names
        return abspath(expanduser(self._root_dir))

    @property
    def root_writable(self):
        return try_write(self.root_dir)

    @property
    def envs_dirs(self):
        # Configured dirs first, then <root>/envs (writable root) or
        # ~/.conda/envs plus <root>/envs otherwise.
        return tuple(abspath(expanduser(p))
                     for p in concatv(self._envs_dirs,
                                      (join(self.root_dir, 'envs'), )
                                      if self.root_writable
                                      else ('~/.conda/envs', join(self.root_dir, 'envs'))))

    @property
    def pkgs_dirs(self):
        return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]

    @property
    def default_prefix(self):
        _default_env = os.getenv('CONDA_DEFAULT_ENV')
        if _default_env in (None, ROOT_ENV_NAME):
            return self.root_dir
        elif os.sep in _default_env:
            # CONDA_DEFAULT_ENV may hold a path rather than an env name.
            return abspath(_default_env)
        else:
            for envs_dir in self.envs_dirs:
                default_prefix = join(envs_dir, _default_env)
                if isdir(default_prefix):
                    return default_prefix
            return join(self.envs_dirs[0], _default_env)

    @property
    def prefix(self):
        return get_prefix(self, self._argparse_args, False)

    @property
    def prefix_w_legacy_search(self):
        return get_prefix(self, self._argparse_args, True)

    @property
    def clone_src(self):
        assert self._argparse_args.clone is not None
        return locate_prefix_by_name(self, self._argparse_args.clone)

    @property
    def conda_in_root(self):
        return not conda_in_private_env()

    @property
    def conda_private(self):
        return conda_in_private_env()

    @property
    def root_prefix(self):
        # In a private env, conda lives two levels below the real root.
        return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix

    @property
    def conda_prefix(self):
        return sys.prefix

    @memoizedproperty
    def channel_alias(self):
        from ..models.channel import Channel
        location, scheme, auth, token = split_scheme_auth_token(self._channel_alias)
        return Channel(scheme=scheme, auth=auth, location=location, token=token)

    @property
    def migrated_channel_aliases(self):
        from ..models.channel import Channel
        return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token)
                     for location, scheme, auth, token in
                     (split_scheme_auth_token(c) for c in self._migrated_channel_aliases))

    @memoizedproperty
    def default_channels(self):
        # the format for 'default_channels' is a list of strings that either
        # - start with a scheme
        # - are meant to be prepended with channel_alias
        from ..models.channel import Channel
        return tuple(Channel.make_simple_channel(self.channel_alias, v)
                     for v in self._default_channels)

    @memoizedproperty
    def local_build_root_channel(self):
        from ..models.channel import Channel
        url_parts = urlparse(path_to_url(self.local_build_root))
        location, name = url_parts.path.rsplit('/', 1)
        if not location:
            location = '/'
        assert name == 'conda-bld'
        return Channel(scheme=url_parts.scheme, location=location, name=name)

    @memoizedproperty
    def custom_multichannels(self):
        from ..models.channel import Channel
        default_custom_multichannels = {
            'defaults': self.default_channels,
            'local': (self.local_build_root_channel,),
        }
        # User-configured entries come second so they win on duplicate names.
        all_channels = default_custom_multichannels, self._custom_multichannels
        return odict((name, tuple(Channel(v) for v in c))
                     for name, c in concat(map(iteritems, all_channels)))

    @memoizedproperty
    def custom_channels(self):
        from ..models.channel import Channel
        custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
                           for name, url in iteritems(self._custom_channels))
        all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
        all_channels = (ch for ch in concat(all_sources))
        return odict((x.name, x) for x in all_channels)
def conda_in_private_env():
    """True when conda lives in its own private env named '_conda' under 'envs'."""
    prefix = sys.prefix
    return basename(dirname(prefix)) == 'envs' and basename(prefix) == '_conda'
def reset_context(search_path=SEARCH_PATH, argparse_args=None):
    # Re-run Context.__init__ on the module-level singleton so new settings
    # take effect without re-importing the module.
    context.__init__(search_path, conda, argparse_args)
    # Channel state derives from context, so it must be reset as well.
    from ..models.channel import Channel
    Channel._reset_state()
    return context
def pkgs_dir_from_envs_dir(envs_dir):
    """Map an envs directory to its package cache directory."""
    root_envs = abspath(join(context.root_dir, 'envs'))
    if abspath(envs_dir) == root_envs:
        # Root install keeps the cache beside envs/ ('pkgs32' when forced 32-bit).
        cache_name = 'pkgs32' if context.force_32bit else 'pkgs'
        return join(context.root_dir, cache_name)
    return join(envs_dir, '.pkgs')
def get_help_dict():
    # this is a function so that most of the time it's not evaluated and loaded into memory
    # Maps parameter names to the descriptive comment block for that setting.
    return {
        'add_pip_as_python_dependency': dals("""
"""),
        'always_yes': dals("""
"""),
        'always_copy': dals("""
"""),
        'changeps1': dals("""
"""),
        'use_pip': dals("""
Use pip when listing packages with conda list. Note that this does not affect any
conda command or functionality other than the output of the command conda list.
"""),
        'binstar_upload': dals("""
"""),
        'allow_softlinks': dals("""
"""),
        'self_update': dals("""
"""),
        'show_channel_urls': dals("""
# show channel URLs when displaying what is going to be downloaded
# None means letting conda decide
"""),
        'update_dependencies': dals("""
"""),
        'channel_priority': dals("""
"""),
        'ssl_verify': dals("""
# ssl_verify can be a boolean value or a filename string
"""),
        'client_tls_cert': dals("""
# client_tls_cert can be a path pointing to a single file
# containing the private key and the certificate (e.g. .pem),
# or use 'client_tls_cert_key' in conjuction with 'client_tls_cert' for
# individual files
"""),
        'client_tls_cert_key': dals("""
# used in conjunction with 'client_tls_cert' for a matching key file
"""),
        'track_features': dals("""
"""),
        'channels': dals("""
"""),
        'disallow': dals("""
# set packages disallowed to be installed
"""),
        'create_default_packages': dals("""
# packages which are added to a newly created environment by default
"""),
        'envs_dirs': dals("""
"""),
        'default_channels': dals("""
"""),
        'proxy_servers': dals("""
"""),
        'force_32bit': dals("""
CONDA_FORCE_32BIT should only be used when running conda-build (in order
to build 32-bit packages on a 64-bit system). We don't want to mention it
in the documentation, because it can mess up a lot of things.
""")
    }
def get_prefix(ctx, args, search=True):
    """Get the prefix to operate in.

    Args:
        ctx: the conda Context object
        args: the argparse args from the command line
        search: whether to search envs_dirs for an existing prefix by name

    Returns:
        str: the prefix path

    Raises:
        CondaValueError: when the name contains '/'
        CondaEnvironmentNotFoundError: from locate_prefix_by_name when a
            searched-for named environment does not exist
    """
    if args.name:
        if '/' in args.name:
            raise CondaValueError("'/' not allowed in environment name: %s" %
                                  args.name, getattr(args, 'json', False))
        if args.name == ROOT_ENV_NAME:
            return ctx.root_dir
        if search:
            return locate_prefix_by_name(ctx, args.name)
        else:
            # No search: a new env would live in the first envs dir.
            return join(ctx.envs_dirs[0], args.name)
    elif args.prefix:
        return abspath(expanduser(args.prefix))
    else:
        return ctx.default_prefix
def locate_prefix_by_name(ctx, name):
    """Find the location of a prefix given a conda env name.

    Args:
        ctx (Context): the context object
        name (str): the name of the prefix to find

    Returns:
        str: the location of the prefix found

    Raises:
        CondaEnvironmentNotFoundError: when no prefix with that name exists
    """
    if name == ROOT_ENV_NAME:
        return ctx.root_dir

    # look for a directory named `name` in all envs_dirs AND in CWD
    # (plain tuple iteration; the previous itertools.chain over a single
    # tuple was redundant)
    for envs_dir in ctx.envs_dirs + (os.getcwd(),):
        prefix = join(envs_dir, name)
        if isdir(prefix):
            return prefix

    raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
    """Report a read-only-root error if *prefix* is under an unwritable root."""
    if not inroot_notwritable(prefix):
        return
    from conda.cli.help import root_read_only
    root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
    """
    Return True if *prefix* is under the root prefix and root is not writable.
    """
    if context.root_writable:
        return False
    root = abspath(context.root_dir)
    path = abspath(prefix)
    # Compare on a path-component boundary: a bare startswith() would also
    # match sibling directories such as '<root>-backup'.
    return path == root or path.startswith(root + os.sep)
# Instantiate the singleton configuration context at import time.  A LoadError
# means a configuration source could not be parsed; the full exception handler
# is not loaded yet, so report and exit directly.
try:
    context = Context(SEARCH_PATH, conda, None)
except LoadError as e:
    print(e, file=sys.stderr)
    # Exception handler isn't loaded so use sys.exit
    sys.exit(1)
conda/cli/main_config.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import collections
import json
import os
import sys
from .common import (Completer, add_parser_json, stdout_json_success)
from .. import CondaError
from .._vendor.auxlib.compat import isiterable
from .._vendor.auxlib.type_coercion import boolify
from ..base.context import context
from ..common.configuration import pretty_list, pretty_map
from ..common.yaml import yaml_dump, yaml_load
from ..compat import iteritems, string_types
from ..config import (rc_bool_keys, rc_list_keys, rc_other, rc_string_keys, sys_rc_path,
user_rc_path)
from ..exceptions import CondaKeyError, CondaValueError, CouldntParseError
# ---- user-facing help text for `conda config` ----
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path

# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""

# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class SingleValueKey(Completer):
    """Tab-completion source for config keys that take a single value."""

    def _get_items(self):
        # Boolean and string keys, plus every spelling accepted for booleans.
        bool_spellings = ['yes', 'no', 'on', 'off', 'true', 'false']
        return rc_bool_keys + rc_string_keys + bool_spellings
class ListKey(Completer):
    """Tab-completion source for list-valued configuration keys."""

    def _get_items(self):
        # All keys holding sequences (e.g. 'channels') accept --add/--append.
        return rc_list_keys
class BoolOrListKey(Completer):
    """Completer for --get: accepts both boolean and list config keys."""

    def __contains__(self, other):
        # argparse validates `choices` via membership; delegate to the items.
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Wire up the ``conda config`` sub-command parser."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # TODO: use argparse.FileType
    # Target file: exactly one of --system / --file (default: user rc path).
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
config file ({user}).""".format(system=sys_rc_path,
                                user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--show",
        action="store_true",
        help="Display all configuration values as calculated and compiled.",
    )
    action.add_argument(
        "--show-sources",
        action="store_true",
        help="Display all identified configuration sources.",
    )
    action.add_argument(
        "--validate",
        action="store_true",
        help="Validate all configuration sources.",
    )
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar='KEY',
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--append",
        nargs=2,
        action="append",
        help="""Add one configuration value to the end of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--prepend", "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for ``conda config``.

    Known user-facing failures are re-raised as CondaError so the CLI
    error handler can render them consistently.
    """
    try:
        execute_config(args, parser)
    except (NotImplementedError, CouldntParseError) as err:
        raise CondaError(err)
def format_dict(d):
    """Render a mapping as YAML-ish display lines, one entry per element."""
    lines = []
    emit = lines.append
    for key, value in iteritems(d):
        if isinstance(value, collections.Mapping):
            if value:
                emit("%s:" % key)
                emit(pretty_map(value))
            else:
                emit("%s: {}" % key)
        elif isiterable(value):
            if value:
                emit("%s:" % key)
                emit(pretty_list(value))
            else:
                emit("%s: []" % key)
        else:
            # %s of None already prints "None"; keep the explicit form.
            emit("%s: %s" % (key, "None" if value is None else value))
    return lines
def execute_config(args, parser):
    """Carry out the requested ``conda config`` action.

    Handles, in order: --show-sources, --show, --validate, then the
    rc-file-mutating actions (--get/--append/--prepend/--set/--remove/
    --remove-key), finally writing the modified rc file back out.
    """
    json_warnings = []
    json_get = {}

    # --show-sources: dump every configuration source that was merged.
    if args.show_sources:
        if context.json:
            print(json.dumps(context.collect_all(), sort_keys=True,
                             indent=2, separators=(',', ': ')))
        else:
            lines = []
            for source, reprs in iteritems(context.collect_all()):
                lines.append("==> %s <==" % source)
                lines.extend(format_dict(reprs))
                lines.append('')
            print('\n'.join(lines))
        return

    # --show: dump the effective (merged) values of the known settings.
    if args.show:
        from collections import OrderedDict
        d = OrderedDict((key, getattr(context, key))
                        for key in sorted(('add_anaconda_token',
                                           'add_pip_as_python_dependency',
                                           'allow_softlinks',
                                           'always_copy',
                                           'always_yes',
                                           'auto_update_conda',
                                           'binstar_upload',
                                           'changeps1',
                                           'channel_alias',
                                           'channel_priority',
                                           'channels',
                                           'client_tls_cert',
                                           'client_tls_cert_key',
                                           'create_default_packages',
                                           'debug',
                                           'default_channels',
                                           'disallow',
                                           'envs_dirs',
                                           'json',
                                           'offline',
                                           'proxy_servers',
                                           'quiet',
                                           'shortcuts',
                                           'show_channel_urls',
                                           'ssl_verify',
                                           'track_features',
                                           'update_dependencies',
                                           'use_pip',
                                           'verbosity',
                                           )))
        if context.json:
            print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': ')))
        else:
            print('\n'.join(format_dict(d)))
        return

    # --validate: only run validation of every configuration source.
    if args.validate:
        context.validate_all()
        return

    # Resolve which rc file the remaining actions read/modify.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        context.validate_all()
        if args.get == []:
            # Bare --get means "every key present in the file".
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not context.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if context.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(reversed(items)):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              " # lowest priority" if q == 0 else " # highest priority")
                    else:
                        print("--add", key, repr(item))

    # prepend, append, add
    for arg, prepend in zip((args.prepend, args.append), (True, False)):
        for key, item in arg:
            if key == 'channels' and key not in rc_config:
                # 'channels' implicitly contains 'defaults' until configured.
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                raise CondaValueError("key must be one of %s, not %r" %
                                      (', '.join(rc_list_keys), key))
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParseError("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "top" if prepend else "bottom")
                # Drop the existing occurrence before re-inserting at the edge.
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not context.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        if key in set_bools:
            rc_config[key] = boolify(item)
        elif key in set_strings:
            assert isinstance(item, string_types)
            rc_config[key] = item
        else:
            raise CondaValueError("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key))

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                raise CondaKeyError(key, "key %r is not in the config file" % key)
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            raise CondaKeyError(key, "%r is not in the %r key of the config file" %
                                (item, key))
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            raise CondaKeyError(key, "key %r is not in the config file" %
                                key)
        del rc_config[key]

    # config.rc_keys
    # Write the mutated config back to the resolved rc file.
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if context.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
| conda/base/context.py
--- a/conda/base/context.py
+++ b/conda/base/context.py
@@ -104,6 +104,9 @@ class Context(Configuration):
binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
parameter_type=(bool, NoneType))
+ _envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs', 'envs_path'),
+ string_delimiter=os.pathsep)
+
@property
def default_python(self):
ver = sys.version_info
@@ -156,8 +159,6 @@ def root_dir(self):
def root_writable(self):
return try_write(self.root_dir)
- _envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs',))
-
@property
def envs_dirs(self):
return tuple(abspath(expanduser(p))
conda/cli/main_config.py
--- a/conda/cli/main_config.py
+++ b/conda/cli/main_config.py
@@ -289,6 +289,7 @@ def execute_config(args, parser):
'debug',
'default_channels',
'disallow',
+ 'envs_dirs',
'json',
'offline',
'proxy_servers', |
conda info exposing tokens
Hi, the conda info command is exposing tokens again. Por ejemplo:
```
Current conda install:
platform : win-64
conda version : 4.2.9
conda is private : False
conda-env version : 4.2.9
conda-build version : 2.0.2
python version : 2.7.12.final.0
requests version : 2.11.1
root environment : C:\Anaconda2 (writable)
default environment : C:\Anaconda2
envs directories : C:\Anaconda2\envs
package cache : C:\Anaconda2\pkgs
channel URLs : https://conda.anaconda.org/t/**EXPOSED TOKEN**/topper/win-64/
https://conda.anaconda.org/t/**EXPOSED TOKEN**/topper/noarch/
https://repo.continuum.io/pkgs/free/win-64/
https://repo.continuum.io/pkgs/free/noarch/
https://repo.continuum.io/pkgs/pro/win-64/
https://repo.continuum.io/pkgs/pro/noarch/
https://repo.continuum.io/pkgs/msys2/win-64/
https://repo.continuum.io/pkgs/msys2/noarch/
config file : d:\Usuarios\108630\.condarc
offline mode : False
```
| conda/cli/main_info.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import json
import os
import re
import sys
from collections import OrderedDict
from os import listdir
from os.path import exists, expanduser, join
from .common import (add_parser_json, add_parser_offline, arg2spec, disp_features,
handle_envs_list, stdout_json)
from ..compat import itervalues
from ..config import rc_path, sys_rc_path, user_rc_path
from ..models.channel import prioritize_channels
from ..utils import on_win
help = "Display information about current conda install."
example = """
Examples:
conda info -a
"""
def configure_parser(sub_parsers):
    """Register the ``conda info`` sub-command and all of its arguments."""
    parser = sub_parsers.add_parser(
        'info',
        description=help,
        help=help,
        epilog=example,
    )
    add_parser_json(parser)
    add_parser_offline(parser)
    parser.add_argument(
        '-a', "--all",
        action="store_true",
        help="Show all information, (environments, license, and system "
             "information.")
    parser.add_argument(
        '-e', "--envs",
        action="store_true",
        help="List all known conda environments.",
    )
    parser.add_argument(
        '-l', "--license",
        action="store_true",
        help="Display information about the local conda licenses list.",
    )
    parser.add_argument(
        '-s', "--system",
        action="store_true",
        help="List environment variables.",
    )
    parser.add_argument(
        'packages',
        action="store",
        nargs='*',
        help="Display information about packages.",
    )
    parser.add_argument(
        '--root',
        action='store_true',
        help='Display root environment path.',
    )
    parser.add_argument(
        '--unsafe-channels',
        action='store_true',
        help='Display list of channels with tokens exposed.',
    )
    parser.set_defaults(func=execute)
def show_pkg_info(name):
    """Print every available version/build of package *name* from the index,
    or ``not available`` when the index has no such package."""
    from conda.api import get_index
    from conda.resolve import Resolve

    resolver = Resolve(get_index())
    print(name)
    if name not in resolver.groups:
        print(' not available')
        return
    for pkg in sorted(resolver.get_pkgs(name)):
        features = disp_features(resolver.features(pkg.fn))
        print(' %-15s %15s %s' % (pkg.version, pkg.build, features))
# TODO
# Matches per-version user site directories such as "python2.7"/"python3.5".
# Raw string avoids the invalid-escape-sequence warning for '\d'.
python_re = re.compile(r'python\d\.\d')


def get_user_site():
    """Return user-level site-packages parent directories for this machine.

    On POSIX this scans ``~/.local/lib`` for ``pythonX.Y`` entries; on
    Windows it lists ``%APPDATA%\\Python``.  Returns an empty list when
    nothing is found (or APPDATA is unset on Windows).
    """
    site_dirs = []
    if not on_win:
        if exists(expanduser('~/.local/lib')):
            for path in listdir(expanduser('~/.local/lib/')):
                if python_re.match(path):
                    site_dirs.append("~/.local/lib/%s" % path)
    else:
        if 'APPDATA' not in os.environ:
            return site_dirs
        APPDATA = os.environ['APPDATA']
        if exists(join(APPDATA, 'Python')):
            # Use the same casing as the exists() check above; the original
            # listed 'PYTHON', which only worked because NTFS is
            # case-insensitive.
            site_dirs = [join(APPDATA, 'Python', i) for i in
                         listdir(join(APPDATA, 'Python'))]
    return site_dirs
def pretty_package(pkg):
    """Pretty-print one package record: a header line, a key/value metadata
    table, and the package's dependency list."""
    from conda.utils import human_bytes
    from conda.models.channel import Channel

    # Ordered so the most identifying fields print first.
    d = OrderedDict([
        ('file name', pkg.fn),
        ('name', pkg.name),
        ('version', pkg.version),
        ('build number', pkg.build_number),
        ('build string', pkg.build),
        ('channel', Channel(pkg.channel).canonical_name),
        ('size', human_bytes(pkg.info['size'])),
    ])
    # Append any remaining metadata keys, skipping ones already shown above
    # or rendered separately (e.g. 'depends' is printed last).
    rest = pkg.info
    for key in sorted(rest):
        if key in {'build', 'depends', 'requires', 'channel', 'name',
                   'version', 'build_number', 'size'}:
            continue
        d[key] = rest[key]

    print()
    header = "%s %s %s" % (d['name'], d['version'], d['build string'])
    print(header)
    print('-'*len(header))
    for key in d:
        print("%-12s: %s" % (key, d[key]))
    print('dependencies:')
    for dep in pkg.info['depends']:
        print(' %s' % dep)
def execute(args, parser):
    """Implement ``conda info``.

    Depending on flags this prints the root prefix (``--root``), details for
    named packages (positional args), the raw channel list including auth
    tokens (``--unsafe-channels``), or the general install summary,
    optionally extended with environments, system, and license details.

    Fix: anaconda.org auth tokens embedded in channel URLs are masked in all
    regular output; ``--unsafe-channels`` remains the explicit opt-in for
    seeing them.
    """
    import os
    from os.path import dirname

    import conda
    from conda.base.context import context
    from conda.models.channel import offline_keep
    from conda.resolve import Resolve
    from conda.api import get_index

    if args.root:
        if context.json:
            stdout_json({'root_prefix': context.root_dir})
        else:
            print(context.root_dir)
        return

    if args.packages:
        index = get_index()
        r = Resolve(index)
        if context.json:
            stdout_json({
                package: [p._asdict()
                          for p in sorted(r.get_pkgs(arg2spec(package)))]
                for package in args.packages
            })
        else:
            for package in args.packages:
                versions = r.get_pkgs(arg2spec(package))
                for pkg in sorted(versions):
                    pretty_package(pkg)
        return

    options = 'envs', 'system', 'license'

    # Everything below is best-effort; `conda info` should degrade, not fail.
    try:
        from conda.install import linked_data
        root_pkgs = linked_data(sys.prefix)
    except Exception:
        root_pkgs = None

    try:
        import requests
        requests_version = requests.__version__
    except ImportError:
        requests_version = "could not import"
    except Exception as e:
        requests_version = "Error %s" % e

    try:
        import conda_env
        conda_env_version = conda_env.__version__
    except Exception:
        # Fall back to linked-package metadata when conda_env is not
        # importable in this interpreter.
        try:
            cenv = [p for p in itervalues(root_pkgs) if p['name'] == 'conda-env']
            conda_env_version = cenv[0]['version']
        except Exception:
            conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:
        conda_build_version = "not installed"
    except Exception as e:
        conda_build_version = "Error %s" % e
    else:
        conda_build_version = conda_build.__version__

    channels = context.channels
    if args.unsafe_channels:
        # Explicit opt-in: show channels exactly as configured, tokens and all.
        if not context.json:
            print("\n".join(channels))
        else:
            print(json.dumps({"channels": channels}))
        return 0

    channels = list(prioritize_channels(channels).keys())
    if not context.json:
        channels = [c + ('' if offline_keep(c) else ' (offline)')
                    for c in channels]
    # Mask embedded anaconda.org auth tokens (the '/t/<token>' URL segment)
    # so they are never leaked in `conda info` output or its JSON form.
    channels = [re.sub(r'/t/[a-zA-Z0-9-]+', '/t/<TOKEN>', c) for c in channels]

    info_dict = dict(
        platform=context.subdir,
        conda_version=conda.__version__,
        conda_env_version=conda_env_version,
        conda_build_version=conda_build_version,
        root_prefix=context.root_dir,
        conda_prefix=context.conda_prefix,
        conda_private=context.conda_private,
        root_writable=context.root_writable,
        pkgs_dirs=context.pkgs_dirs,
        envs_dirs=context.envs_dirs,
        default_prefix=context.default_prefix,
        channels=channels,
        rc_path=rc_path,
        user_rc_path=user_rc_path,
        sys_rc_path=sys_rc_path,
        # is_foreign=bool(foreign),
        offline=context.offline,
        envs=[],
        python_version='.'.join(map(str, sys.version_info)),
        requests_version=requests_version,
    )

    if args.all or context.json:
        for option in options:
            setattr(args, option, True)

    if args.all or all(not getattr(args, opt) for opt in options):
        # Multi-valued fields continue on following lines, aligned with the
        # value column (26 spaces).
        for key in 'pkgs_dirs', 'envs_dirs', 'channels':
            info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key])
        info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else
                               'read only')
        print("""\
Current conda install:
platform : %(platform)s
conda version : %(conda_version)s
conda is private : %(conda_private)s
conda-env version : %(conda_env_version)s
conda-build version : %(conda_build_version)s
python version : %(python_version)s
requests version : %(requests_version)s
root environment : %(root_prefix)s (%(_rtwro)s)
default environment : %(default_prefix)s
envs directories : %(_envs_dirs)s
package cache : %(_pkgs_dirs)s
channel URLs : %(_channels)s
config file : %(rc_path)s
offline mode : %(offline)s
""" % info_dict)

    if args.envs:
        handle_envs_list(info_dict['envs'], not context.json)

    if args.system:
        from conda.cli.find_commands import find_commands, find_executable

        site_dirs = get_user_site()
        evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
                 'CIO_TEST', 'CONDA_ENVS_PATH']
        if context.platform == 'linux':
            evars.append('LD_LIBRARY_PATH')
        elif context.platform == 'osx':
            evars.append('DYLD_LIBRARY_PATH')

        if context.json:
            info_dict['sys.version'] = sys.version
            info_dict['sys.prefix'] = sys.prefix
            info_dict['sys.executable'] = sys.executable
            info_dict['site_dirs'] = get_user_site()
            info_dict['env_vars'] = {ev: os.getenv(ev, '<not set>') for ev in evars}
        else:
            print("sys.version: %s..." % (sys.version[:40]))
            print("sys.prefix: %s" % sys.prefix)
            print("sys.executable: %s" % sys.executable)
            print("conda location: %s" % dirname(conda.__file__))
            for cmd in sorted(set(find_commands() + ['build'])):
                print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
            print("user site dirs: ", end='')
            if site_dirs:
                print(site_dirs[0])
            else:
                print()
            for site_dir in site_dirs[1:]:
                print(' %s' % site_dir)
            print()

            for ev in sorted(evars):
                print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
            print()

    if args.license and not context.json:
        try:
            from _license import show_info
            show_info()
        except ImportError:
            print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")

    if context.json:
        stdout_json(info_dict)
<|code_end|>
conda/common/url.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import socket
import sys
from getpass import getpass
from logging import getLogger
from os.path import abspath, expanduser
try:
# Python 3
from urllib.parse import (quote, quote_plus, unquote, unquote_plus, # NOQA
urlunparse as stdlib_urlparse, urljoin) # NOQA
from urllib.request import pathname2url # NOQA
except ImportError:
# Python 2
from urllib import quote, quote_plus, unquote, unquote_plus, pathname2url # NOQA
from urlparse import urlunparse as stdlib_urlparse, urljoin # NOQA
from requests.packages.urllib3.exceptions import LocationParseError
from requests.packages.urllib3.util.url import Url, parse_url
from .._vendor.auxlib.decorators import memoize
log = getLogger(__name__)
on_win = bool(sys.platform == "win32")
@memoize
def path_to_url(path):
    """Convert a local filesystem path into a ``file://`` URL (memoized)."""
    absolute = abspath(expanduser(path))
    url = urljoin('file:', pathname2url(absolute))
    log.debug("%s converted to %s", path, url)
    return url
def url_to_path(url):  # NOQA
    """Convert a ``file://`` URL back into a filesystem path."""
    assert url.startswith('file:'), "You can only turn file: urls into filenames (not %r)" % url
    path = unquote(url[len('file:'):].lstrip('/'))
    if re.match('^([a-z])[:|]', path, re.I):
        # Windows drive form: "c|/..." or "c:/..." -> "c:/..."
        path = '%s:%s' % (path[0], path[2:])
    elif not path.startswith(r'\\'):
        # POSIX path (not a Windows UNC share): restore the leading slash.
        path = '/' + path
    return path
@memoize
def urlparse(url):
    """Parse *url* into a ``Url`` named tuple (memoized).

    On Windows, backslashes in ``file:`` URLs are normalized to forward
    slashes before parsing.
    """
    if on_win and url.startswith('file:'):
        # str.replace returns a new string; the original code discarded the
        # result, so the normalization never took effect.
        url = url.replace('\\', '/')
    return parse_url(url)
def url_to_s3_info(url):
    """Split an ``s3://bucket/key`` URL into its ``(bucket, key)`` pair."""
    parsed_url = parse_url(url)
    assert parsed_url.scheme == 's3', "You can only use s3: urls (not %r)" % url
    return parsed_url.host, parsed_url.path
def is_url(url):
    """Return True when *url* parses with a netloc or a ``file`` scheme."""
    try:
        parsed = urlparse(url)
    except LocationParseError:
        log.debug("Could not parse url ({0}).".format(url))
        return False
    return parsed.netloc is not None or parsed.scheme == "file"
def is_ipv4_address(string_ip):
    """Return True when *string_ip* is a full dotted-quad IPv4 literal."""
    try:
        socket.inet_aton(string_ip)
    except socket.error:
        return False
    # inet_aton also accepts short forms like "127.1"; require four quads.
    return string_ip.count('.') == 3
def is_ipv6_address(string_ip):
    """Return True when *string_ip* is a valid IPv6 literal."""
    try:
        socket.inet_pton(socket.AF_INET6, string_ip)
        return True
    except socket.error:
        return False
def is_ip_address(string_ip):
    """Return True when *string_ip* is a valid IPv4 or IPv6 literal."""
    if is_ipv4_address(string_ip):
        return True
    return is_ipv6_address(string_ip)
def join(*args):
    """Join URL path segments with single slashes.

    A leading slash is kept (or added when the first segment is empty or
    starts with '/'); empty segments are dropped.  Returns '' when called
    with no arguments (the original raised IndexError on ``args[0]``).
    """
    if not args:
        return ''
    start = '/' if not args[0] or args[0].startswith('/') else ''
    return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y)


join_url = join  # alias used by callers elsewhere in the package
def has_scheme(value):
    """Return a match object when *value* starts with a URL scheme, else None."""
    scheme_pattern = r'[a-z][a-z0-9]{0,11}://'
    return re.match(scheme_pattern, value)
def strip_scheme(url):
    """Drop the leading ``scheme://`` from *url*, if one is present."""
    _, sep, remainder = url.partition('://')
    return remainder if sep else url
def split_anaconda_token(url):
    """Remove an embedded anaconda.org token segment (``/t/<token>``) from
    *url*, returning ``(cleaned_url, token)``.

    ``token`` is None when no token segment is present (an empty segment
    yields '').  Any trailing slash is stripped from the cleaned URL.
    """
    match = re.search(r'/t/([a-zA-Z0-9-]*)', url)
    if match is None:
        return url.rstrip('/'), None
    token = match.groups()[0]
    cleaned_url = url.replace('/t/' + token, '', 1)
    return cleaned_url.rstrip('/'), token
def split_platform(url):
    """Strip a platform directory segment (e.g. ``osx-64``) from *url*,
    returning ``(cleaned_url, platform_or_None)``.

    Examples:
        >>> split_platform("https://1.2.3.4/t/tk-123/osx-64/path")
        (u'https://1.2.3.4/t/tk-123/path', u'osx-64')
    """
    # Imported lazily; conda.base.constants would otherwise risk an import
    # cycle with this low-level module.
    from conda.base.constants import PLATFORM_DIRECTORIES
    _platform_match_regex = r'/(%s)/?' % r'|'.join(r'%s' % d for d in PLATFORM_DIRECTORIES)
    _platform_match = re.search(_platform_match_regex, url, re.IGNORECASE)
    platform = _platform_match.groups()[0] if _platform_match else None
    # NOTE(review): this replaces the first occurrence of '/<platform>'
    # anywhere in the URL, not specifically the segment that matched —
    # confirm that channel URLs can never contain the platform string twice.
    cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url
    return cleaned_url.rstrip('/'), platform
def split_package_filename(url):
    """Split a trailing package or metadata filename (``.tar.bz2`` /
    ``.json``) off of *url*, returning ``(cleaned_url, filename_or_None)``.
    """
    if url.endswith(('.tar.bz2', '.json')):
        cleaned_url, package_filename = url.rsplit('/', 1)
    else:
        cleaned_url, package_filename = url, None
    return cleaned_url, package_filename
def split_scheme_auth_token(url):
    """Split *url* into ``(remainder_url, scheme, auth, token)``.

    Returns four Nones for a falsy *url*.  The remainder URL keeps host,
    port, path, and query, while scheme, credentials, and any embedded
    anaconda.org token are split out.
    """
    if not url:
        return None, None, None, None
    cleaned_url, token = split_anaconda_token(url)
    url_parts = urlparse(cleaned_url)
    # Rebuild without scheme/auth so only the location portion remains.
    remainder_url = Url(host=url_parts.host, port=url_parts.port, path=url_parts.path,
                        query=url_parts.query).url
    return remainder_url, url_parts.scheme, url_parts.auth, token
def split_conda_url_easy_parts(url):
    """Decompose a conda channel/package URL into its individual parts.

    Returns a 9-tuple ``(scheme, auth, token, platform, package_filename,
    host, port, path, query)``; components absent from *url* come back None.
    """
    # scheme, auth, token, platform, package_filename, host, port, path, query
    cleaned_url, token = split_anaconda_token(url)
    cleaned_url, platform = split_platform(cleaned_url)
    cleaned_url, package_filename = split_package_filename(cleaned_url)

    # TODO: split out namespace using regex

    url_parts = urlparse(cleaned_url)

    return (url_parts.scheme, url_parts.auth, token, platform, package_filename, url_parts.host,
            url_parts.port, url_parts.path, url_parts.query)
def is_windows_path(value):
    """Return a match object when *value* looks like a Windows drive path
    (e.g. ``C:\\`` or ``c:/``), else None."""
    drive_pattern = r'[a-z]:[/\\]'
    return re.match(drive_pattern, value, re.IGNORECASE)
@memoize
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials for *scheme*.

    Memoized so the user is prompted at most once per scheme per process.
    """
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass("Password:")
    return username, passwd
def add_username_and_password(url, username, password):
    """Return *url* with ``username:password`` set as its auth component.

    The password is percent-quoted.  NOTE(review): the username is not —
    a username containing ':' or '@' would yield a malformed URL; confirm
    whether callers can ever pass one.
    """
    url_parts = parse_url(url)._asdict()
    url_parts['auth'] = username + ':' + quote(password, '')
    return Url(**url_parts).url
def maybe_add_auth(url, auth, force=False):
    """Return *url* with *auth* credentials inserted.

    Credentials already present in *url* are preserved unless *force* is
    True.  A falsy *auth* leaves the URL untouched.
    """
    if not auth:
        return url
    parts = urlparse(url)._asdict()
    if not parts['auth'] or force:
        parts['auth'] = auth
        return Url(**parts).url
    return url
if __name__ == "__main__":
    # Running this module directly executes the embedded doctests.
    import doctest
    doctest.testmod()
<|code_end|>
| conda/cli/main_info.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
import json
import os
import re
import sys
from collections import OrderedDict
from conda.common.url import mask_anaconda_token
from os import listdir
from os.path import exists, expanduser, join
from .common import (add_parser_json, add_parser_offline, arg2spec, disp_features,
handle_envs_list, stdout_json)
from ..compat import itervalues
from ..config import rc_path, sys_rc_path, user_rc_path
from ..models.channel import prioritize_channels
from ..utils import on_win
help = "Display information about current conda install."
example = """
Examples:
conda info -a
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'info',
description=help,
help=help,
epilog=example,
)
add_parser_json(p)
add_parser_offline(p)
p.add_argument(
'-a', "--all",
action="store_true",
help="Show all information, (environments, license, and system "
"information.")
p.add_argument(
'-e', "--envs",
action="store_true",
help="List all known conda environments.",
)
p.add_argument(
'-l', "--license",
action="store_true",
help="Display information about the local conda licenses list.",
)
p.add_argument(
'-s', "--system",
action="store_true",
help="List environment variables.",
)
p.add_argument(
'packages',
action="store",
nargs='*',
help="Display information about packages.",
)
p.add_argument(
'--root',
action='store_true',
help='Display root environment path.',
)
p.add_argument(
'--unsafe-channels',
action='store_true',
help='Display list of channels with tokens exposed.',
)
p.set_defaults(func=execute)
def show_pkg_info(name):
from conda.api import get_index
from conda.resolve import Resolve
index = get_index()
r = Resolve(index)
print(name)
if name in r.groups:
for pkg in sorted(r.get_pkgs(name)):
print(' %-15s %15s %s' % (
pkg.version,
pkg.build,
disp_features(r.features(pkg.fn))))
else:
print(' not available')
# TODO
python_re = re.compile('python\d\.\d')
def get_user_site():
site_dirs = []
if not on_win:
if exists(expanduser('~/.local/lib')):
for path in listdir(expanduser('~/.local/lib/')):
if python_re.match(path):
site_dirs.append("~/.local/lib/%s" % path)
else:
if 'APPDATA' not in os.environ:
return site_dirs
APPDATA = os.environ['APPDATA']
if exists(join(APPDATA, 'Python')):
site_dirs = [join(APPDATA, 'Python', i) for i in
listdir(join(APPDATA, 'PYTHON'))]
return site_dirs
def pretty_package(pkg):
from conda.utils import human_bytes
from conda.models.channel import Channel
d = OrderedDict([
('file name', pkg.fn),
('name', pkg.name),
('version', pkg.version),
('build number', pkg.build_number),
('build string', pkg.build),
('channel', Channel(pkg.channel).canonical_name),
('size', human_bytes(pkg.info['size'])),
])
rest = pkg.info
for key in sorted(rest):
if key in {'build', 'depends', 'requires', 'channel', 'name',
'version', 'build_number', 'size'}:
continue
d[key] = rest[key]
print()
header = "%s %s %s" % (d['name'], d['version'], d['build string'])
print(header)
print('-'*len(header))
for key in d:
print("%-12s: %s" % (key, d[key]))
print('dependencies:')
for dep in pkg.info['depends']:
print(' %s' % dep)
def execute(args, parser):
import os
from os.path import dirname
import conda
from conda.base.context import context
from conda.models.channel import offline_keep
from conda.resolve import Resolve
from conda.api import get_index
if args.root:
if context.json:
stdout_json({'root_prefix': context.root_dir})
else:
print(context.root_dir)
return
if args.packages:
index = get_index()
r = Resolve(index)
if context.json:
stdout_json({
package: [p._asdict()
for p in sorted(r.get_pkgs(arg2spec(package)))]
for package in args.packages
})
else:
for package in args.packages:
versions = r.get_pkgs(arg2spec(package))
for pkg in sorted(versions):
pretty_package(pkg)
return
options = 'envs', 'system', 'license'
try:
from conda.install import linked_data
root_pkgs = linked_data(sys.prefix)
except:
root_pkgs = None
try:
import requests
requests_version = requests.__version__
except ImportError:
requests_version = "could not import"
except Exception as e:
requests_version = "Error %s" % e
try:
import conda_env
conda_env_version = conda_env.__version__
except:
try:
cenv = [p for p in itervalues(root_pkgs) if p['name'] == 'conda-env']
conda_env_version = cenv[0]['version']
except:
conda_env_version = "not installed"
try:
import conda_build
except ImportError:
conda_build_version = "not installed"
except Exception as e:
conda_build_version = "Error %s" % e
else:
conda_build_version = conda_build.__version__
channels = context.channels
if args.unsafe_channels:
if not context.json:
print("\n".join(channels))
else:
print(json.dumps({"channels": channels}))
return 0
channels = list(prioritize_channels(channels).keys())
if not context.json:
channels = [c + ('' if offline_keep(c) else ' (offline)')
for c in channels]
channels = [mask_anaconda_token(c) for c in channels]
info_dict = dict(
platform=context.subdir,
conda_version=conda.__version__,
conda_env_version=conda_env_version,
conda_build_version=conda_build_version,
root_prefix=context.root_dir,
conda_prefix=context.conda_prefix,
conda_private=context.conda_private,
root_writable=context.root_writable,
pkgs_dirs=context.pkgs_dirs,
envs_dirs=context.envs_dirs,
default_prefix=context.default_prefix,
channels=channels,
rc_path=rc_path,
user_rc_path=user_rc_path,
sys_rc_path=sys_rc_path,
# is_foreign=bool(foreign),
offline=context.offline,
envs=[],
python_version='.'.join(map(str, sys.version_info)),
requests_version=requests_version,
)
if args.all or context.json:
for option in options:
setattr(args, option, True)
if args.all or all(not getattr(args, opt) for opt in options):
for key in 'pkgs_dirs', 'envs_dirs', 'channels':
info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key])
info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else
'read only')
print("""\
Current conda install:
platform : %(platform)s
conda version : %(conda_version)s
conda is private : %(conda_private)s
conda-env version : %(conda_env_version)s
conda-build version : %(conda_build_version)s
python version : %(python_version)s
requests version : %(requests_version)s
root environment : %(root_prefix)s (%(_rtwro)s)
default environment : %(default_prefix)s
envs directories : %(_envs_dirs)s
package cache : %(_pkgs_dirs)s
channel URLs : %(_channels)s
config file : %(rc_path)s
offline mode : %(offline)s
""" % info_dict)
if args.envs:
handle_envs_list(info_dict['envs'], not context.json)
if args.system:
from conda.cli.find_commands import find_commands, find_executable
site_dirs = get_user_site()
evars = ['PATH', 'PYTHONPATH', 'PYTHONHOME', 'CONDA_DEFAULT_ENV',
'CIO_TEST', 'CONDA_ENVS_PATH']
if context.platform == 'linux':
evars.append('LD_LIBRARY_PATH')
elif context.platform == 'osx':
evars.append('DYLD_LIBRARY_PATH')
if context.json:
info_dict['sys.version'] = sys.version
info_dict['sys.prefix'] = sys.prefix
info_dict['sys.executable'] = sys.executable
info_dict['site_dirs'] = get_user_site()
info_dict['env_vars'] = {ev: os.getenv(ev, '<not set>') for ev in evars}
else:
print("sys.version: %s..." % (sys.version[:40]))
print("sys.prefix: %s" % sys.prefix)
print("sys.executable: %s" % sys.executable)
print("conda location: %s" % dirname(conda.__file__))
for cmd in sorted(set(find_commands() + ['build'])):
print("conda-%s: %s" % (cmd, find_executable('conda-' + cmd)))
print("user site dirs: ", end='')
if site_dirs:
print(site_dirs[0])
else:
print()
for site_dir in site_dirs[1:]:
print(' %s' % site_dir)
print()
for ev in sorted(evars):
print("%s: %s" % (ev, os.getenv(ev, '<not set>')))
print()
if args.license and not context.json:
try:
from _license import show_info
show_info()
except ImportError:
print("""\
WARNING: could not import _license.show_info
# try:
# $ conda install -n root _license""")
if context.json:
stdout_json(info_dict)
conda/common/url.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import socket
import sys
from getpass import getpass
from logging import getLogger
from os.path import abspath, expanduser
try:
# Python 3
from urllib.parse import (quote, quote_plus, unquote, unquote_plus, # NOQA
urlunparse as stdlib_urlparse, urljoin) # NOQA
from urllib.request import pathname2url # NOQA
except ImportError:
# Python 2
from urllib import quote, quote_plus, unquote, unquote_plus, pathname2url # NOQA
from urlparse import urlunparse as stdlib_urlparse, urljoin # NOQA
from requests.packages.urllib3.exceptions import LocationParseError
from requests.packages.urllib3.util.url import Url, parse_url
from .._vendor.auxlib.decorators import memoize
log = getLogger(__name__)
on_win = bool(sys.platform == "win32")
@memoize
def path_to_url(path):
path = abspath(expanduser(path))
url = urljoin('file:', pathname2url(path))
log.debug("%s converted to %s", path, url)
return url
def url_to_path(url): # NOQA
"""Convert a file:// URL to a path."""
assert url.startswith('file:'), "You can only turn file: urls into filenames (not %r)" % url
path = url[len('file:'):].lstrip('/')
path = unquote(path)
if re.match('^([a-z])[:|]', path, re.I):
path = path[0] + ':' + path[2:]
elif not path.startswith(r'\\'):
# if not a Windows UNC path
path = '/' + path
return path
@memoize
def urlparse(url):
if on_win and url.startswith('file:'):
url.replace('\\', '/')
return parse_url(url)
def url_to_s3_info(url):
"""
Convert a S3 url to a tuple of bucket and key
"""
parsed_url = parse_url(url)
assert parsed_url.scheme == 's3', "You can only use s3: urls (not %r)" % url
bucket, key = parsed_url.host, parsed_url.path
return bucket, key
def is_url(url):
try:
p = urlparse(url)
return p.netloc is not None or p.scheme == "file"
except LocationParseError:
log.debug("Could not parse url ({0}).".format(url))
return False
def is_ipv4_address(string_ip):
"""
Examples:
>>> [is_ipv4_address(ip) for ip in ('8.8.8.8', '192.168.10.10', '255.255.255.255')]
[True, True, True]
>>> [is_ipv4_address(ip) for ip in ('8.8.8', '192.168.10.10.20', '256.255.255.255', '::1')]
[False, False, False, False]
"""
try:
socket.inet_aton(string_ip)
except socket.error:
return False
return string_ip.count('.') == 3
def is_ipv6_address(string_ip):
"""
Examples:
>>> [is_ipv6_address(ip) for ip in ('::1', '2001:db8:85a3::370:7334', '1234:'*7+'1234')]
[True, True, True]
>>> [is_ipv6_address(ip) for ip in ('192.168.10.10', '1234:'*8+'1234')]
[False, False]
"""
try:
socket.inet_pton(socket.AF_INET6, string_ip)
except socket.error:
return False
return True
def is_ip_address(string_ip):
"""
Examples:
>>> is_ip_address('192.168.10.10')
True
>>> is_ip_address('::1')
True
>>> is_ip_address('www.google.com')
False
"""
return is_ipv4_address(string_ip) or is_ipv6_address(string_ip)
def join(*args):
start = '/' if not args[0] or args[0].startswith('/') else ''
return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y)
join_url = join
def has_scheme(value):
return re.match(r'[a-z][a-z0-9]{0,11}://', value)
def strip_scheme(url):
return url.split('://', 1)[-1]
def mask_anaconda_token(url):
_, token = split_anaconda_token(url)
return url.replace(token, "<TOKEN>", 1) if token else url
def split_anaconda_token(url):
"""
Examples:
>>> split_anaconda_token("https://1.2.3.4/t/tk-123-456/path")
(u'https://1.2.3.4/path', u'tk-123-456')
>>> split_anaconda_token("https://1.2.3.4/t//path")
(u'https://1.2.3.4/path', u'')
>>> split_anaconda_token("https://some.domain/api/t/tk-123-456/path")
(u'https://some.domain/api/path', u'tk-123-456')
>>> split_anaconda_token("https://1.2.3.4/conda/t/tk-123-456/path")
(u'https://1.2.3.4/conda/path', u'tk-123-456')
>>> split_anaconda_token("https://1.2.3.4/path")
(u'https://1.2.3.4/path', None)
>>> split_anaconda_token("https://10.2.3.4:8080/conda/t/tk-123-45")
(u'https://10.2.3.4:8080/conda', u'tk-123-45')
"""
_token_match = re.search(r'/t/([a-zA-Z0-9-]*)', url)
token = _token_match.groups()[0] if _token_match else None
cleaned_url = url.replace('/t/' + token, '', 1) if token is not None else url
return cleaned_url.rstrip('/'), token
def split_platform(url):
"""
Examples:
>>> split_platform("https://1.2.3.4/t/tk-123/osx-64/path")
(u'https://1.2.3.4/t/tk-123/path', u'osx-64')
"""
from conda.base.constants import PLATFORM_DIRECTORIES
_platform_match_regex = r'/(%s)/?' % r'|'.join(r'%s' % d for d in PLATFORM_DIRECTORIES)
_platform_match = re.search(_platform_match_regex, url, re.IGNORECASE)
platform = _platform_match.groups()[0] if _platform_match else None
cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url
return cleaned_url.rstrip('/'), platform
def split_package_filename(url):
cleaned_url, package_filename = (url.rsplit('/', 1) if url.endswith(('.tar.bz2', '.json'))
else (url, None))
return cleaned_url, package_filename
def split_scheme_auth_token(url):
if not url:
return None, None, None, None
cleaned_url, token = split_anaconda_token(url)
url_parts = urlparse(cleaned_url)
remainder_url = Url(host=url_parts.host, port=url_parts.port, path=url_parts.path,
query=url_parts.query).url
return remainder_url, url_parts.scheme, url_parts.auth, token
def split_conda_url_easy_parts(url):
# scheme, auth, token, platform, package_filename, host, port, path, query
cleaned_url, token = split_anaconda_token(url)
cleaned_url, platform = split_platform(cleaned_url)
cleaned_url, package_filename = split_package_filename(cleaned_url)
# TODO: split out namespace using regex
url_parts = urlparse(cleaned_url)
return (url_parts.scheme, url_parts.auth, token, platform, package_filename, url_parts.host,
url_parts.port, url_parts.path, url_parts.query)
def is_windows_path(value):
return re.match(r'[a-z]:[/\\]', value, re.IGNORECASE)
@memoize
def get_proxy_username_and_pass(scheme):
username = input("\n%s proxy username: " % scheme)
passwd = getpass("Password:")
return username, passwd
def add_username_and_password(url, username, password):
url_parts = parse_url(url)._asdict()
url_parts['auth'] = username + ':' + quote(password, '')
return Url(**url_parts).url
def maybe_add_auth(url, auth, force=False):
"""add auth if the url doesn't currently have it"""
if not auth:
return url
url_parts = urlparse(url)._asdict()
if url_parts['auth'] and not force:
return url
url_parts['auth'] = auth
return Url(**url_parts).url
if __name__ == "__main__":
import doctest
doctest.testmod()
| conda/cli/main_info.py
--- a/conda/cli/main_info.py
+++ b/conda/cli/main_info.py
@@ -11,6 +11,7 @@
import re
import sys
from collections import OrderedDict
+from conda.common.url import mask_anaconda_token
from os import listdir
from os.path import exists, expanduser, join
@@ -225,6 +226,7 @@ def execute(args, parser):
if not context.json:
channels = [c + ('' if offline_keep(c) else ' (offline)')
for c in channels]
+ channels = [mask_anaconda_token(c) for c in channels]
info_dict = dict(
platform=context.subdir,
conda/common/url.py
--- a/conda/common/url.py
+++ b/conda/common/url.py
@@ -135,6 +135,11 @@ def strip_scheme(url):
return url.split('://', 1)[-1]
+def mask_anaconda_token(url):
+ _, token = split_anaconda_token(url)
+ return url.replace(token, "<TOKEN>", 1) if token else url
+
+
def split_anaconda_token(url):
"""
Examples: |
conda appears to be stuck during update/install: Access Denied Error on Windows
Hi,
At work we have on occasion noticed that some of our build workers were taking unusually long during some `conda update` and `conda install` operations.
After enabling debugging with `-vv` and `-debug`, we identified that Conda was having trouble removing the `pkgs/.trash` folder. Here's the output we were seeing:
```
10:49:00 INFO conda.common.disk:delete_trash(235): Could not delete path in trash dir W:\Miniconda\pkgs\.trash\b095ce24-6c5c-4df1-aeb1-b726106d1f2b
10:49:00 WindowsError(5, 'Access is denied')
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(154): WindowsError(5, 'Access is denied')
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(163): retrying disk.py/59 <lambda>() in 0.147886 sec
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(154): WindowsError(5, 'Access is denied')
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(163): retrying disk.py/59 <lambda>() in 0.252876 sec
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(154): WindowsError(5, 'Access is denied')
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(163): retrying disk.py/59 <lambda>() in 0.43156 sec
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(154): WindowsError(5, 'Access is denied')
10:49:00 DEBUG conda.common.disk:exp_backoff_fn(163): retrying disk.py/59 <lambda>() in 0.834403 sec
10:49:01 DEBUG conda.common.disk:exp_backoff_fn(154): WindowsError(5, 'Access is denied')
10:49:01 DEBUG conda.common.disk:exp_backoff_fn(163): retrying disk.py/59 <lambda>() in 1.69182 sec
10:49:03 DEBUG conda.common.disk:exp_backoff_fn(154): WindowsError(5, 'Access is denied')
10:49:03 DEBUG conda.common.disk:exp_backoff_fn(163): retrying disk.py/59 <lambda>() in 3.20222 sec
10:49:06 DEBUG conda.common.disk:exp_backoff_fn(154): WindowsError(5, 'Access is denied')
10:49:06 INFO conda.common.disk:delete_trash(235): Could not delete path in trash dir W:\Miniconda\pkgs\.trash\b1e43e9c-d723-4323-8ceb-3b834d67e4e4
10:49:06 WindowsError(5, 'Access is denied')
```
This could go on for dozens of minutes.
Looking at this worker we noticed a few things:
- `conda` was the only Python process running at the time;
- Looking at code we noticed the docs for `exp_backoff_fn`:
``` python
"""Mostly for retrying file operations that fail on Windows due to virus scanners"""
```
Some issues (#2947, #2983) also mention virus scanners, so we disabled the virus scanner we have ("ESET Endpoint Antivirus") but the problem persisted.
Here's the output of `conda info`:
```
Current conda install:
platform : win-64
conda version : 4.2.9+1
conda is private : False
conda-env version : 4.2.9+1
conda-build version : 1.20.0+1
python version : 2.7.12.final.0
requests version : 2.11.1
root environment : W:\Miniconda (writable)
default environment : W:\Miniconda
envs directories : W:\Miniconda\envs
package cache : W:\Miniconda\pkgs
channel URLs : https://eden.esss.com.br/conda-channel/esss/win-64/
https://eden.esss.com.br/conda-channel/esss/noarch/
https://eden.esss.com.br/conda-channel/mirror/win-64/
https://eden.esss.com.br/conda-channel/mirror/noarch/
https://eden.esss.com.br/conda-channel/mirror-conda-forge/win-64/
https://eden.esss.com.br/conda-channel/mirror-conda-forge/noarch/
config file : W:\Miniconda\.condarc
offline mode : False
```
Our current workaround was to remove the `pkgs/.trash` folder just before calling `conda update` or `conda install`.
Any ideas on how to debug this perhaps? Any help is greatly appreciated.
| conda/common/disk.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from errno import EACCES, EEXIST, ENOENT, EPERM
from itertools import chain
from logging import getLogger
from os import W_OK, access, chmod, getpid, listdir, lstat, makedirs, rename, unlink, walk
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, lexists
from shutil import rmtree
from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISLNK, S_ISREG, S_IWRITE
from time import sleep
from uuid import uuid4
from ..compat import lchmod, text_type
from ..utils import on_win
__all__ = ["rm_rf", "exp_backoff_fn", "try_write"]
log = getLogger(__name__)
def try_write(dir_path, heavy=False):
    """Test write access to a directory.

    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access

    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if on_win or heavy:
        # try to create a file to see if `dir_path` is writable, see #2151
        temp_filename = join(dir_path, '.conda-try-write-%d' % getpid())
        try:
            with open(temp_filename, mode='wb') as fo:
                fo.write(b'This is a test file.\n')
            return True
        except (IOError, OSError):
            return False
        finally:
            # The finally clause runs on both outcomes, so this single unlink
            # replaces the redundant one that previously preceded `return True`.
            backoff_unlink(temp_filename)
    else:
        # os.access is faster, but unreliable on Windows — hence the heavy path above.
        return access(dir_path, W_OK)
def backoff_unlink(file_or_symlink_path):
    """Unlink a file or symlink with exponential-backoff retries.

    A path that does not exist (or vanishes mid-operation) is not an error.
    """
    def _remove(path):
        make_writable(path)
        unlink(path)

    try:
        exp_backoff_fn(lambda p: lexists(p) and _remove(p), file_or_symlink_path)
    except (IOError, OSError) as e:
        # errno.ENOENT File not found error / No such file or directory
        if e.errno != ENOENT:
            raise
def backoff_rmdir(dirpath):
    # Recursively remove a directory tree with backoff retries, forcing
    # paths writable first (removal of read-only entries fails on Windows).
    if not isdir(dirpath):
        return

    # shutil.rmtree:
    #   if onerror is set, it is called to handle the error with arguments (func, path, exc_info)
    #     where func is os.listdir, os.remove, or os.rmdir;
    #     path is the argument to that function that caused it to fail; and
    #     exc_info is a tuple returned by sys.exc_info() ==> (type, value, traceback).
    def retry(func, path, exc_info):
        # A path that already vanished is success; otherwise make the parent
        # writable and retry the failed operation once.
        if getattr(exc_info[1], 'errno', None) == ENOENT:
            return
        recursive_make_writable(dirname(path))
        func(path)

    def _rmdir(path):
        # Remove one directory via rmtree, tolerating its disappearance.
        try:
            recursive_make_writable(path)
            exp_backoff_fn(rmtree, path, onerror=retry)
        except (IOError, OSError) as e:
            if e.errno == ENOENT:
                log.debug("no such file or directory: %s", path)
            else:
                raise

    # Walk bottom-up: files first, then emptied subdirectories, then the root.
    for root, dirs, files in walk(dirpath, topdown=False):
        for file in files:
            backoff_unlink(join(root, file))
        for dir in dirs:
            _rmdir(join(root, dir))

    _rmdir(dirpath)
def make_writable(path):
    # Add the owner-write bit to a file, directory, or symlink so a later
    # unlink/rmdir can succeed (Windows marks many files read-only).
    try:
        mode = lstat(path).st_mode
        if S_ISDIR(mode):
            # Directories also need the execute bit to remain traversable.
            chmod(path, S_IMODE(mode) | S_IWRITE | S_IEXEC)
        elif S_ISREG(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE)
        elif S_ISLNK(mode):
            lchmod(path, S_IMODE(mode) | S_IWRITE)
        else:
            log.debug("path cannot be made writable: %s", path)
    except Exception as e:
        eno = getattr(e, 'errno', None)
        if eno in (ENOENT,):
            log.debug("tried to make writable, but didn't exist: %s", path)
            raise
        elif eno in (EACCES, EPERM):
            # NOTE(review): permission errors are swallowed here (debug log
            # only), so the caller cannot tell the chmod failed — confirm
            # this best-effort behavior is intended.
            log.debug("tried make writable but failed: %s\n%r", path, e)
        else:
            log.warn("Error making path writable: %s\n%r", path, e)
            raise
def recursive_make_writable(path):
    # The need for this function was pointed out at
    # https://github.com/conda/conda/issues/3266#issuecomment-239241915
    # Especially on windows, file removal will often fail because it is marked read-only
    if not isdir(path):
        exp_backoff_fn(make_writable, path)
        return
    # Renamed the loop variable so it no longer shadows the `path` parameter.
    for root, dirs, files in walk(path):
        for entry in chain.from_iterable((files, dirs)):
            try:
                exp_backoff_fn(make_writable, join(root, entry))
            except (IOError, OSError) as e:
                if e.errno != ENOENT:
                    raise
                log.debug("no such file or directory: %s", entry)
def exp_backoff_fn(fn, *args, **kwargs):
    """Call ``fn(*args, **kwargs)``, retrying with exponential backoff on
    EPERM/EACCES errors on Windows (mostly caused by virus scanners holding
    file handles open).

    Accepts an optional ``max_tries`` keyword (default 7) so callers can tune
    or effectively disable the retry loop; it is popped here and never
    forwarded to ``fn``. With the default, worst-case total wait is ~6.5 sec.
    """
    max_tries = kwargs.pop('max_tries', 7)
    if not on_win:
        # Retries only help with Windows file locking; elsewhere call through.
        return fn(*args, **kwargs)

    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    for n in range(max_tries):
        try:
            result = fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (EPERM, EACCES):
                if n == max_tries-1:
                    raise
                # Exponential backoff with jitter: ~0.1s, 0.2s, 0.4s, ...
                sleep_time = ((2 ** n) + random.random()) * 0.1
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          sleep_time)
                sleep(sleep_time)
            elif e.errno in (ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.warn("Uncaught backoff with errno %d", e.errno)
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is 5. This only applies
    to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    # NOTE(review): max_retries is never referenced in this body — confirm
    # whether it should be threaded into the backoff helpers or removed.
    try:
        path = abspath(path)
        log.debug("rm_rf %s", path)
        if isdir(path):
            try:
                # On Windows, always move to trash first.
                if trash and on_win:
                    move_result = move_path_to_trash(path, preclean=False)
                    if move_result:
                        return True
                backoff_rmdir(path)
            finally:
                # If path was removed, ensure it's not in linked_data_
                if islink(path) or isfile(path):
                    from conda.install import delete_linked_data_any
                    delete_linked_data_any(path)
        if lexists(path):
            try:
                backoff_unlink(path)
                return True
            except (OSError, IOError) as e:
                log.debug("%r errno %d\nCannot unlink %s.", e, e.errno, path)
                if trash:
                    # Unlink failed; fall back to quarantining in the trash dir.
                    move_result = move_path_to_trash(path)
                    if move_result:
                        return True
                log.info("Failed to remove %s.", path)
        else:
            log.debug("rm_rf failed. Not a link, file, or directory: %s", path)
        return True
    finally:
        # Last-chance diagnostic: the path surviving to this point means
        # every removal strategy above failed.
        if lexists(path):
            log.info("rm_rf failed for %s", path)
def delete_trash(prefix=None):
    # Best-effort cleanup of the .trash directory in every package cache.
    # `prefix` is accepted for interface compatibility but not used here.
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not lexists(trash_dir):
            log.debug("Trash directory %s doesn't exist. Moving on.", trash_dir)
            continue
        log.debug("removing trash for %s", trash_dir)
        for p in listdir(trash_dir):
            path = join(trash_dir, p)
            try:
                # NOTE(review): these helpers retry with full exponential
                # backoff per entry; a trash dir of undeletable files can
                # stall for minutes — consider capping retries here.
                if isdir(path):
                    backoff_rmdir(path)
                else:
                    backoff_unlink(path)
            except (IOError, OSError) as e:
                # Failing to empty the trash must never abort the caller.
                log.info("Could not delete path in trash dir %s\n%r", path, e)
        if listdir(trash_dir):
            log.info("Unable to clean trash directory %s", trash_dir)
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    full_path = join(prefix, f) if f else prefix
    return move_path_to_trash(full_path)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash
    """
    # `preclean` is accepted for interface compatibility; it is not used here.
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')

        try:
            makedirs(trash_dir)
        except (IOError, OSError) as e1:
            if e1.errno != EEXIST:
                # Could not create this cache's trash dir; try the next one.
                continue

        # Random name avoids collisions between repeated trashing of the
        # same path.
        trash_file = join(trash_dir, text_type(uuid4()))

        try:
            # rename is atomic when source and trash share a filesystem.
            rename(path, trash_file)
        except (IOError, OSError) as e:
            log.debug("Could not move %s to %s.\n%r", path, trash_file, e)
        else:
            log.debug("Moved to trash: %s", path)
            from ..install import delete_linked_data_any
            delete_linked_data_any(path)
            return True

    return False
<|code_end|>
| conda/common/disk.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from errno import EACCES, EEXIST, ENOENT, EPERM
from itertools import chain
from logging import getLogger
from os import W_OK, access, chmod, getpid, listdir, lstat, makedirs, rename, unlink, walk
from os.path import abspath, basename, dirname, isdir, isfile, islink, join, lexists
from shutil import rmtree
from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISLNK, S_ISREG, S_IWRITE
from time import sleep
from uuid import uuid4
from ..compat import lchmod, text_type
from ..utils import on_win
__all__ = ["rm_rf", "exp_backoff_fn", "try_write"]
log = getLogger(__name__)
MAX_TRIES = 7
def try_write(dir_path, heavy=False):
    """Test write access to a directory.

    Args:
        dir_path (str): directory to test write access
        heavy (bool): Actually create and delete a file, or do a faster os.access test.
           https://docs.python.org/dev/library/os.html?highlight=xattr#os.access

    Returns:
        bool
    """
    if not isdir(dir_path):
        return False
    if on_win or heavy:
        # try to create a file to see if `dir_path` is writable, see #2151
        temp_filename = join(dir_path, '.conda-try-write-%d' % getpid())
        try:
            with open(temp_filename, mode='wb') as fo:
                fo.write(b'This is a test file.\n')
            return True
        except (IOError, OSError):
            return False
        finally:
            # The finally clause runs on both outcomes, so this single unlink
            # replaces the redundant one that previously preceded `return True`.
            backoff_unlink(temp_filename)
    else:
        # os.access is faster, but unreliable on Windows — hence the heavy path above.
        return access(dir_path, W_OK)
def backoff_unlink(file_or_symlink_path, max_tries=MAX_TRIES):
    """Unlink a file or symlink, retrying up to *max_tries* times.

    A path that does not exist (or vanishes mid-operation) is not an error.
    """
    def _remove(path):
        make_writable(path)
        unlink(path)

    try:
        exp_backoff_fn(lambda p: lexists(p) and _remove(p), file_or_symlink_path,
                       max_tries=max_tries)
    except (IOError, OSError) as e:
        # errno.ENOENT File not found error / No such file or directory
        if e.errno != ENOENT:
            raise
def backoff_rmdir(dirpath, max_tries=MAX_TRIES):
    """Recursively remove *dirpath*, retrying each operation up to *max_tries*
    times and forcing entries writable first (removal of read-only entries
    fails on Windows)."""
    if not isdir(dirpath):
        return

    # shutil.rmtree:
    #   if onerror is set, it is called to handle the error with arguments (func, path, exc_info)
    #     where func is os.listdir, os.remove, or os.rmdir;
    #     path is the argument to that function that caused it to fail; and
    #     exc_info is a tuple returned by sys.exc_info() ==> (type, value, traceback).
    def retry(func, path, exc_info):
        # A path that already vanished is success; otherwise make the parent
        # writable and retry the failed operation once.
        if getattr(exc_info[1], 'errno', None) == ENOENT:
            return
        recursive_make_writable(dirname(path), max_tries=max_tries)
        func(path)

    def _rmdir(path):
        try:
            # Fix: propagate max_tries here; previously this call used the
            # default retry count, defeating callers that request fewer
            # retries (e.g. delete_trash's max_tries=1).
            recursive_make_writable(path, max_tries=max_tries)
            exp_backoff_fn(rmtree, path, onerror=retry, max_tries=max_tries)
        except (IOError, OSError) as e:
            if e.errno == ENOENT:
                log.debug("no such file or directory: %s", path)
            else:
                raise

    # Walk bottom-up: files first, then emptied subdirectories, then the root.
    for root, dirs, files in walk(dirpath, topdown=False):
        for file in files:
            # Fix: honor the caller's retry budget for file unlinks as well.
            backoff_unlink(join(root, file), max_tries=max_tries)
        for dir in dirs:
            _rmdir(join(root, dir))

    _rmdir(dirpath)
def make_writable(path):
    # Add the owner-write bit to a file, directory, or symlink so a later
    # unlink/rmdir can succeed (Windows marks many files read-only).
    try:
        mode = lstat(path).st_mode
        if S_ISDIR(mode):
            # Directories also need the execute bit to remain traversable.
            chmod(path, S_IMODE(mode) | S_IWRITE | S_IEXEC)
        elif S_ISREG(mode):
            chmod(path, S_IMODE(mode) | S_IWRITE)
        elif S_ISLNK(mode):
            lchmod(path, S_IMODE(mode) | S_IWRITE)
        else:
            log.debug("path cannot be made writable: %s", path)
    except Exception as e:
        eno = getattr(e, 'errno', None)
        if eno in (ENOENT,):
            log.debug("tried to make writable, but didn't exist: %s", path)
            raise
        elif eno in (EACCES, EPERM):
            # NOTE(review): permission errors are swallowed here (debug log
            # only), so the caller cannot tell the chmod failed — confirm
            # this best-effort behavior is intended.
            log.debug("tried make writable but failed: %s\n%r", path, e)
        else:
            log.warn("Error making path writable: %s\n%r", path, e)
            raise
def recursive_make_writable(path, max_tries=MAX_TRIES):
    # The need for this function was pointed out at
    # https://github.com/conda/conda/issues/3266#issuecomment-239241915
    # Especially on windows, file removal will often fail because it is marked read-only
    if not isdir(path):
        exp_backoff_fn(make_writable, path, max_tries=max_tries)
        return
    # Renamed the loop variable so it no longer shadows the `path` parameter.
    for root, dirs, files in walk(path):
        for entry in chain.from_iterable((files, dirs)):
            try:
                exp_backoff_fn(make_writable, join(root, entry), max_tries=max_tries)
            except (IOError, OSError) as e:
                if e.errno != ENOENT:
                    raise
                log.debug("no such file or directory: %s", entry)
def exp_backoff_fn(fn, *args, **kwargs):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    # max_tries is consumed here so it is never forwarded to fn.
    max_tries = kwargs.pop('max_tries', MAX_TRIES)
    if not on_win:
        # Retries only help with Windows file locking; elsewhere call through.
        return fn(*args, **kwargs)

    import random
    # with max_tries = 6, max total time ~= 3.2 sec
    # with max_tries = 7, max total time ~= 6.5 sec
    for n in range(max_tries):
        try:
            result = fn(*args, **kwargs)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            if e.errno in (EPERM, EACCES):
                if n == max_tries-1:
                    raise
                # Exponential backoff with jitter: ~0.1s, 0.2s, 0.4s, ...
                sleep_time = ((2 ** n) + random.random()) * 0.1
                caller_frame = sys._getframe(1)
                log.debug("retrying %s/%s %s() in %g sec",
                          basename(caller_frame.f_code.co_filename),
                          caller_frame.f_lineno, fn.__name__,
                          sleep_time)
                sleep(sleep_time)
            elif e.errno in (ENOENT,):
                # errno.ENOENT File not found error / No such file or directory
                raise
            else:
                log.warn("Uncaught backoff with errno %d", e.errno)
                raise
        else:
            return result
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path
    max_retries is the number of times to retry on failure. The default is 5. This only applies
    to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash directory.
    """
    # NOTE(review): max_retries is never referenced in this body — confirm
    # whether it should be threaded into the backoff helpers or removed.
    try:
        path = abspath(path)
        log.debug("rm_rf %s", path)
        if isdir(path):
            try:
                # On Windows, always move to trash first.
                if trash and on_win:
                    move_result = move_path_to_trash(path, preclean=False)
                    if move_result:
                        return True
                backoff_rmdir(path)
            finally:
                # If path was removed, ensure it's not in linked_data_
                if islink(path) or isfile(path):
                    from conda.install import delete_linked_data_any
                    delete_linked_data_any(path)
        if lexists(path):
            try:
                backoff_unlink(path)
                return True
            except (OSError, IOError) as e:
                log.debug("%r errno %d\nCannot unlink %s.", e, e.errno, path)
                if trash:
                    # Unlink failed; fall back to quarantining in the trash dir.
                    move_result = move_path_to_trash(path)
                    if move_result:
                        return True
                log.info("Failed to remove %s.", path)
        else:
            log.debug("rm_rf failed. Not a link, file, or directory: %s", path)
        return True
    finally:
        # Last-chance diagnostic: the path surviving to this point means
        # every removal strategy above failed.
        if lexists(path):
            log.info("rm_rf failed for %s", path)
def delete_trash(prefix=None):
    # Best-effort cleanup of the .trash directory in every package cache.
    # `prefix` is accepted for interface compatibility but not used here.
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if not lexists(trash_dir):
            log.debug("Trash directory %s doesn't exist. Moving on.", trash_dir)
            continue
        log.debug("removing trash for %s", trash_dir)
        for p in listdir(trash_dir):
            path = join(trash_dir, p)
            try:
                # max_tries=1 keeps this cleanup from stalling for minutes
                # when trash entries are held open (e.g. by virus scanners).
                if isdir(path):
                    backoff_rmdir(path, max_tries=1)
                else:
                    backoff_unlink(path, max_tries=1)
            except (IOError, OSError) as e:
                # Failing to empty the trash must never abort the caller.
                log.info("Could not delete path in trash dir %s\n%r", path, e)
        if listdir(trash_dir):
            log.info("Unable to clean trash directory %s", trash_dir)
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash

    tempdir is a deprecated parameter, and will be ignored.

    This function is deprecated in favor of `move_path_to_trash`.
    """
    return move_path_to_trash(join(prefix, f) if f else prefix)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash
    """
    # `preclean` is accepted for interface compatibility; it is not used here.
    from ..base.context import context
    for pkg_dir in context.pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')

        try:
            makedirs(trash_dir)
        except (IOError, OSError) as e1:
            if e1.errno != EEXIST:
                # Could not create this cache's trash dir; try the next one.
                continue

        # Random name avoids collisions between repeated trashing of the
        # same path.
        trash_file = join(trash_dir, text_type(uuid4()))

        try:
            # rename is atomic when source and trash share a filesystem.
            rename(path, trash_file)
        except (IOError, OSError) as e:
            log.debug("Could not move %s to %s.\n%r", path, trash_file, e)
        else:
            log.debug("Moved to trash: %s", path)
            from ..install import delete_linked_data_any
            delete_linked_data_any(path)
            return True

    return False
| conda/common/disk.py
--- a/conda/common/disk.py
+++ b/conda/common/disk.py
@@ -20,6 +20,8 @@
log = getLogger(__name__)
+MAX_TRIES = 7
+
def try_write(dir_path, heavy=False):
"""Test write access to a directory.
@@ -50,20 +52,21 @@ def try_write(dir_path, heavy=False):
return access(dir_path, W_OK)
-def backoff_unlink(file_or_symlink_path):
+def backoff_unlink(file_or_symlink_path, max_tries=MAX_TRIES):
def _unlink(path):
make_writable(path)
unlink(path)
try:
- exp_backoff_fn(lambda f: lexists(f) and _unlink(f), file_or_symlink_path)
+ exp_backoff_fn(lambda f: lexists(f) and _unlink(f), file_or_symlink_path,
+ max_tries=max_tries)
except (IOError, OSError) as e:
if e.errno not in (ENOENT,):
# errno.ENOENT File not found error / No such file or directory
raise
-def backoff_rmdir(dirpath):
+def backoff_rmdir(dirpath, max_tries=MAX_TRIES):
if not isdir(dirpath):
return
@@ -75,13 +78,13 @@ def backoff_rmdir(dirpath):
def retry(func, path, exc_info):
if getattr(exc_info[1], 'errno', None) == ENOENT:
return
- recursive_make_writable(dirname(path))
+ recursive_make_writable(dirname(path), max_tries=max_tries)
func(path)
def _rmdir(path):
try:
recursive_make_writable(path)
- exp_backoff_fn(rmtree, path, onerror=retry)
+ exp_backoff_fn(rmtree, path, onerror=retry, max_tries=max_tries)
except (IOError, OSError) as e:
if e.errno == ENOENT:
log.debug("no such file or directory: %s", path)
@@ -120,7 +123,7 @@ def make_writable(path):
raise
-def recursive_make_writable(path):
+def recursive_make_writable(path, max_tries=MAX_TRIES):
# The need for this function was pointed out at
# https://github.com/conda/conda/issues/3266#issuecomment-239241915
# Especially on windows, file removal will often fail because it is marked read-only
@@ -128,25 +131,25 @@ def recursive_make_writable(path):
for root, dirs, files in walk(path):
for path in chain.from_iterable((files, dirs)):
try:
- exp_backoff_fn(make_writable, join(root, path))
+ exp_backoff_fn(make_writable, join(root, path), max_tries=max_tries)
except (IOError, OSError) as e:
if e.errno == ENOENT:
log.debug("no such file or directory: %s", path)
else:
raise
else:
- exp_backoff_fn(make_writable, path)
+ exp_backoff_fn(make_writable, path, max_tries=max_tries)
def exp_backoff_fn(fn, *args, **kwargs):
"""Mostly for retrying file operations that fail on Windows due to virus scanners"""
+ max_tries = kwargs.pop('max_tries', MAX_TRIES)
if not on_win:
return fn(*args, **kwargs)
import random
# with max_tries = 6, max total time ~= 3.2 sec
# with max_tries = 7, max total time ~= 6.5 sec
- max_tries = 7
for n in range(max_tries):
try:
result = fn(*args, **kwargs)
@@ -228,9 +231,9 @@ def delete_trash(prefix=None):
path = join(trash_dir, p)
try:
if isdir(path):
- backoff_rmdir(path)
+ backoff_rmdir(path, max_tries=1)
else:
- backoff_unlink(path)
+ backoff_unlink(path, max_tries=1)
except (IOError, OSError) as e:
log.info("Could not delete path in trash dir %s\n%r", path, e)
if listdir(trash_dir): |
conda env create giving ImportError for yaml package
`conda env create` suddenly started giving `"ImportError: No module named 'yaml'"` with latest miniconda on my TravisCI builds: https://travis-ci.org/leouieda/website/builds/170917743
I changed nothing significant in my code. Tried rebuilding previous passing builds and started getting the same error.
Is this something from a recent release?
conda env create giving ImportError for yaml package
`conda env create` suddenly started giving `"ImportError: No module named 'yaml'"` with latest miniconda on my TravisCI builds: https://travis-ci.org/leouieda/website/builds/170917743
I changed nothing significant in my code. Tried rebuilding previous passing builds and started getting the same error.
Is this something from a recent release?
| conda_env/yaml.py
<|code_start|>
"""
Wrapper around yaml to ensure that everything is ordered correctly.
This is based on the answer at http://stackoverflow.com/a/16782282
"""
from __future__ import absolute_import, print_function
from collections import OrderedDict
import yaml
def represent_ordereddict(dumper, data):
    """PyYAML representer that serializes an OrderedDict as a plain YAML
    mapping while preserving key insertion order."""
    pairs = [(dumper.represent_data(k), dumper.represent_data(v))
             for k, v in data.items()]
    return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', pairs)
# Register the representer so yaml.dump keeps OrderedDict key order.
yaml.add_representer(OrderedDict, represent_ordereddict)

dump = yaml.dump
# NOTE(review): yaml.load without an explicit Loader can construct arbitrary
# objects from untrusted input — confirm inputs are trusted, or prefer
# safe_load.
load = yaml.load
# Deliberately shadows the builtin `dict` within this module so callers can
# use module.dict as an order-preserving mapping type.
dict = OrderedDict
<|code_end|>
| conda_env/yaml.py
"""
Wrapper around yaml to ensure that everything is ordered correctly.
This is based on the answer at http://stackoverflow.com/a/16782282
"""
from __future__ import absolute_import, print_function
from collections import OrderedDict
# Resolve the yaml implementation through conda's own loader instead of a
# bare `import yaml`, so a missing site-packages PyYAML no longer raises
# ImportError here.
from conda.common.yaml import get_yaml
yaml = get_yaml()
def represent_ordereddict(dumper, data):
    # Serialize an OrderedDict as a plain YAML mapping while preserving the
    # insertion order of its keys.
    value = []
    for item_key, item_value in data.items():
        node_key = dumper.represent_data(item_key)
        node_value = dumper.represent_data(item_value)
        value.append((node_key, node_value))
    return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value)
# Register the representer so yaml.dump keeps OrderedDict key order.
yaml.add_representer(OrderedDict, represent_ordereddict)

dump = yaml.dump
# NOTE(review): yaml.load without an explicit Loader can construct arbitrary
# objects from untrusted input — confirm inputs are trusted, or prefer
# safe_load.
load = yaml.load
# Deliberately shadows the builtin `dict` within this module so callers can
# use module.dict as an order-preserving mapping type.
dict = OrderedDict
| conda_env/yaml.py
--- a/conda_env/yaml.py
+++ b/conda_env/yaml.py
@@ -5,7 +5,9 @@
"""
from __future__ import absolute_import, print_function
from collections import OrderedDict
-import yaml
+
+from conda.common.yaml import get_yaml
+yaml = get_yaml()
def represent_ordereddict(dumper, data): |
Document that $CONDA_BLD_PATH must end with "/conda-bld" (and stop breaking stuff with minor releases)
On September 30, @kalefranz inserted a new assertion (`name == 'conda-bld'`) into `context.py` ([see here](https://github.com/conda/conda/blame/master/conda/base/context.py#L299)) causing `conda info` to fail when `CONDA_BLD_PATH` does not end with `/conda-bld`:
``` console
$ CONDA_BLD_PATH=/tmp conda info
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
`$ /home/user/conda/bin/conda info`
Traceback (most recent call last):
File "/home/user/conda/lib/python3.5/site-packages/conda/exceptions.py", line 479, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/home/user/conda/lib/python3.5/site-packages/conda/cli/main.py", line 145, in _main
exit_code = args.func(args, p)
File "/home/user/conda/lib/python3.5/site-packages/conda/cli/main_info.py", line 225, in execute
channels = list(prioritize_channels(channels).keys())
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 380, in prioritize_channels
channel = Channel(chn)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 161, in __call__
c = Channel.from_value(value)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 211, in from_value
return Channel.from_url(value)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 196, in from_url
return parse_conda_channel_url(url)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 132, in parse_conda_channel_url
configured_token) = _read_channel_configuration(scheme, host, port, path)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 97, in _read_channel_configuration
for name, channel in sorted(context.custom_channels.items(), reverse=True,
File "/home/user/conda/lib/python3.5/site-packages/conda/_vendor/auxlib/decorators.py", line 265, in new_fget
cache[inner_attname] = func(self)
File "/home/user/conda/lib/python3.5/site-packages/conda/base/context.py", line 265, in custom_channels
all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
File "/home/user/conda/lib/python3.5/site-packages/conda/_vendor/auxlib/decorators.py", line 265, in new_fget
cache[inner_attname] = func(self)
File "/home/user/conda/lib/python3.5/site-packages/conda/base/context.py", line 246, in local_build_root_channel
assert name == 'conda-bld'
AssertionError
```
This change was not documented and led to a hard to trace failure of our test suite (we set `$CONDA_BLD_PATH` in our test suite and `conda info` is, e.g., run by `conda build`).
Unfortunately, this was not the first time that conda/conda-build introduced subtle, but breaking changes in a patch release.
Document that $CONDA_BLD_PATH must end with "/conda-bld" (and stop breaking stuff with minor releases)
On September 30, @kalefranz inserted a new assertion (`name == 'conda-bld'`) into `context.py` ([see here](https://github.com/conda/conda/blame/master/conda/base/context.py#L299)) causing `conda info` to fail when `CONDA_BLD_PATH` does not end with `/conda-bld`:
``` console
$ CONDA_BLD_PATH=/tmp conda info
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
`$ /home/user/conda/bin/conda info`
Traceback (most recent call last):
File "/home/user/conda/lib/python3.5/site-packages/conda/exceptions.py", line 479, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/home/user/conda/lib/python3.5/site-packages/conda/cli/main.py", line 145, in _main
exit_code = args.func(args, p)
File "/home/user/conda/lib/python3.5/site-packages/conda/cli/main_info.py", line 225, in execute
channels = list(prioritize_channels(channels).keys())
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 380, in prioritize_channels
channel = Channel(chn)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 161, in __call__
c = Channel.from_value(value)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 211, in from_value
return Channel.from_url(value)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 196, in from_url
return parse_conda_channel_url(url)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 132, in parse_conda_channel_url
configured_token) = _read_channel_configuration(scheme, host, port, path)
File "/home/user/conda/lib/python3.5/site-packages/conda/models/channel.py", line 97, in _read_channel_configuration
for name, channel in sorted(context.custom_channels.items(), reverse=True,
File "/home/user/conda/lib/python3.5/site-packages/conda/_vendor/auxlib/decorators.py", line 265, in new_fget
cache[inner_attname] = func(self)
File "/home/user/conda/lib/python3.5/site-packages/conda/base/context.py", line 265, in custom_channels
all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
File "/home/user/conda/lib/python3.5/site-packages/conda/_vendor/auxlib/decorators.py", line 265, in new_fget
cache[inner_attname] = func(self)
File "/home/user/conda/lib/python3.5/site-packages/conda/base/context.py", line 246, in local_build_root_channel
assert name == 'conda-bld'
AssertionError
```
This change was not documented and led to a hard to trace failure of our test suite (we set `$CONDA_BLD_PATH` in our test suite and `conda info` is, e.g., run by `conda build`).
Unfortunately, this was not the first time that conda/conda-build introduced subtle, but breaking changes in a patch release.
| conda/base/context.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
import sys
from collections import Sequence
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.decorators import memoizedproperty
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.path import expand
from ..common.compat import iteritems, odict
from ..common.configuration import (Configuration, LoadError, MapParameter, PrimitiveParameter,
SequenceParameter)
from ..common.disk import try_write
from ..common.url import has_scheme, path_to_url, split_scheme_auth_token, urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
try:
from cytoolz.itertoolz import concat, concatv
except ImportError:
from .._vendor.toolz.itertoolz import concat, concatv
log = getLogger(__name__)
# Optional internal test package; its absence is normal for end users.
try:
    import cio_test  # NOQA
except ImportError:
    log.info("No cio_test package found.")
# Map sys.platform values to conda's platform names.
_platform_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
}
# Linux machine architectures that are not x86-based.
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
# Map pointer-size bits to conda architecture names.
_arch_names = {
    32: 'x86',
    64: 'x86_64',
}
def channel_alias_validation(value):
    """Validate the channel_alias setting.

    Returns True when the value is empty or already carries a scheme;
    otherwise returns an error-message string describing the problem.
    """
    if not value or has_scheme(value):
        return True
    return "channel_alias value '%s' must have scheme/protocol." % value
class Context(Configuration):
add_pip_as_python_dependency = PrimitiveParameter(True)
allow_softlinks = PrimitiveParameter(True)
auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
changeps1 = PrimitiveParameter(True)
create_default_packages = SequenceParameter(string_types)
disallow = SequenceParameter(string_types)
force_32bit = PrimitiveParameter(False)
track_features = SequenceParameter(string_types)
use_pip = PrimitiveParameter(True)
_root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))
# connection details
ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
client_tls_cert = PrimitiveParameter('', aliases=('client_cert',))
client_tls_cert_key = PrimitiveParameter('', aliases=('client_cert_key',))
proxy_servers = MapParameter(string_types)
add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
_channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS,
aliases=('channel_alias',),
validation=channel_alias_validation)
# channels
channels = SequenceParameter(string_types, default=('defaults',))
_migrated_channel_aliases = SequenceParameter(string_types,
aliases=('migrated_channel_aliases',)) # TODO: also take a list of strings # NOQA
_default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS,
aliases=('default_channels',))
_custom_channels = MapParameter(string_types, aliases=('custom_channels',))
migrated_custom_channels = MapParameter(string_types) # TODO: also take a list of strings
_custom_multichannels = MapParameter(Sequence, aliases=('custom_multichannels',))
# command line
always_copy = PrimitiveParameter(False, aliases=('copy',))
always_yes = PrimitiveParameter(False, aliases=('yes',))
channel_priority = PrimitiveParameter(True)
debug = PrimitiveParameter(False)
json = PrimitiveParameter(False)
offline = PrimitiveParameter(False)
quiet = PrimitiveParameter(False)
shortcuts = PrimitiveParameter(True)
show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)
# conda_build
bld_path = PrimitiveParameter('')
binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
parameter_type=(bool, NoneType))
_envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs', 'envs_path'),
string_delimiter=os.pathsep)
@property
def default_python(self):
ver = sys.version_info
return '%d.%d' % (ver.major, ver.minor)
@property
def arch_name(self):
m = machine()
if self.platform == 'linux' and m in non_x86_linux_machines:
return m
else:
return _arch_names[self.bits]
@property
def platform(self):
return _platform_map.get(sys.platform, 'unknown')
@property
def subdir(self):
m = machine()
if m in non_x86_linux_machines:
return 'linux-%s' % m
else:
return '%s-%d' % (self.platform, self.bits)
@property
def bits(self):
if self.force_32bit:
return 32
else:
return 8 * tuple.__itemsize__
@property
def local_build_root(self):
# TODO: import from conda_build, and fall back to something incredibly simple
if self.bld_path:
return expand(self.bld_path)
elif self.root_writable:
return join(self.conda_prefix, 'conda-bld')
else:
return expand('~/conda-bld')
@property
def root_dir(self):
# root_dir is an alias for root_prefix, we prefer the name "root_prefix"
# because it is more consistent with other names
return abspath(expanduser(self._root_dir))
@property
def root_writable(self):
return try_write(self.root_dir)
@property
def envs_dirs(self):
return tuple(abspath(expanduser(p))
for p in concatv(self._envs_dirs,
(join(self.root_dir, 'envs'), )
if self.root_writable
else ('~/.conda/envs', join(self.root_dir, 'envs'))))
@property
def pkgs_dirs(self):
return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]
@property
def default_prefix(self):
_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, ROOT_ENV_NAME):
return self.root_dir
elif os.sep in _default_env:
return abspath(_default_env)
else:
for envs_dir in self.envs_dirs:
default_prefix = join(envs_dir, _default_env)
if isdir(default_prefix):
return default_prefix
return join(self.envs_dirs[0], _default_env)
@property
def prefix(self):
return get_prefix(self, self._argparse_args, False)
@property
def prefix_w_legacy_search(self):
return get_prefix(self, self._argparse_args, True)
@property
def clone_src(self):
assert self._argparse_args.clone is not None
return locate_prefix_by_name(self, self._argparse_args.clone)
@property
def conda_in_root(self):
return not conda_in_private_env()
@property
def conda_private(self):
return conda_in_private_env()
@property
def root_prefix(self):
return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix
@property
def conda_prefix(self):
return sys.prefix
@memoizedproperty
def channel_alias(self):
from ..models.channel import Channel
location, scheme, auth, token = split_scheme_auth_token(self._channel_alias)
return Channel(scheme=scheme, auth=auth, location=location, token=token)
@property
def migrated_channel_aliases(self):
from ..models.channel import Channel
return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token)
for location, scheme, auth, token in
(split_scheme_auth_token(c) for c in self._migrated_channel_aliases))
@memoizedproperty
def default_channels(self):
# the format for 'default_channels' is a list of strings that either
# - start with a scheme
# - are meant to be prepended with channel_alias
from ..models.channel import Channel
return tuple(Channel.make_simple_channel(self.channel_alias, v)
for v in self._default_channels)
@memoizedproperty
def local_build_root_channel(self):
from ..models.channel import Channel
url_parts = urlparse(path_to_url(self.local_build_root))
location, name = url_parts.path.rsplit('/', 1)
if not location:
location = '/'
assert name == 'conda-bld'
return Channel(scheme=url_parts.scheme, location=location, name=name)
@memoizedproperty
def custom_multichannels(self):
from ..models.channel import Channel
default_custom_multichannels = {
'defaults': self.default_channels,
'local': (self.local_build_root_channel,),
}
all_channels = default_custom_multichannels, self._custom_multichannels
return odict((name, tuple(Channel(v) for v in c))
for name, c in concat(map(iteritems, all_channels)))
@memoizedproperty
def custom_channels(self):
from ..models.channel import Channel
custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
for name, url in iteritems(self._custom_channels))
all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
all_channels = (ch for ch in concat(all_sources))
return odict((x.name, x) for x in all_channels)
def conda_in_private_env():
# conda is located in its own private environment named '_conda'
return basename(sys.prefix) == '_conda' and basename(dirname(sys.prefix)) == 'envs'
def reset_context(search_path=SEARCH_PATH, argparse_args=None):
context.__init__(search_path, conda, argparse_args)
from ..models.channel import Channel
Channel._reset_state()
return context
def pkgs_dir_from_envs_dir(envs_dir):
if abspath(envs_dir) == abspath(join(context.root_dir, 'envs')):
return join(context.root_dir, 'pkgs32' if context.force_32bit else 'pkgs')
else:
return join(envs_dir, '.pkgs')
def get_help_dict():
# this is a function so that most of the time it's not evaluated and loaded into memory
return {
'add_pip_as_python_dependency': dals("""
"""),
'always_yes': dals("""
"""),
'always_copy': dals("""
"""),
'changeps1': dals("""
"""),
'use_pip': dals("""
Use pip when listing packages with conda list. Note that this does not affect any
conda command or functionality other than the output of the command conda list.
"""),
'binstar_upload': dals("""
"""),
'allow_softlinks': dals("""
"""),
'self_update': dals("""
"""),
'show_channel_urls': dals("""
# show channel URLs when displaying what is going to be downloaded
# None means letting conda decide
"""),
'update_dependencies': dals("""
"""),
'channel_priority': dals("""
"""),
'ssl_verify': dals("""
# ssl_verify can be a boolean value or a filename string
"""),
'client_tls_cert': dals("""
# client_tls_cert can be a path pointing to a single file
# containing the private key and the certificate (e.g. .pem),
# or use 'client_tls_cert_key' in conjuction with 'client_tls_cert' for
# individual files
"""),
'client_tls_cert_key': dals("""
# used in conjunction with 'client_tls_cert' for a matching key file
"""),
'track_features': dals("""
"""),
'channels': dals("""
"""),
'disallow': dals("""
# set packages disallowed to be installed
"""),
'create_default_packages': dals("""
# packages which are added to a newly created environment by default
"""),
'envs_dirs': dals("""
"""),
'default_channels': dals("""
"""),
'proxy_servers': dals("""
"""),
'force_32bit': dals("""
CONDA_FORCE_32BIT should only be used when running conda-build (in order
to build 32-bit packages on a 64-bit system). We don't want to mention it
in the documentation, because it can mess up a lot of things.
""")
}
def get_prefix(ctx, args, search=True):
"""Get the prefix to operate in
Args:
ctx: the context of conda
args: the argparse args from the command line
search: whether search for prefix
Returns: the prefix
Raises: CondaEnvironmentNotFoundError if the prefix is invalid
"""
if args.name:
if '/' in args.name:
raise CondaValueError("'/' not allowed in environment name: %s" %
args.name, getattr(args, 'json', False))
if args.name == ROOT_ENV_NAME:
return ctx.root_dir
if search:
return locate_prefix_by_name(ctx, args.name)
else:
return join(ctx.envs_dirs[0], args.name)
elif args.prefix:
return abspath(expanduser(args.prefix))
else:
return ctx.default_prefix
def locate_prefix_by_name(ctx, name):
""" Find the location of a prefix given a conda env name.
Args:
ctx (Context): the context object
name (str): the name of prefix to find
Returns:
str: the location of the prefix found, or CondaValueError will raise if not found
Raises:
CondaValueError: when no prefix is found
"""
if name == ROOT_ENV_NAME:
return ctx.root_dir
# look for a directory named `name` in all envs_dirs AND in CWD
for envs_dir in chain(ctx.envs_dirs + (os.getcwd(),)):
prefix = join(envs_dir, name)
if isdir(prefix):
return prefix
raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
if inroot_notwritable(prefix):
from conda.cli.help import root_read_only
root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
"""
return True if the prefix is under root and root is not writeable
"""
return (abspath(prefix).startswith(context.root_dir) and
not context.root_writable)
try:
context = Context(SEARCH_PATH, conda, None)
except LoadError as e:
print(e, file=sys.stderr)
# Exception handler isn't loaded so use sys.exit
sys.exit(1)
<|code_end|>
| conda/base/context.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
import sys
from collections import Sequence
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.decorators import memoizedproperty
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.path import expand
from ..common.compat import iteritems, odict
from ..common.configuration import (Configuration, LoadError, MapParameter, PrimitiveParameter,
SequenceParameter)
from ..common.disk import try_write
from ..common.url import has_scheme, path_to_url, split_scheme_auth_token, urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
try:
from cytoolz.itertoolz import concat, concatv
except ImportError:
from .._vendor.toolz.itertoolz import concat, concatv
log = getLogger(__name__)
try:
import cio_test # NOQA
except ImportError:
log.info("No cio_test package found.")
_platform_map = {
'linux2': 'linux',
'linux': 'linux',
'darwin': 'osx',
'win32': 'win',
}
non_x86_linux_machines = {'armv6l', 'armv7l', 'ppc64le'}
_arch_names = {
32: 'x86',
64: 'x86_64',
}
def channel_alias_validation(value):
if value and not has_scheme(value):
return "channel_alias value '%s' must have scheme/protocol." % value
return True
class Context(Configuration):
add_pip_as_python_dependency = PrimitiveParameter(True)
allow_softlinks = PrimitiveParameter(True)
auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
changeps1 = PrimitiveParameter(True)
create_default_packages = SequenceParameter(string_types)
disallow = SequenceParameter(string_types)
force_32bit = PrimitiveParameter(False)
track_features = SequenceParameter(string_types)
use_pip = PrimitiveParameter(True)
_root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))
# connection details
ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
client_tls_cert = PrimitiveParameter('', aliases=('client_cert',))
client_tls_cert_key = PrimitiveParameter('', aliases=('client_cert_key',))
proxy_servers = MapParameter(string_types)
add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
_channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS,
aliases=('channel_alias',),
validation=channel_alias_validation)
# channels
channels = SequenceParameter(string_types, default=('defaults',))
_migrated_channel_aliases = SequenceParameter(string_types,
aliases=('migrated_channel_aliases',)) # TODO: also take a list of strings # NOQA
_default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS,
aliases=('default_channels',))
_custom_channels = MapParameter(string_types, aliases=('custom_channels',))
migrated_custom_channels = MapParameter(string_types) # TODO: also take a list of strings
_custom_multichannels = MapParameter(Sequence, aliases=('custom_multichannels',))
# command line
always_copy = PrimitiveParameter(False, aliases=('copy',))
always_yes = PrimitiveParameter(False, aliases=('yes',))
channel_priority = PrimitiveParameter(True)
debug = PrimitiveParameter(False)
json = PrimitiveParameter(False)
offline = PrimitiveParameter(False)
quiet = PrimitiveParameter(False)
shortcuts = PrimitiveParameter(True)
show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)
# conda_build
bld_path = PrimitiveParameter('')
binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
parameter_type=(bool, NoneType))
_envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs', 'envs_path'),
string_delimiter=os.pathsep)
@property
def default_python(self):
ver = sys.version_info
return '%d.%d' % (ver.major, ver.minor)
@property
def arch_name(self):
m = machine()
if self.platform == 'linux' and m in non_x86_linux_machines:
return m
else:
return _arch_names[self.bits]
@property
def platform(self):
return _platform_map.get(sys.platform, 'unknown')
@property
def subdir(self):
m = machine()
if m in non_x86_linux_machines:
return 'linux-%s' % m
else:
return '%s-%d' % (self.platform, self.bits)
@property
def bits(self):
if self.force_32bit:
return 32
else:
return 8 * tuple.__itemsize__
@property
def local_build_root(self):
# TODO: import from conda_build, and fall back to something incredibly simple
if self.bld_path:
return expand(self.bld_path)
elif self.root_writable:
return join(self.conda_prefix, 'conda-bld')
else:
return expand('~/conda-bld')
@property
def root_dir(self):
# root_dir is an alias for root_prefix, we prefer the name "root_prefix"
# because it is more consistent with other names
return abspath(expanduser(self._root_dir))
@property
def root_writable(self):
return try_write(self.root_dir)
@property
def envs_dirs(self):
return tuple(abspath(expanduser(p))
for p in concatv(self._envs_dirs,
(join(self.root_dir, 'envs'), )
if self.root_writable
else ('~/.conda/envs', join(self.root_dir, 'envs'))))
@property
def pkgs_dirs(self):
return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]
@property
def default_prefix(self):
_default_env = os.getenv('CONDA_DEFAULT_ENV')
if _default_env in (None, ROOT_ENV_NAME):
return self.root_dir
elif os.sep in _default_env:
return abspath(_default_env)
else:
for envs_dir in self.envs_dirs:
default_prefix = join(envs_dir, _default_env)
if isdir(default_prefix):
return default_prefix
return join(self.envs_dirs[0], _default_env)
@property
def prefix(self):
return get_prefix(self, self._argparse_args, False)
@property
def prefix_w_legacy_search(self):
return get_prefix(self, self._argparse_args, True)
@property
def clone_src(self):
assert self._argparse_args.clone is not None
return locate_prefix_by_name(self, self._argparse_args.clone)
@property
def conda_in_root(self):
return not conda_in_private_env()
@property
def conda_private(self):
return conda_in_private_env()
@property
def root_prefix(self):
return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix
@property
def conda_prefix(self):
return sys.prefix
@memoizedproperty
def channel_alias(self):
from ..models.channel import Channel
location, scheme, auth, token = split_scheme_auth_token(self._channel_alias)
return Channel(scheme=scheme, auth=auth, location=location, token=token)
@property
def migrated_channel_aliases(self):
from ..models.channel import Channel
return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token)
for location, scheme, auth, token in
(split_scheme_auth_token(c) for c in self._migrated_channel_aliases))
@memoizedproperty
def default_channels(self):
# the format for 'default_channels' is a list of strings that either
# - start with a scheme
# - are meant to be prepended with channel_alias
from ..models.channel import Channel
return tuple(Channel.make_simple_channel(self.channel_alias, v)
for v in self._default_channels)
@memoizedproperty
def local_build_root_channel(self):
from ..models.channel import Channel
url_parts = urlparse(path_to_url(self.local_build_root))
location, name = url_parts.path.rsplit('/', 1)
if not location:
location = '/'
return Channel(scheme=url_parts.scheme, location=location, name=name)
@memoizedproperty
def custom_multichannels(self):
from ..models.channel import Channel
default_custom_multichannels = {
'defaults': self.default_channels,
'local': (self.local_build_root_channel,),
}
all_channels = default_custom_multichannels, self._custom_multichannels
return odict((name, tuple(Channel(v) for v in c))
for name, c in concat(map(iteritems, all_channels)))
@memoizedproperty
def custom_channels(self):
from ..models.channel import Channel
custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
for name, url in iteritems(self._custom_channels))
all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
all_channels = (ch for ch in concat(all_sources))
return odict((x.name, x) for x in all_channels)
def conda_in_private_env():
# conda is located in its own private environment named '_conda'
return basename(sys.prefix) == '_conda' and basename(dirname(sys.prefix)) == 'envs'
def reset_context(search_path=SEARCH_PATH, argparse_args=None):
context.__init__(search_path, conda, argparse_args)
from ..models.channel import Channel
Channel._reset_state()
return context
def pkgs_dir_from_envs_dir(envs_dir):
if abspath(envs_dir) == abspath(join(context.root_dir, 'envs')):
return join(context.root_dir, 'pkgs32' if context.force_32bit else 'pkgs')
else:
return join(envs_dir, '.pkgs')
def get_help_dict():
# this is a function so that most of the time it's not evaluated and loaded into memory
return {
'add_pip_as_python_dependency': dals("""
"""),
'always_yes': dals("""
"""),
'always_copy': dals("""
"""),
'changeps1': dals("""
"""),
'use_pip': dals("""
Use pip when listing packages with conda list. Note that this does not affect any
conda command or functionality other than the output of the command conda list.
"""),
'binstar_upload': dals("""
"""),
'allow_softlinks': dals("""
"""),
'self_update': dals("""
"""),
'show_channel_urls': dals("""
# show channel URLs when displaying what is going to be downloaded
# None means letting conda decide
"""),
'update_dependencies': dals("""
"""),
'channel_priority': dals("""
"""),
'ssl_verify': dals("""
# ssl_verify can be a boolean value or a filename string
"""),
'client_tls_cert': dals("""
# client_tls_cert can be a path pointing to a single file
# containing the private key and the certificate (e.g. .pem),
# or use 'client_tls_cert_key' in conjuction with 'client_tls_cert' for
# individual files
"""),
'client_tls_cert_key': dals("""
# used in conjunction with 'client_tls_cert' for a matching key file
"""),
'track_features': dals("""
"""),
'channels': dals("""
"""),
'disallow': dals("""
# set packages disallowed to be installed
"""),
'create_default_packages': dals("""
# packages which are added to a newly created environment by default
"""),
'envs_dirs': dals("""
"""),
'default_channels': dals("""
"""),
'proxy_servers': dals("""
"""),
'force_32bit': dals("""
CONDA_FORCE_32BIT should only be used when running conda-build (in order
to build 32-bit packages on a 64-bit system). We don't want to mention it
in the documentation, because it can mess up a lot of things.
""")
}
def get_prefix(ctx, args, search=True):
"""Get the prefix to operate in
Args:
ctx: the context of conda
args: the argparse args from the command line
search: whether search for prefix
Returns: the prefix
Raises: CondaEnvironmentNotFoundError if the prefix is invalid
"""
if args.name:
if '/' in args.name:
raise CondaValueError("'/' not allowed in environment name: %s" %
args.name, getattr(args, 'json', False))
if args.name == ROOT_ENV_NAME:
return ctx.root_dir
if search:
return locate_prefix_by_name(ctx, args.name)
else:
return join(ctx.envs_dirs[0], args.name)
elif args.prefix:
return abspath(expanduser(args.prefix))
else:
return ctx.default_prefix
def locate_prefix_by_name(ctx, name):
""" Find the location of a prefix given a conda env name.
Args:
ctx (Context): the context object
name (str): the name of prefix to find
Returns:
str: the location of the prefix found, or CondaValueError will raise if not found
Raises:
CondaValueError: when no prefix is found
"""
if name == ROOT_ENV_NAME:
return ctx.root_dir
# look for a directory named `name` in all envs_dirs AND in CWD
for envs_dir in chain(ctx.envs_dirs + (os.getcwd(),)):
prefix = join(envs_dir, name)
if isdir(prefix):
return prefix
raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
if inroot_notwritable(prefix):
from conda.cli.help import root_read_only
root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
"""
return True if the prefix is under root and root is not writeable
"""
return (abspath(prefix).startswith(context.root_dir) and
not context.root_writable)
try:
context = Context(SEARCH_PATH, conda, None)
except LoadError as e:
print(e, file=sys.stderr)
# Exception handler isn't loaded so use sys.exit
sys.exit(1)
| conda/base/context.py
--- a/conda/base/context.py
+++ b/conda/base/context.py
@@ -243,7 +243,6 @@ def local_build_root_channel(self):
location, name = url_parts.path.rsplit('/', 1)
if not location:
location = '/'
- assert name == 'conda-bld'
return Channel(scheme=url_parts.scheme, location=location, name=name)
@memoizedproperty |
regression bug: https://github.com/conda/conda/issues/3235 appears to have resurrected itself in another place
The bug was that conda handled the channel "http://conda-01" incorrectly. Here's the stack trace in conda 4.2.11:
```
Traceback (most recent call last):
File "C:\Miniconda3\lib\site-packages\conda\exceptions.py", line 479, in c
onda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Miniconda3\lib\site-packages\conda\cli\main.py", line 145, in _ma
in
exit_code = args.func(args, p)
File "C:\Miniconda3\lib\site-packages\conda\cli\main_update.py", line 65,
in execute
install(args, parser, 'update')
File "C:\Miniconda3\lib\site-packages\conda\cli\install.py", line 308, in
install
update_deps=context.update_dependencies)
File "C:\Miniconda3\lib\site-packages\conda\plan.py", line 526, in install
_actions
force=force, always_copy=always_copy)
File "C:\Miniconda3\lib\site-packages\conda\plan.py", line 308, in ensure_
linked_actions
fetched_in = is_fetched(dist)
File "C:\Miniconda3\lib\site-packages\conda\install.py", line 727, in is_f
etched
for fn in package_cache().get(dist, {}).get('files', ()):
File "C:\Miniconda3\lib\site-packages\conda\install.py", line 675, in pack
age_cache
add_cached_package(pdir, url)
File "C:\Miniconda3\lib\site-packages\conda\install.py", line 633, in add_
cached_package
schannel = Channel(url).canonical_name
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 161,
in __call__
c = Channel.from_value(value)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 211,
in from_value
return Channel.from_url(value)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 196,
in from_url
return parse_conda_channel_url(url)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 132,
in parse_conda_channel_url
configured_token) = _read_channel_configuration(scheme, host, port, path
)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 122,
in _read_channel_configuration
return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or No
ne,
AttributeError: 'NoneType' object has no attribute 'strip'
```
regression bug: https://github.com/conda/conda/issues/3235 appears to have resurrected itself in another place
The bug was that conda handled the channel "http://conda-01" incorrectly. Here's the stack trace in conda 4.2.11:
```
Traceback (most recent call last):
File "C:\Miniconda3\lib\site-packages\conda\exceptions.py", line 479, in c
onda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Miniconda3\lib\site-packages\conda\cli\main.py", line 145, in _ma
in
exit_code = args.func(args, p)
File "C:\Miniconda3\lib\site-packages\conda\cli\main_update.py", line 65,
in execute
install(args, parser, 'update')
File "C:\Miniconda3\lib\site-packages\conda\cli\install.py", line 308, in
install
update_deps=context.update_dependencies)
File "C:\Miniconda3\lib\site-packages\conda\plan.py", line 526, in install
_actions
force=force, always_copy=always_copy)
File "C:\Miniconda3\lib\site-packages\conda\plan.py", line 308, in ensure_
linked_actions
fetched_in = is_fetched(dist)
File "C:\Miniconda3\lib\site-packages\conda\install.py", line 727, in is_f
etched
for fn in package_cache().get(dist, {}).get('files', ()):
File "C:\Miniconda3\lib\site-packages\conda\install.py", line 675, in pack
age_cache
add_cached_package(pdir, url)
File "C:\Miniconda3\lib\site-packages\conda\install.py", line 633, in add_
cached_package
schannel = Channel(url).canonical_name
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 161,
in __call__
c = Channel.from_value(value)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 211,
in from_value
return Channel.from_url(value)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 196,
in from_url
return parse_conda_channel_url(url)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 132,
in parse_conda_channel_url
configured_token) = _read_channel_configuration(scheme, host, port, path
)
File "C:\Miniconda3\lib\site-packages\conda\models\channel.py", line 122,
in _read_channel_configuration
return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or No
ne,
AttributeError: 'NoneType' object has no attribute 'strip'
```
| conda/models/channel.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from itertools import chain
from logging import getLogger
from requests.packages.urllib3.util import Url
from ..base.constants import DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN, UTF8
from ..base.context import context
from ..common.compat import iteritems, odict, with_metaclass
from ..common.url import (has_scheme, is_url, is_windows_path, join_url, on_win, path_to_url,
split_conda_url_easy_parts, split_scheme_auth_token, urlparse)
try:
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concatv, topk
except ImportError:
from .._vendor.toolz.functoolz import excepts # NOQA
from .._vendor.toolz.itertoolz import concatv, topk # NOQA
log = getLogger(__name__)
# backward compatibility for conda-build
def get_conda_build_local_url():
return context.local_build_root,
"""
scheme <> auth <> location <> token <> channel <> subchannel <> platform <> package_filename
channel <> subchannel <> namespace <> package_name
"""
def tokenized_startswith(test_iterable, startswith_iterable):
return all(t == sw for t, sw in zip(test_iterable, startswith_iterable))
def tokenized_conda_url_startswith(test_url, startswith_url):
test_url, startswith_url = urlparse(test_url), urlparse(startswith_url)
if test_url.host != startswith_url.host or test_url.port != startswith_url.port:
return False
norm_url_path = lambda url: url.path.strip('/') or '/'
return tokenized_startswith(norm_url_path(test_url).split('/'),
norm_url_path(startswith_url).split('/'))
def _get_channel_for_name(channel_name):
def _get_channel_for_name_helper(name):
if name in context.custom_channels:
return context.custom_channels[name]
else:
test_name = name.rsplit('/', 1)[0] # progressively strip off path segments
if test_name == name:
return None
return _get_channel_for_name_helper(test_name)
channel = _get_channel_for_name_helper(channel_name)
if channel is not None:
# stripping off path threw information away from channel_name (i.e. any potential subname)
# channel.name *should still be* channel_name
channel.name = channel_name
return channel
else:
ca = context.channel_alias
return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
name=channel_name)
def _read_channel_configuration(scheme, host, port, path):
# return location, name, scheme, auth, token
test_url = Url(host=host, port=port, path=path).url.rstrip('/')
# Step 1. migrated_custom_channels matches
for name, location in sorted(context.migrated_custom_channels.items(), reverse=True,
key=lambda x: len(x[0])):
location, _scheme, _auth, _token = split_scheme_auth_token(location)
if tokenized_conda_url_startswith(test_url, join_url(location, name)):
# translate location to new location, with new credentials
subname = test_url.replace(join_url(location, name), '', 1).strip('/')
channel_name = join_url(name, subname)
channel = _get_channel_for_name(channel_name)
return channel.location, channel_name, channel.scheme, channel.auth, channel.token
# Step 2. migrated_channel_aliases matches
for migrated_alias in context.migrated_channel_aliases:
if test_url.startswith(migrated_alias.location):
name = test_url.replace(migrated_alias.location, '', 1).strip('/')
ca = context.channel_alias
return ca.location, name, ca.scheme, ca.auth, ca.token
# Step 3. custom_channels matches
for name, channel in sorted(context.custom_channels.items(), reverse=True,
key=lambda x: len(x[0])):
that_test_url = join_url(channel.location, channel.name)
if test_url.startswith(that_test_url):
subname = test_url.replace(that_test_url, '', 1).strip('/')
return (channel.location, join_url(channel.name, subname), scheme,
channel.auth, channel.token)
# Step 4. channel_alias match
ca = context.channel_alias
if ca.location and test_url.startswith(ca.location):
name = test_url.replace(ca.location, '', 1).strip('/') or None
return ca.location, name, scheme, ca.auth, ca.token
# Step 5. not-otherwise-specified file://-type urls
if host is None:
# this should probably only happen with a file:// type url
assert port is None
location, name = test_url.rsplit('/', 1)
if not location:
location = '/'
_scheme, _auth, _token = 'file', None, None
return location, name, _scheme, _auth, _token
# Step 6. fall through to host:port as channel_location and path as channel_name
return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or None,
scheme or None, None, None)
def parse_conda_channel_url(url):
    """Parse a conda url into a Channel, merging in configured credentials.

    Credentials embedded in the url itself take precedence over any that come
    from configuration; the scheme defaults to https when neither supplies one.
    """
    parts = split_conda_url_easy_parts(url)
    scheme, auth, token, platform, package_filename, host, port, path, query = parts
    # Recombine host/port/path into a channel_location and channel_name,
    # picking up scheme/auth/token overrides from configuration.
    location, name, cfg_scheme, cfg_auth, cfg_token = _read_channel_configuration(
        scheme, host, port, path)
    # At least one of location/name must have been resolved by now.
    assert location is not None or name is not None
    return Channel(cfg_scheme or 'https',
                   auth or cfg_auth,
                   location,
                   token or cfg_token,
                   name,
                   platform,
                   package_filename)
class ChannelType(type):
    """
    Metaclass giving Channel single-argument constructor semantics with
    memoization: ``Channel(value)`` coerces ``value`` and caches the result,
    while multi-argument construction behaves like a normal class call.
    """
    def __call__(cls, *args, **kwargs):
        if len(args) != 1 or kwargs:
            # Normal construction path; never cached.
            return super(ChannelType, cls).__call__(*args, **kwargs)
        value = args[0]
        if isinstance(value, Channel):
            return value
        try:
            return Channel._cache_[value]
        except KeyError:
            channel = Channel.from_value(value)
            Channel._cache_[value] = channel
            return channel
@with_metaclass(ChannelType)
class Channel(object):
    """
    A conda channel, decomposed into scheme, auth, location, token, name,
    platform, and package_filename.  Instances built through the metaclass
    (``Channel(value)``) are memoized in ``_cache_`` keyed by ``value``.
    """
    _cache_ = dict()
    @staticmethod
    def _reset_state():
        # Drop all memoized instances (used when configuration changes).
        Channel._cache_ = dict()
    def __init__(self, scheme=None, auth=None, location=None, token=None, name=None,
                 platform=None, package_filename=None):
        self.scheme = scheme
        self.auth = auth
        self.location = location
        self.token = token
        self.name = name
        self.platform = platform
        self.package_filename = package_filename
    @property
    def channel_location(self):
        # Alias kept for backward compatibility.
        return self.location
    @property
    def channel_name(self):
        # Alias kept for backward compatibility.
        return self.name
    @staticmethod
    def from_url(url):
        """Build a Channel by parsing a full conda url."""
        return parse_conda_channel_url(url)
    @staticmethod
    def from_channel_name(channel_name):
        """Build a Channel from a bare name using configured custom channels."""
        return _get_channel_for_name(channel_name)
    @staticmethod
    def from_value(value):
        """Coerce an arbitrary value (None, url, local path, tarball, or
        channel name) into a Channel or MultiChannel."""
        if value is None:
            return Channel(name="<unknown>")
        if hasattr(value, 'decode'):
            value = value.decode(UTF8)
        if has_scheme(value):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        elif value.startswith(('./', '..', '~', '/')) or is_windows_path(value):
            return Channel.from_url(path_to_url(value))
        elif value.endswith('.tar.bz2'):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        else:
            # at this point assume we don't have a bare (non-scheme) url
            # e.g. this would be bad: repo.continuum.io/pkgs/free
            if value in context.custom_multichannels:
                return MultiChannel(value, context.custom_multichannels[value])
            else:
                return Channel.from_channel_name(value)
    @staticmethod
    def make_simple_channel(channel_alias, channel_url, name=None):
        """Build a Channel from a url or name, inheriting scheme/credentials
        from channel_alias when the url has none of its own."""
        ca = channel_alias
        test_url, scheme, auth, token = split_scheme_auth_token(channel_url)
        if name and scheme:
            return Channel(scheme=scheme, auth=auth, location=test_url, token=token,
                           name=name.strip('/'))
        if scheme:
            if ca.location and test_url.startswith(ca.location):
                location, name = ca.location, test_url.replace(ca.location, '', 1)
            else:
                url_parts = urlparse(test_url)
                location, name = Url(host=url_parts.host, port=url_parts.port).url, url_parts.path
            return Channel(scheme=scheme, auth=auth, location=location, token=token,
                           name=name.strip('/'))
        else:
            return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                           name=name and name.strip('/') or channel_url.strip('/'))
    @property
    def canonical_name(self):
        """Short display name: a multichannel name, a configured custom name,
        the bare name under the channel alias, or the full base url."""
        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    return multiname
        for that_name in context.custom_channels:
            # BUG FIX: self.name can be None (e.g. a bare host:port url with no
            # path); guard before calling split() to avoid an AttributeError.
            if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')):
                return self.name
        if any(c.location == self.location
               for c in concatv((context.channel_alias,), context.migrated_channel_aliases)):
            return self.name
        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        # BUG FIX: build with join_url (like base_url) instead of "%s://%s/%s",
        # so a missing name doesn't render as ".../None"
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))
    def urls(self, with_credentials=False, platform=None):
        """Platform-specific repodata urls for this channel (platform + noarch)."""
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        base = join_url(*base)
        def _platforms():
            # The requested platform always pairs with 'noarch', deduplicated.
            p = platform or self.platform or context.subdir
            return (p, 'noarch') if p != 'noarch' else ('noarch',)
        bases = (join_url(base, p) for p in _platforms())
        if with_credentials and self.auth:
            return ["%s://%s@%s" % (self.scheme, self.auth, b) for b in bases]
        else:
            return ["%s://%s" % (self.scheme, b) for b in bases]
    def url(self, with_credentials=False):
        """Single url for this channel, with platform and package filename when
        known; falls back to the current subdir."""
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        if self.platform:
            base.append(self.platform)
            if self.package_filename:
                base.append(self.package_filename)
        else:
            base.append(context.subdir)
        base = join_url(*base)
        if with_credentials and self.auth:
            return "%s://%s@%s" % (self.scheme, self.auth, base)
        else:
            return "%s://%s" % (self.scheme, base)
    @property
    def base_url(self):
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))
    def __str__(self):
        return self.base_url
    def __repr__(self):
        # Credentials are redacted so reprs are safe to log.
        return ("Channel(scheme=%s, auth=%s, location=%s, token=%s, name=%s, platform=%s, "
                "package_filename=%s)" % (self.scheme,
                                          self.auth and "%s:<PASSWORD>" % self.auth.split(':')[0],
                                          self.location,
                                          self.token and "<TOKEN>",
                                          self.name,
                                          self.platform,
                                          self.package_filename))
    def __eq__(self, other):
        # Equality (and hashing below) consider only location and name.
        if isinstance(other, Channel):
            return self.location == other.location and self.name == other.name
        else:
            return False
    def __hash__(self):
        return hash((self.location, self.name))
    def __nonzero__(self):
        # Python 2 truthiness: a channel is truthy if it has a location or name.
        return any((self.location, self.name))
    def __bool__(self):
        # Python 3 truthiness delegates to __nonzero__.
        return self.__nonzero__()
    @property
    def url_channel_wtf(self):
        # Legacy 2-tuple (base_url, canonical_name) kept for old call sites.
        return self.base_url, self.canonical_name
class MultiChannel(Channel):
    """A named aggregate of channels (e.g. 'defaults'); has no url of its own."""
    def __init__(self, name, channels):
        self.name = name
        self._channels = channels
        # A multichannel is purely an alias, so every single-channel
        # attribute is deliberately left unset.
        self.location = None
        self.scheme = None
        self.auth = None
        self.token = None
        self.platform = None
        self.package_filename = None
    @property
    def channel_location(self):
        return self.location
    @property
    def canonical_name(self):
        return self.name
    def urls(self, with_credentials=False, platform=None):
        """Expand to the urls of every constituent channel for the platform."""
        if platform and platform != context.subdir and self.name == 'defaults':
            # necessary shenanigan because different platforms have different default channels
            urls = DEFAULT_CHANNELS_WIN if 'win' in platform else DEFAULT_CHANNELS_UNIX
            ca = context.channel_alias
            sources = tuple(Channel.make_simple_channel(ca, v) for v in urls)
        else:
            sources = self._channels
        return list(chain.from_iterable(c.urls(with_credentials, platform) for c in sources))
    @property
    def base_url(self):
        return None
    def url(self, with_credentials=False):
        return None
def prioritize_channels(channels, with_credentials=True, platform=None):
    """Map each expanded channel url to a (canonical_name, priority) pair,
    keeping the first (highest-priority) entry when a url repeats, e.g.
    ('https://conda.anaconda.org/conda-forge/osx-64/', ('conda-forge', 1)).
    """
    prioritized = odict()
    for priority, raw_channel in enumerate(channels):
        channel = Channel(raw_channel)
        name = channel.canonical_name
        for url in channel.urls(with_credentials, platform):
            if url not in prioritized:
                prioritized[url] = (name, priority)
    return prioritized
def offline_keep(url):
    """In offline mode, keep only non-url specs and local file:// urls."""
    if not context.offline:
        return True
    return not is_url(url) or url.startswith('file:/')
<|code_end|>
| conda/models/channel.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from itertools import chain
from logging import getLogger
from requests.packages.urllib3.util import Url
from ..base.constants import DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN, UTF8
from ..base.context import context
from ..common.compat import iteritems, odict, with_metaclass
from ..common.url import (has_scheme, is_url, is_windows_path, join_url, on_win, path_to_url,
split_conda_url_easy_parts, split_scheme_auth_token, urlparse)
try:
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concatv, topk
except ImportError:
from .._vendor.toolz.functoolz import excepts # NOQA
from .._vendor.toolz.itertoolz import concatv, topk # NOQA
log = getLogger(__name__)
# backward compatibility for conda-build
def get_conda_build_local_url():
    """Return the local build root as a 1-tuple (legacy conda-build API)."""
    return (context.local_build_root,)
"""
scheme <> auth <> location <> token <> channel <> subchannel <> platform <> package_filename
channel <> subchannel <> namespace <> package_name
"""
def tokenized_startswith(test_iterable, startswith_iterable):
    """True if the leading tokens of test_iterable equal startswith_iterable.

    Comparison is pairwise and stops at the shorter sequence (zip semantics),
    so a prefix longer than the test sequence is only compared as far as the
    test sequence reaches.
    """
    for test_token, prefix_token in zip(test_iterable, startswith_iterable):
        if test_token != prefix_token:
            return False
    return True
def tokenized_conda_url_startswith(test_url, startswith_url):
    """True when test_url shares startswith_url's host:port and its path
    begins with startswith_url's path, compared segment by segment."""
    test_parts = urlparse(test_url)
    prefix_parts = urlparse(startswith_url)
    if test_parts.host != prefix_parts.host:
        return False
    if test_parts.port != prefix_parts.port:
        return False
    def _path_tokens(parts):
        # Normalize '' and '/' to a single root segment before splitting.
        return (parts.path.strip('/') or '/').split('/')
    return tokenized_startswith(_path_tokens(test_parts), _path_tokens(prefix_parts))
def _get_channel_for_name(channel_name):
    """Resolve channel_name against context.custom_channels, falling back to
    the channel alias; the returned Channel always carries the full name."""
    def _lookup(name):
        # Walk up the name's path segments until a custom channel matches.
        while True:
            if name in context.custom_channels:
                return context.custom_channels[name]
            parent = name.rsplit('/', 1)[0]
            if parent == name:
                return None
            name = parent
    channel = _lookup(channel_name)
    if channel is None:
        ca = context.channel_alias
        return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                       name=channel_name)
    # stripping off path segments threw information away from channel_name
    # (i.e. any potential subname); restore the full name on the result
    channel.name = channel_name
    return channel
def _read_channel_configuration(scheme, host, port, path):
    """Resolve (location, name, scheme, auth, token) for a parsed url's
    host/port/path.

    Steps 1-7 below are tried in priority order and the first match wins;
    configured entries are sorted longest-name-first so the most specific
    configuration is consulted before any of its parents.
    """
    # return location, name, scheme, auth, token
    path = path and path.rstrip('/')
    test_url = Url(host=host, port=port, path=path).url
    # Step 1. No path given; channel name is None
    if not path:
        return Url(host=host, port=port).url.rstrip('/'), None, scheme or None, None, None
    # Step 2. migrated_custom_channels matches
    for name, location in sorted(context.migrated_custom_channels.items(), reverse=True,
                                 key=lambda x: len(x[0])):
        location, _scheme, _auth, _token = split_scheme_auth_token(location)
        if tokenized_conda_url_startswith(test_url, join_url(location, name)):
            # translate location to new location, with new credentials
            subname = test_url.replace(join_url(location, name), '', 1).strip('/')
            channel_name = join_url(name, subname)
            channel = _get_channel_for_name(channel_name)
            return channel.location, channel_name, channel.scheme, channel.auth, channel.token
    # Step 3. migrated_channel_aliases matches
    for migrated_alias in context.migrated_channel_aliases:
        if test_url.startswith(migrated_alias.location):
            name = test_url.replace(migrated_alias.location, '', 1).strip('/')
            ca = context.channel_alias
            return ca.location, name, ca.scheme, ca.auth, ca.token
    # Step 4. custom_channels matches
    for name, channel in sorted(context.custom_channels.items(), reverse=True,
                                key=lambda x: len(x[0])):
        that_test_url = join_url(channel.location, channel.name)
        if test_url.startswith(that_test_url):
            subname = test_url.replace(that_test_url, '', 1).strip('/')
            return (channel.location, join_url(channel.name, subname), scheme,
                    channel.auth, channel.token)
    # Step 5. channel_alias match
    ca = context.channel_alias
    if ca.location and test_url.startswith(ca.location):
        name = test_url.replace(ca.location, '', 1).strip('/') or None
        return ca.location, name, scheme, ca.auth, ca.token
    # Step 6. not-otherwise-specified file://-type urls
    if host is None:
        # this should probably only happen with a file:// type url
        assert port is None
        location, name = test_url.rsplit('/', 1)
        if not location:
            location = '/'
        _scheme, _auth, _token = 'file', None, None
        return location, name, _scheme, _auth, _token
    # Step 7. fall through to host:port as channel_location and path as channel_name
    return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or None,
            scheme or None, None, None)
def parse_conda_channel_url(url):
    """Parse a conda url into a Channel, merging in configured credentials;
    credentials embedded in the url take precedence over configured ones."""
    (scheme, auth, token, platform, package_filename,
     host, port, path, query) = split_conda_url_easy_parts(url)
    # recombine host, port, path to get a channel_name and channel_location
    (channel_location, channel_name, configured_scheme, configured_auth,
     configured_token) = _read_channel_configuration(scheme, host, port, path)
    # at this point at least one of channel_location / channel_name must be known
    assert channel_location is not None or channel_name is not None
    return Channel(configured_scheme or 'https',
                   auth or configured_auth,
                   channel_location,
                   token or configured_token,
                   channel_name,
                   platform,
                   package_filename)
class ChannelType(type):
    """
    This metaclass does basic caching and enables static constructor method usage with a
    single arg.
    """
    def __call__(cls, *args, **kwargs):
        # A single positional argument is treated as a value to coerce,
        # memoized in Channel._cache_ keyed by that value.
        if len(args) == 1 and not kwargs:
            value = args[0]
            if isinstance(value, Channel):
                return value
            elif value in Channel._cache_:
                return Channel._cache_[value]
            else:
                c = Channel.from_value(value)
                Channel._cache_[value] = c
                return c
        else:
            # Normal multi-argument construction bypasses the cache.
            return super(ChannelType, cls).__call__(*args, **kwargs)
@with_metaclass(ChannelType)
class Channel(object):
    """
    A conda channel, decomposed into scheme, auth, location, token, name,
    platform, and package_filename.  Instances built through the metaclass
    (``Channel(value)``) are memoized in ``_cache_`` keyed by ``value``.
    """
    _cache_ = dict()
    @staticmethod
    def _reset_state():
        # Drop all memoized instances (used when configuration changes).
        Channel._cache_ = dict()
    def __init__(self, scheme=None, auth=None, location=None, token=None, name=None,
                 platform=None, package_filename=None):
        self.scheme = scheme
        self.auth = auth
        self.location = location
        self.token = token
        self.name = name
        self.platform = platform
        self.package_filename = package_filename
    @property
    def channel_location(self):
        # Alias kept for backward compatibility.
        return self.location
    @property
    def channel_name(self):
        # Alias kept for backward compatibility.
        return self.name
    @staticmethod
    def from_url(url):
        """Build a Channel by parsing a full conda url."""
        return parse_conda_channel_url(url)
    @staticmethod
    def from_channel_name(channel_name):
        """Build a Channel from a bare name using configured custom channels."""
        return _get_channel_for_name(channel_name)
    @staticmethod
    def from_value(value):
        """Coerce an arbitrary value (None, url, local path, tarball, or
        channel name) into a Channel or MultiChannel."""
        if value is None:
            return Channel(name="<unknown>")
        if hasattr(value, 'decode'):
            value = value.decode(UTF8)
        if has_scheme(value):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        elif value.startswith(('./', '..', '~', '/')) or is_windows_path(value):
            return Channel.from_url(path_to_url(value))
        elif value.endswith('.tar.bz2'):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        else:
            # at this point assume we don't have a bare (non-scheme) url
            # e.g. this would be bad: repo.continuum.io/pkgs/free
            if value in context.custom_multichannels:
                return MultiChannel(value, context.custom_multichannels[value])
            else:
                return Channel.from_channel_name(value)
    @staticmethod
    def make_simple_channel(channel_alias, channel_url, name=None):
        """Build a Channel from a url or name, inheriting scheme/credentials
        from channel_alias when the url has none of its own."""
        ca = channel_alias
        test_url, scheme, auth, token = split_scheme_auth_token(channel_url)
        if name and scheme:
            return Channel(scheme=scheme, auth=auth, location=test_url, token=token,
                           name=name.strip('/'))
        if scheme:
            if ca.location and test_url.startswith(ca.location):
                location, name = ca.location, test_url.replace(ca.location, '', 1)
            else:
                url_parts = urlparse(test_url)
                location, name = Url(host=url_parts.host, port=url_parts.port).url, url_parts.path
            return Channel(scheme=scheme, auth=auth, location=location, token=token,
                           name=name.strip('/'))
        else:
            return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                           name=name and name.strip('/') or channel_url.strip('/'))
    @property
    def canonical_name(self):
        """Short display name: a multichannel name, a configured custom name,
        the bare name under the channel alias, or the full base url."""
        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    return multiname
        for that_name in context.custom_channels:
            # self.name may be None (e.g. bare host:port urls); guard before split()
            if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')):
                return self.name
        if any(c.location == self.location
               for c in concatv((context.channel_alias,), context.migrated_channel_aliases)):
            return self.name
        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))
    def urls(self, with_credentials=False, platform=None):
        """Platform-specific repodata urls for this channel (platform + noarch)."""
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        base = join_url(*base)
        def _platforms():
            # The requested platform always pairs with 'noarch', deduplicated.
            p = platform or self.platform or context.subdir
            return (p, 'noarch') if p != 'noarch' else ('noarch',)
        bases = (join_url(base, p) for p in _platforms())
        if with_credentials and self.auth:
            return ["%s://%s@%s" % (self.scheme, self.auth, b) for b in bases]
        else:
            return ["%s://%s" % (self.scheme, b) for b in bases]
    def url(self, with_credentials=False):
        """Single url for this channel, with platform and package filename when
        known; falls back to the current subdir."""
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        if self.platform:
            base.append(self.platform)
            if self.package_filename:
                base.append(self.package_filename)
        else:
            base.append(context.subdir)
        base = join_url(*base)
        if with_credentials and self.auth:
            return "%s://%s@%s" % (self.scheme, self.auth, base)
        else:
            return "%s://%s" % (self.scheme, base)
    @property
    def base_url(self):
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))
    def __str__(self):
        return self.base_url
    def __repr__(self):
        # Credentials are redacted so reprs are safe to log.
        return ("Channel(scheme=%s, auth=%s, location=%s, token=%s, name=%s, platform=%s, "
                "package_filename=%s)" % (self.scheme,
                                          self.auth and "%s:<PASSWORD>" % self.auth.split(':')[0],
                                          self.location,
                                          self.token and "<TOKEN>",
                                          self.name,
                                          self.platform,
                                          self.package_filename))
    def __eq__(self, other):
        # Equality (and hashing below) consider only location and name.
        if isinstance(other, Channel):
            return self.location == other.location and self.name == other.name
        else:
            return False
    def __hash__(self):
        return hash((self.location, self.name))
    def __nonzero__(self):
        # Python 2 truthiness: a channel is truthy if it has a location or name.
        return any((self.location, self.name))
    def __bool__(self):
        # Python 3 truthiness delegates to __nonzero__.
        return self.__nonzero__()
    @property
    def url_channel_wtf(self):
        # Legacy 2-tuple (base_url, canonical_name) kept for old call sites.
        return self.base_url, self.canonical_name
class MultiChannel(Channel):
    """A named aggregate of channels (e.g. 'defaults'); has no url of its own."""
    def __init__(self, name, channels):
        self.name = name
        self.location = None
        self._channels = channels
        # A multichannel is purely an alias, so every remaining
        # single-channel attribute is deliberately left unset.
        self.scheme = None
        self.auth = None
        self.token = None
        self.platform = None
        self.package_filename = None
    @property
    def channel_location(self):
        return self.location
    @property
    def canonical_name(self):
        return self.name
    def urls(self, with_credentials=False, platform=None):
        """Expand to the urls of every constituent channel for the platform."""
        if platform and platform != context.subdir and self.name == 'defaults':
            # necessary shenanigan because different platforms have different default channels
            urls = DEFAULT_CHANNELS_WIN if 'win' in platform else DEFAULT_CHANNELS_UNIX
            ca = context.channel_alias
            _channels = tuple(Channel.make_simple_channel(ca, v) for v in urls)
        else:
            _channels = self._channels
        return list(chain.from_iterable(c.urls(with_credentials, platform) for c in _channels))
    @property
    def base_url(self):
        # A multichannel has no single url.
        return None
    def url(self, with_credentials=False):
        return None
def prioritize_channels(channels, with_credentials=True, platform=None):
    """Map each expanded channel url to a (canonical_name, priority) pair;
    when a url repeats, the first (highest-priority) entry wins."""
    # ('https://conda.anaconda.org/conda-forge/osx-64/', ('conda-forge', 1))
    result = odict()
    for q, chn in enumerate(channels):
        channel = Channel(chn)
        for url in channel.urls(with_credentials, platform):
            if url in result:
                continue
            result[url] = channel.canonical_name, q
    return result
def offline_keep(url):
    # In offline mode, keep only non-url specs and local file:// urls.
    return not context.offline or not is_url(url) or url.startswith('file:/')
| conda/models/channel.py
--- a/conda/models/channel.py
+++ b/conda/models/channel.py
@@ -73,9 +73,14 @@ def _get_channel_for_name_helper(name):
def _read_channel_configuration(scheme, host, port, path):
# return location, name, scheme, auth, token
- test_url = Url(host=host, port=port, path=path).url.rstrip('/')
+ path = path and path.rstrip('/')
+ test_url = Url(host=host, port=port, path=path).url
- # Step 1. migrated_custom_channels matches
+ # Step 1. No path given; channel name is None
+ if not path:
+ return Url(host=host, port=port).url.rstrip('/'), None, scheme or None, None, None
+
+ # Step 2. migrated_custom_channels matches
for name, location in sorted(context.migrated_custom_channels.items(), reverse=True,
key=lambda x: len(x[0])):
location, _scheme, _auth, _token = split_scheme_auth_token(location)
@@ -86,14 +91,14 @@ def _read_channel_configuration(scheme, host, port, path):
channel = _get_channel_for_name(channel_name)
return channel.location, channel_name, channel.scheme, channel.auth, channel.token
- # Step 2. migrated_channel_aliases matches
+ # Step 3. migrated_channel_aliases matches
for migrated_alias in context.migrated_channel_aliases:
if test_url.startswith(migrated_alias.location):
name = test_url.replace(migrated_alias.location, '', 1).strip('/')
ca = context.channel_alias
return ca.location, name, ca.scheme, ca.auth, ca.token
- # Step 3. custom_channels matches
+ # Step 4. custom_channels matches
for name, channel in sorted(context.custom_channels.items(), reverse=True,
key=lambda x: len(x[0])):
that_test_url = join_url(channel.location, channel.name)
@@ -102,13 +107,13 @@ def _read_channel_configuration(scheme, host, port, path):
return (channel.location, join_url(channel.name, subname), scheme,
channel.auth, channel.token)
- # Step 4. channel_alias match
+ # Step 5. channel_alias match
ca = context.channel_alias
if ca.location and test_url.startswith(ca.location):
name = test_url.replace(ca.location, '', 1).strip('/') or None
return ca.location, name, scheme, ca.auth, ca.token
- # Step 5. not-otherwise-specified file://-type urls
+ # Step 6. not-otherwise-specified file://-type urls
if host is None:
# this should probably only happen with a file:// type url
assert port is None
@@ -118,7 +123,7 @@ def _read_channel_configuration(scheme, host, port, path):
_scheme, _auth, _token = 'file', None, None
return location, name, _scheme, _auth, _token
- # Step 6. fall through to host:port as channel_location and path as channel_name
+ # Step 7. fall through to host:port as channel_location and path as channel_name
return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or None,
scheme or None, None, None)
@@ -250,7 +255,7 @@ def canonical_name(self):
return multiname
for that_name in context.custom_channels:
- if tokenized_startswith(self.name.split('/'), that_name.split('/')):
+ if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')):
return self.name
if any(c.location == self.location
@@ -259,7 +264,7 @@ def canonical_name(self):
# fall back to the equivalent of self.base_url
# re-defining here because base_url for MultiChannel is None
- return "%s://%s/%s" % (self.scheme, self.location, self.name)
+ return "%s://%s" % (self.scheme, join_url(self.location, self.name))
def urls(self, with_credentials=False, platform=None):
base = [self.location] |
`conda config --show --json` fails with `TypeError: ... is not JSON serializable`.
```
$conda config --show --json
{
"error": "Traceback (most recent call last):\n File \"C:\\Anaconda\\lib\\site-packages\\conda\\exceptions.py\", line 479, in conda_exception_handler\n return_value = func(*args, **kwargs)\n File \"C:\\Anaconda\\lib\\site-packages\\conda\\cli\\main.py\", line 145, in _main\n exit_code = args.func(args, p)\n File \"C:\\Anaconda\\lib\\site-packages\\conda\\cli\\main_config.py\", line 230, in execute\n execute_config(args, parser)\n File \"C:\\Anaconda\\lib\\site-packages\\conda\\cli\\main_config.py\", line 306, in execute_config\n print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': ')))\n File \"C:\\Anaconda\\lib\\json\\__init__.py\", line 251, in dumps\n sort_keys=sort_keys, **kw).encode(obj)\n File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 209, in encode\n chunks = list(chunks)\n
File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 434, in _iterencode\n for chunk in _iterencode_dict(o, _current_indent_level):\n File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 408, in _iterencode_dict\n for chunk in chunks:\n File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 442, in _iterencode\n o = _default(o)\n File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 184, in default\n raise TypeError(repr(o) + \" is not JSON serializable\")\nTypeError: Channel(scheme=https, auth=None, location=conda.anaconda.org, token=None, name=None, platform=None, package_filename=None) is not JSON serializable\n"
}
```
expanding the traceback for better readability:
```
Traceback (most recent call last):
File \"C:\\Anaconda\\lib\\site-packages\\conda\\exceptions.py\", line 479, in conda_exception_handler
return_value = func(*args, **kwargs)
File \"C:\\Anaconda\\lib\\site-packages\\conda\\cli\\main.py\", line 145, in _main
exit_code = args.func(args, p)
File \"C:\\Anaconda\\lib\\site-packages\\conda\\cli\\main_config.py\", line 230, in execute
execute_config(args, parser)
File \"C:\\Anaconda\\lib\\site-packages\\conda\\cli\\main_config.py\", line 306, in execute_config
print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': ')))
File \"C:\\Anaconda\\lib\\json\\__init__.py\", line 251, in dumps
sort_keys=sort_keys, **kw).encode(obj)
File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 209, in encode
chunks = list(chunks)
File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 434, in _iterencode
for chunk in _iterencode_dict(o, _current_indent_level):
File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 408, in _iterencode_dict
for chunk in chunks:
File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 442, in _iterencode
o = _default(o)
File \"C:\\Anaconda\\lib\\json\\encoder.py\", line 184, in default
raise TypeError(repr(o) + \" is not JSON serializable\")
TypeError: Channel(scheme=https, auth=None, location=conda.anaconda.org, token=None, name=None, platform=None, package_filename=None) is not JSON serializable
```
`conda config --show` works fine.
Tested with:
```
Current conda install:
platform : win-64
conda version : 4.2.11
conda is private : False
conda-env version : 4.2.11
conda-build version : 1.21.3
python version : 2.7.12.final.0
requests version : 2.11.1
```
and
```
Current conda install:
platform : linux-64
conda version : 4.2.12
conda is private : False
conda-env version : 4.2.12
conda-build version : 1.20.0
python version : 2.7.12.final.0
requests version : 2.9.1
```
`conda config --json --show-sources` works fine.
| conda/cli/main_config.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function, unicode_literals
import collections
import json
import os
import sys
from .common import (Completer, add_parser_json, stdout_json_success)
from .. import CondaError
from .._vendor.auxlib.compat import isiterable
from .._vendor.auxlib.type_coercion import boolify
from ..base.context import context
from ..common.configuration import pretty_list, pretty_map
from ..common.yaml import yaml_dump, yaml_load
from ..compat import iteritems, string_types
from ..config import (rc_bool_keys, rc_list_keys, rc_other, rc_string_keys, sys_rc_path,
user_rc_path)
from ..exceptions import CondaKeyError, CondaValueError, CouldntParseError
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class SingleValueKey(Completer):
    """Tab-completion candidates for keys taking one boolean/string value."""
    def _get_items(self):
        boolean_words = ['yes', 'no', 'on', 'off', 'true', 'false']
        return rc_bool_keys + rc_string_keys + boolean_words
class ListKey(Completer):
    # Tab-completion candidates: configuration keys whose value is a list.
    def _get_items(self):
        return rc_list_keys
class BoolOrListKey(Completer):
    """Completer accepting both list-valued and boolean-valued config keys."""
    def __contains__(self, other):
        candidates = self.get_items()
        return other in candidates
    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Register the `conda config` subcommand and all of its arguments."""
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)
    # TODO: use argparse.FileType
    # Which condarc file to operate on (mutually exclusive group).
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
        config file ({user}).""".format(system=sys_rc_path,
                                        user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )
    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--show",
        action="store_true",
        help="Display all configuration values as calculated and compiled.",
    )
    action.add_argument(
        "--show-sources",
        action="store_true",
        help="Display all identified configuration sources.",
    )
    action.add_argument(
        "--validate",
        action="store_true",
        help="Validate all configuration sources.",
    )
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar='KEY',
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--append",
        nargs=2,
        action="append",
        help="""Add one configuration value to the end of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--prepend", "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
    all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
        remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for `conda config`; wraps known failures in CondaError."""
    try:
        execute_config(args, parser)
    except (CouldntParseError, NotImplementedError) as err:
        raise CondaError(err)
def format_dict(d):
    """Render a config mapping as display lines, one entry per key.

    Mappings and iterables get a "key:" header followed by a pretty-printed
    body; empty ones collapse to "key: {}" / "key: []"; scalars are rendered
    inline, with None shown as the literal string "None".
    """
    out = []
    for key, value in iteritems(d):
        if isinstance(value, collections.Mapping):
            if value:
                out.append("%s:" % key)
                out.append(pretty_map(value))
            else:
                out.append("%s: {}" % key)
        elif isiterable(value):
            if value:
                out.append("%s:" % key)
                out.append(pretty_list(value))
            else:
                out.append("%s: []" % key)
        else:
            display = "None" if value is None else value
            out.append("%s: %s" % (key, display))
    return out
def execute_config(args, parser):
    """Implement every ``conda config`` sub-action.

    Order matters: --show-sources, --show, and --validate short-circuit and
    return before the condarc file is read; all remaining actions (--get,
    --prepend/--append, --set, --remove, --remove-key) read rc_path once,
    mutate the in-memory dict, and write it back at the end.
    """
    json_warnings = []
    json_get = {}

    if args.show_sources:
        if context.json:
            print(json.dumps(context.collect_all(), sort_keys=True,
                             indent=2, separators=(',', ': ')))
        else:
            lines = []
            for source, reprs in iteritems(context.collect_all()):
                lines.append("==> %s <==" % source)
                lines.extend(format_dict(reprs))
                lines.append('')
            print('\n'.join(lines))
        return

    if args.show:
        from collections import OrderedDict
        # Sorted, fixed whitelist of context properties to display.
        d = OrderedDict((key, getattr(context, key))
                        for key in sorted(('add_anaconda_token',
                                           'add_pip_as_python_dependency',
                                           'allow_softlinks',
                                           'always_copy',
                                           'always_yes',
                                           'auto_update_conda',
                                           'binstar_upload',
                                           'changeps1',
                                           'channel_alias',
                                           'channel_priority',
                                           'channels',
                                           'client_ssl_cert',
                                           'client_ssl_cert_key',
                                           'create_default_packages',
                                           'debug',
                                           'default_channels',
                                           'disallow',
                                           'envs_dirs',
                                           'json',
                                           'offline',
                                           'proxy_servers',
                                           'quiet',
                                           'shortcuts',
                                           'show_channel_urls',
                                           'ssl_verify',
                                           'track_features',
                                           'update_dependencies',
                                           'use_pip',
                                           'verbosity',
                                           )))
        if context.json:
            print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': ')))
        else:
            print('\n'.join(format_dict(d)))
        context.validate_configuration()
        return

    if args.validate:
        context.validate_all()
        return

    # Choose which condarc file subsequent actions operate on.
    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        context.validate_all()
        if args.get == []:
            # bare --get means "print everything currently in the file"
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not context.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if context.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(reversed(items)):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              " # lowest priority" if q == 0 else " # highest priority")
                    else:
                        print("--add", key, repr(item))

    # prepend, append, add
    for arg, prepend in zip((args.prepend, args.append), (True, False)):
        for key, item in arg:
            if key == 'channels' and key not in rc_config:
                # 'channels' implicitly contains 'defaults' until configured
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                raise CondaValueError("key must be one of %s, not %r" %
                                      (', '.join(rc_list_keys), key))
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParseError("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "top" if prepend else "bottom")
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not context.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        if key in set_bools:
            rc_config[key] = boolify(item)
        elif key in set_strings:
            assert isinstance(item, string_types)
            rc_config[key] = item
        else:
            raise CondaValueError("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key))

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                raise CondaKeyError(key, "key %r is not in the config file" % key)
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            raise CondaKeyError(key, "%r is not in the %r key of the config file" %
                                (item, key))
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            raise CondaKeyError(key, "key %r is not in the config file" %
                                key)
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if context.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
<|code_end|>
conda/models/channel.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from itertools import chain
from logging import getLogger
from requests.packages.urllib3.util import Url
from ..base.constants import (DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN, MAX_CHANNEL_PRIORITY,
UTF8)
from ..base.context import context
from ..common.compat import iteritems, odict, with_metaclass
from ..common.url import (has_scheme, is_url, is_windows_path, join_url, on_win, path_to_url,
split_conda_url_easy_parts, split_scheme_auth_token, urlparse)
try:
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concatv, topk
except ImportError:
from .._vendor.toolz.functoolz import excepts # NOQA
from .._vendor.toolz.itertoolz import concatv, topk # NOQA
log = getLogger(__name__)
# backward compatibility for conda-build
def get_conda_build_local_url():
    """Backward-compatibility shim for conda-build.

    Returns the local build root wrapped in a 1-tuple, matching the
    historical return shape conda-build expects.
    """
    return (context.local_build_root,)
"""
scheme <> auth <> location <> token <> channel <> subchannel <> platform <> package_filename
channel <> subchannel <> namespace <> package_name
"""
def tokenized_startswith(test_iterable, startswith_iterable):
    """Return True when *test_iterable* begins with the tokens of
    *startswith_iterable*.

    Comparison is pairwise and stops at the shorter sequence (like zip), so
    an empty prefix — or an empty test sequence — always yields True.
    """
    for token, prefix_token in zip(test_iterable, startswith_iterable):
        if token != prefix_token:
            return False
    return True
def tokenized_conda_url_startswith(test_url, startswith_url):
    """Return True when *test_url* lives at or below *startswith_url*.

    Host and port must match exactly; the paths are compared as '/'-split
    token prefixes so that e.g. /a/bc does not match a prefix of /a/b.
    """
    test_parts = urlparse(test_url)
    startswith_parts = urlparse(startswith_url)
    if test_parts.host != startswith_parts.host:
        return False
    if test_parts.port != startswith_parts.port:
        return False

    def norm_path_tokens(parts):
        # A bare or root path normalizes to the single token '/'.
        return (parts.path.strip('/') or '/').split('/')

    return tokenized_startswith(norm_path_tokens(test_parts),
                                norm_path_tokens(startswith_parts))
def _get_channel_for_name(channel_name):
    """Resolve a bare channel name to a Channel.

    Walks the name up one path segment at a time looking for a match in
    context.custom_channels; when found, the matched channel is relabeled
    with the full *channel_name* (the stripped sub-path is part of the
    name).  Otherwise the configured channel_alias supplies the location
    and credentials.
    """
    # Iterative equivalent of progressively stripping trailing segments:
    # 'a/b/c' -> 'a/b' -> 'a', stopping when no segment can be removed.
    name = channel_name
    found = None
    while True:
        if name in context.custom_channels:
            found = context.custom_channels[name]
            break
        parent = name.rsplit('/', 1)[0]
        if parent == name:
            break
        name = parent

    if found is not None:
        # Matching on a stripped prefix lost the subname; restore the full
        # channel name on the resolved channel.
        found.name = channel_name
        return found

    ca = context.channel_alias
    return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                   name=channel_name)
def _read_channel_configuration(scheme, host, port, path):
    """Map a parsed URL onto the configured channel landscape.

    Returns a 5-tuple (location, name, scheme, auth, token).  The steps are
    ordered from most to least specific and the first match wins; matching
    against migrated/custom channel tables is done longest-name-first so a
    more specific custom channel shadows a shorter one.
    """
    # return location, name, scheme, auth, token
    path = path and path.rstrip('/')
    test_url = Url(host=host, port=port, path=path).url

    # Step 1. No path given; channel name is None
    if not path:
        return Url(host=host, port=port).url.rstrip('/'), None, scheme or None, None, None

    # Step 2. migrated_custom_channels matches
    for name, location in sorted(context.migrated_custom_channels.items(), reverse=True,
                                 key=lambda x: len(x[0])):
        location, _scheme, _auth, _token = split_scheme_auth_token(location)
        if tokenized_conda_url_startswith(test_url, join_url(location, name)):
            # translate location to new location, with new credentials
            subname = test_url.replace(join_url(location, name), '', 1).strip('/')
            channel_name = join_url(name, subname)
            channel = _get_channel_for_name(channel_name)
            return channel.location, channel_name, channel.scheme, channel.auth, channel.token

    # Step 3. migrated_channel_aliases matches
    for migrated_alias in context.migrated_channel_aliases:
        if test_url.startswith(migrated_alias.location):
            name = test_url.replace(migrated_alias.location, '', 1).strip('/')
            ca = context.channel_alias
            return ca.location, name, ca.scheme, ca.auth, ca.token

    # Step 4. custom_channels matches
    for name, channel in sorted(context.custom_channels.items(), reverse=True,
                                key=lambda x: len(x[0])):
        that_test_url = join_url(channel.location, channel.name)
        if test_url.startswith(that_test_url):
            subname = test_url.replace(that_test_url, '', 1).strip('/')
            return (channel.location, join_url(channel.name, subname), scheme,
                    channel.auth, channel.token)

    # Step 5. channel_alias match
    ca = context.channel_alias
    if ca.location and test_url.startswith(ca.location):
        name = test_url.replace(ca.location, '', 1).strip('/') or None
        return ca.location, name, scheme, ca.auth, ca.token

    # Step 6. not-otherwise-specified file://-type urls
    if host is None:
        # this should probably only happen with a file:// type url
        assert port is None
        location, name = test_url.rsplit('/', 1)
        if not location:
            location = '/'
        _scheme, _auth, _token = 'file', None, None
        return location, name, _scheme, _auth, _token

    # Step 7. fall through to host:port as channel_location and path as channel_name
    return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or None,
            scheme or None, None, None)
def parse_conda_channel_url(url):
    """Split a full conda URL into its parts and build a Channel from them.

    Credentials embedded in the URL (auth, token) take precedence over the
    ones recovered from configuration; the configured scheme falls back to
    'https' when neither source supplies one.
    """
    (scheme, auth, token, platform, package_filename,
     host, port, path, query) = split_conda_url_easy_parts(url)

    # recombine host, port, path to get a channel_name and channel_location
    (channel_location, channel_name, configured_scheme, configured_auth,
     configured_token) = _read_channel_configuration(scheme, host, port, path)

    # if we came out with no channel_location or channel_name, we need to figure it out
    # from host, port, path
    assert channel_location is not None or channel_name is not None

    return Channel(configured_scheme or 'https',
                   auth or configured_auth,
                   channel_location,
                   token or configured_token,
                   channel_name,
                   platform,
                   package_filename)
class ChannelType(type):
    """
    This metaclass does basic caching and enables static constructor method usage with a
    single arg.
    """

    def __call__(cls, *args, **kwargs):
        if len(args) == 1 and not kwargs:
            # Single-argument form: treat the argument as a "value" to be
            # interpreted by Channel.from_value, memoized in Channel._cache_.
            value = args[0]
            if isinstance(value, Channel):
                # Already a Channel (or subclass): pass through unchanged.
                return value
            elif value in Channel._cache_:
                return Channel._cache_[value]
            else:
                # NOTE(review): an unhashable value would raise TypeError at
                # the cache lookup above — presumably values are always
                # strings or None; confirm against callers.
                c = Channel.from_value(value)
                Channel._cache_[value] = c
                return c
        else:
            # Multi-argument form: normal construction, no caching.
            return super(ChannelType, cls).__call__(*args, **kwargs)
@with_metaclass(ChannelType)
class Channel(object):
    """Identity of a conda channel.

    The attributes mirror the decomposition of a channel URL:
    ``scheme://auth@location/t/token/name/platform/package_filename``.
    Single-argument construction goes through the ChannelType metaclass,
    which interprets the value (URL, path, bare name, ...) and caches the
    resulting instance, so ``Channel(value)`` is cheap for repeated values.
    Equality and hashing are by (location, name) only.
    """
    _cache_ = dict()

    @staticmethod
    def _reset_state():
        # Drop the metaclass-managed instance cache.
        Channel._cache_ = dict()

    def __init__(self, scheme=None, auth=None, location=None, token=None, name=None,
                 platform=None, package_filename=None):
        self.scheme = scheme
        self.auth = auth
        self.location = location
        self.token = token
        self.name = name
        self.platform = platform
        self.package_filename = package_filename

    @property
    def channel_location(self):
        return self.location

    @property
    def channel_name(self):
        return self.name

    @staticmethod
    def from_url(url):
        """Parse a full conda URL into a Channel."""
        return parse_conda_channel_url(url)

    @staticmethod
    def from_channel_name(channel_name):
        """Resolve a bare channel name against custom channels / channel_alias."""
        return _get_channel_for_name(channel_name)

    @staticmethod
    def from_value(value):
        """Build a Channel from any user-supplied value.

        Accepts None, a URL, a local path, a tarball path/URL, a configured
        multichannel name, or a bare channel name.
        """
        if value is None:
            return Channel(name="<unknown>")
        if hasattr(value, 'decode'):
            value = value.decode(UTF8)
        if has_scheme(value):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        elif value.startswith(('./', '..', '~', '/')) or is_windows_path(value):
            # local path; convert to a file:// url first
            return Channel.from_url(path_to_url(value))
        elif value.endswith('.tar.bz2'):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        else:
            # at this point assume we don't have a bare (non-scheme) url
            # e.g. this would be bad: repo.continuum.io/pkgs/free
            if value in context.custom_multichannels:
                return MultiChannel(value, context.custom_multichannels[value])
            else:
                return Channel.from_channel_name(value)

    @staticmethod
    def make_simple_channel(channel_alias, channel_url, name=None):
        """Build a Channel from a URL or name, filling gaps from *channel_alias*."""
        ca = channel_alias
        test_url, scheme, auth, token = split_scheme_auth_token(channel_url)
        if name and scheme:
            return Channel(scheme=scheme, auth=auth, location=test_url, token=token,
                           name=name.strip('/'))
        if scheme:
            if ca.location and test_url.startswith(ca.location):
                location, name = ca.location, test_url.replace(ca.location, '', 1)
            else:
                url_parts = urlparse(test_url)
                location, name = Url(host=url_parts.host, port=url_parts.port).url, url_parts.path
            return Channel(scheme=scheme, auth=auth, location=location, token=token,
                           name=name.strip('/'))
        else:
            # no scheme: treat channel_url as a bare name under the alias
            return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                           name=name and name.strip('/') or channel_url.strip('/'))

    @property
    def canonical_name(self):
        """The short display name: multichannel name, custom name, bare name
        under a known alias, or the full base url as a last resort."""
        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    return multiname
        for that_name in context.custom_channels:
            if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')):
                return self.name
        if any(c.location == self.location
               for c in concatv((context.channel_alias,), context.migrated_channel_aliases)):
            return self.name
        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))

    def urls(self, with_credentials=False, platform=None):
        """Full repodata URLs for this channel: the requested (or current)
        platform plus 'noarch', de-duplicated when the platform IS noarch."""
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        base = join_url(*base)

        def _platforms():
            p = platform or self.platform or context.subdir
            return (p, 'noarch') if p != 'noarch' else ('noarch',)

        bases = (join_url(base, p) for p in _platforms())

        if with_credentials and self.auth:
            return ["%s://%s@%s" % (self.scheme, self.auth, b) for b in bases]
        else:
            return ["%s://%s" % (self.scheme, b) for b in bases]

    def url(self, with_credentials=False):
        """Single URL for this channel, including platform and (if present)
        package filename; falls back to the current subdir when no platform
        is set."""
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        if self.platform:
            base.append(self.platform)
            if self.package_filename:
                base.append(self.package_filename)
        else:
            base.append(context.subdir)
        base = join_url(*base)

        if with_credentials and self.auth:
            return "%s://%s@%s" % (self.scheme, self.auth, base)
        else:
            return "%s://%s" % (self.scheme, base)

    @property
    def base_url(self):
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))

    def __str__(self):
        return self.base_url

    def __repr__(self):
        # auth password and token are masked so reprs are safe to log
        return ("Channel(scheme=%s, auth=%s, location=%s, token=%s, name=%s, platform=%s, "
                "package_filename=%s)" % (self.scheme,
                                          self.auth and "%s:<PASSWORD>" % self.auth.split(':')[0],
                                          self.location,
                                          self.token and "<TOKEN>",
                                          self.name,
                                          self.platform,
                                          self.package_filename))

    def __eq__(self, other):
        if isinstance(other, Channel):
            return self.location == other.location and self.name == other.name
        # FIX: return NotImplemented (not False) so Python can try the
        # reflected comparison on the other operand; against unrelated types
        # == still evaluates to False, so callers see no behavior change.
        return NotImplemented

    def __ne__(self, other):
        # FIX: Python 2 (which this module supports — see __nonzero__) does
        # not derive != from __eq__, so define it explicitly.
        result = self.__eq__(other)
        return result if result is NotImplemented else not result

    def __hash__(self):
        # Paired with __eq__: identity is (location, name).
        return hash((self.location, self.name))

    def __nonzero__(self):
        return any((self.location, self.name))

    def __bool__(self):
        return self.__nonzero__()

    @property
    def url_channel_wtf(self):
        return self.base_url, self.canonical_name
class MultiChannel(Channel):
    """A named bundle of channels (e.g. 'defaults').

    Has no single location/URL of its own: base_url and url() are None, and
    urls() concatenates the member channels' URLs.
    """

    def __init__(self, name, channels):
        self.name = name
        self.location = None

        self._channels = channels

        self.scheme = None
        self.auth = None
        self.token = None
        self.platform = None
        self.package_filename = None

    @property
    def channel_location(self):
        return self.location

    @property
    def canonical_name(self):
        return self.name

    def urls(self, with_credentials=False, platform=None):
        if platform and platform != context.subdir and self.name == 'defaults':
            # necessary shenanigan because different platforms have different default channels
            urls = DEFAULT_CHANNELS_WIN if 'win' in platform else DEFAULT_CHANNELS_UNIX
            ca = context.channel_alias
            _channels = tuple(Channel.make_simple_channel(ca, v) for v in urls)
        else:
            _channels = self._channels
        return list(chain.from_iterable(c.urls(with_credentials, platform) for c in _channels))

    @property
    def base_url(self):
        # A multichannel has no single base url.
        return None

    def url(self, with_credentials=False):
        return None
def prioritize_channels(channels, with_credentials=True, platform=None):
    """Map every URL of every channel to (canonical_name, priority).

    Priority is the channel's position in *channels*, capped at
    MAX_CHANNEL_PRIORITY - 1.  The first channel to claim a URL wins;
    later duplicates are ignored.  Example entry:
    ('https://conda.anaconda.org/conda-forge/osx-64/', ('conda-forge', 1))
    """
    prioritized = odict()
    for priority, value in enumerate(channels):
        channel = Channel(value)
        name = channel.canonical_name
        capped_priority = min(priority, MAX_CHANNEL_PRIORITY - 1)
        for url in channel.urls(with_credentials, platform):
            if url not in prioritized:
                prioritized[url] = (name, capped_priority)
    return prioritized
def offline_keep(url):
    """Decide whether *url* is still usable given offline mode.

    Everything is kept when online; offline, only non-URL values and
    local file:/ URLs survive.
    """
    if not context.offline:
        return True
    if not is_url(url):
        return True
    return url.startswith('file:/')
<|code_end|>
| conda/cli/main_config.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function, unicode_literals
import collections
import json
import os
import sys
from .common import (Completer, add_parser_json, stdout_json_success)
from .. import CondaError
from .._vendor.auxlib.compat import isiterable
from .._vendor.auxlib.entity import EntityEncoder
from .._vendor.auxlib.type_coercion import boolify
from ..base.context import context
from ..common.configuration import pretty_list, pretty_map
from ..common.yaml import yaml_dump, yaml_load
from ..compat import iteritems, string_types
from ..config import (rc_bool_keys, rc_list_keys, rc_other, rc_string_keys, sys_rc_path,
user_rc_path)
from ..exceptions import CondaKeyError, CondaValueError, CouldntParseError
descr = """
Modify configuration values in .condarc. This is modeled after the git
config command. Writes to the user .condarc file (%s) by default.
""" % user_rc_path
# Note, the extra whitespace in the list keys is on purpose. It's so the
# formatting from help2man is still valid YAML (otherwise it line wraps the
# keys like "- conda - defaults"). Technically the parser here still won't
# recognize it because it removes the indentation, but at least it will be
# valid.
additional_descr = """
See http://conda.pydata.org/docs/config.html for details on all the options
that can go in .condarc.
List keys, like
channels:
- conda
- defaults
are modified with the --add and --remove options. For example
conda config --add channels r
on the above configuration would prepend the key 'r', giving
channels:
- r
- conda
- defaults
Note that the key 'channels' implicitly contains the key 'defaults' if it has
not been configured yet.
Boolean keys, like
always_yes: true
are modified with --set and removed with --remove-key. For example
conda config --set always_yes false
gives
always_yes: false
Note that in YAML, "yes", "YES", "on", "true", "True", and "TRUE" are all
valid ways to spell "true", and "no", "NO", "off", "false", "False", and
"FALSE", are all valid ways to spell "false".
The .condarc file is YAML, and any valid YAML syntax is allowed.
"""
# Note, the formatting of this is designed to work well with help2man
example = """
Examples:
Get the channels defined in the system .condarc:
conda config --get channels --system
Add the 'foo' Binstar channel:
conda config --add channels foo
Disable the 'show_channel_urls' option:
conda config --set show_channel_urls no
"""
class SingleValueKey(Completer):
    """Tab-completion choices for ``conda config --set``: scalar condarc
    keys plus the accepted boolean spellings."""
    def _get_items(self):
        return rc_bool_keys + \
               rc_string_keys + \
               ['yes', 'no', 'on', 'off', 'true', 'false']
class ListKey(Completer):
    """Tab-completion choices for list-valued condarc keys
    (``conda config --append/--prepend``)."""
    def _get_items(self):
        return rc_list_keys
class BoolOrListKey(Completer):
    """Tab-completion choices for ``conda config --get``: list and boolean
    condarc keys; membership test included so it can serve as argparse
    ``choices``."""
    def __contains__(self, other):
        return other in self.get_items()

    def _get_items(self):
        return rc_list_keys + rc_bool_keys
def configure_parser(sub_parsers):
    """Register the ``conda config`` sub-command and its arguments.

    Two mutually exclusive groups: *location* selects which condarc file is
    targeted (--system / --file, default user condarc or $CONDARC), and
    *action* (required) selects exactly one of the config operations.
    """
    p = sub_parsers.add_parser(
        'config',
        description=descr,
        help=descr,
        epilog=additional_descr + example,
    )
    add_parser_json(p)

    # TODO: use argparse.FileType
    location = p.add_mutually_exclusive_group()
    location.add_argument(
        "--system",
        action="store_true",
        help="""Write to the system .condarc file ({system}). Otherwise writes to the user
        config file ({user}).""".format(system=sys_rc_path,
                                        user=user_rc_path),
    )
    location.add_argument(
        "--file",
        action="store",
        help="""Write to the given file. Otherwise writes to the user config file ({user})
or the file path given by the 'CONDARC' environment variable, if it is set
(default: %(default)s).""".format(user=user_rc_path),
        default=os.environ.get('CONDARC', user_rc_path)
    )

    # XXX: Does this really have to be mutually exclusive. I think the below
    # code will work even if it is a regular group (although combination of
    # --add and --remove with the same keys will not be well-defined).
    action = p.add_mutually_exclusive_group(required=True)
    action.add_argument(
        "--show",
        action="store_true",
        help="Display all configuration values as calculated and compiled.",
    )
    action.add_argument(
        "--show-sources",
        action="store_true",
        help="Display all identified configuration sources.",
    )
    action.add_argument(
        "--validate",
        action="store_true",
        help="Validate all configuration sources.",
    )
    action.add_argument(
        "--get",
        nargs='*',
        action="store",
        help="Get a configuration value.",
        default=None,
        metavar='KEY',
        choices=BoolOrListKey()
    )
    action.add_argument(
        "--append",
        nargs=2,
        action="append",
        help="""Add one configuration value to the end of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--prepend", "--add",
        nargs=2,
        action="append",
        help="""Add one configuration value to the beginning of a list key.""",
        default=[],
        choices=ListKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--set",
        nargs=2,
        action="append",
        help="""Set a boolean or string key""",
        default=[],
        choices=SingleValueKey(),
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove",
        nargs=2,
        action="append",
        help="""Remove a configuration value from a list key. This removes
    all instances of the value.""",
        default=[],
        metavar=('KEY', 'VALUE'),
    )
    action.add_argument(
        "--remove-key",
        nargs=1,
        action="append",
        help="""Remove a configuration key (and all its values).""",
        default=[],
        metavar="KEY",
    )
    p.add_argument(
        "-f", "--force",
        action="store_true",
        help="""Write to the config file using the yaml parser. This will
    remove any comments or structure from the file."""
    )
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point for ``conda config``: delegate to execute_config and wrap
    expected failures in a CondaError for uniform CLI reporting."""
    try:
        execute_config(args, parser)
    except (CouldntParseError, NotImplementedError) as e:
        raise CondaError(e)
def format_dict(d):
    """Render a mapping as ``key: value`` display lines; nested mappings and
    iterables go through pretty_map/pretty_list, empties render as {} / []."""
    lines = []
    for k, v in iteritems(d):
        if isinstance(v, collections.Mapping):
            if v:
                lines.append("%s:" % k)
                lines.append(pretty_map(v))
            else:
                lines.append("%s: {}" % k)
        elif isiterable(v):
            if v:
                lines.append("%s:" % k)
                lines.append(pretty_list(v))
            else:
                lines.append("%s: []" % k)
        else:
            lines.append("%s: %s" % (k, v if v is not None else "None"))
    return lines
def execute_config(args, parser):
    """Implement every ``conda config`` sub-action.

    --show-sources, --show, and --validate short-circuit before the condarc
    file is read; the remaining actions read rc_path once, mutate the
    in-memory dict, and write it back at the end.
    """
    json_warnings = []
    json_get = {}

    if args.show_sources:
        if context.json:
            print(json.dumps(context.collect_all(), sort_keys=True,
                             indent=2, separators=(',', ': ')))
        else:
            lines = []
            for source, reprs in iteritems(context.collect_all()):
                lines.append("==> %s <==" % source)
                lines.extend(format_dict(reprs))
                lines.append('')
            print('\n'.join(lines))
        return

    if args.show:
        from collections import OrderedDict
        d = OrderedDict((key, getattr(context, key))
                        for key in sorted(('add_anaconda_token',
                                           'add_pip_as_python_dependency',
                                           'allow_softlinks',
                                           'always_copy',
                                           'always_yes',
                                           'auto_update_conda',
                                           'binstar_upload',
                                           'changeps1',
                                           'channel_alias',
                                           'channel_priority',
                                           'channels',
                                           'client_ssl_cert',
                                           'client_ssl_cert_key',
                                           'create_default_packages',
                                           'debug',
                                           'default_channels',
                                           'disallow',
                                           'envs_dirs',
                                           'json',
                                           'offline',
                                           'proxy_servers',
                                           'quiet',
                                           'shortcuts',
                                           'show_channel_urls',
                                           'ssl_verify',
                                           'track_features',
                                           'update_dependencies',
                                           'use_pip',
                                           'verbosity',
                                           )))
        if context.json:
            # EntityEncoder serializes the non-JSON-native context values
            print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': '),
                             cls=EntityEncoder))
        else:
            print('\n'.join(format_dict(d)))
        context.validate_configuration()
        return

    if args.validate:
        context.validate_all()
        return

    if args.system:
        rc_path = sys_rc_path
    elif args.file:
        rc_path = args.file
    else:
        rc_path = user_rc_path

    # read existing condarc
    if os.path.exists(rc_path):
        with open(rc_path, 'r') as fh:
            rc_config = yaml_load(fh) or {}
    else:
        rc_config = {}

    # Get
    if args.get is not None:
        context.validate_all()
        if args.get == []:
            args.get = sorted(rc_config.keys())
        for key in args.get:
            if key not in rc_list_keys + rc_bool_keys + rc_string_keys:
                if key not in rc_other:
                    message = "unknown key %s" % key
                    if not context.json:
                        print(message, file=sys.stderr)
                    else:
                        json_warnings.append(message)
                continue
            if key not in rc_config:
                continue

            if context.json:
                json_get[key] = rc_config[key]
                continue

            if isinstance(rc_config[key], (bool, string_types)):
                print("--set", key, rc_config[key])
            else:  # assume the key is a list-type
                # Note, since conda config --add prepends, these are printed in
                # the reverse order so that entering them in this order will
                # recreate the same file
                items = rc_config.get(key, [])
                numitems = len(items)
                for q, item in enumerate(reversed(items)):
                    # Use repr so that it can be pasted back in to conda config --add
                    if key == "channels" and q in (0, numitems-1):
                        print("--add", key, repr(item),
                              " # lowest priority" if q == 0 else " # highest priority")
                    else:
                        print("--add", key, repr(item))

    # prepend, append, add
    for arg, prepend in zip((args.prepend, args.append), (True, False)):
        for key, item in arg:
            if key == 'channels' and key not in rc_config:
                rc_config[key] = ['defaults']
            if key not in rc_list_keys:
                raise CondaValueError("key must be one of %s, not %r" %
                                      (', '.join(rc_list_keys), key))
            if not isinstance(rc_config.get(key, []), list):
                bad = rc_config[key].__class__.__name__
                raise CouldntParseError("key %r should be a list, not %s." % (key, bad))
            if key == 'default_channels' and rc_path != sys_rc_path:
                msg = "'default_channels' is only configurable for system installs"
                raise NotImplementedError(msg)
            arglist = rc_config.setdefault(key, [])
            if item in arglist:
                # Right now, all list keys should not contain duplicates
                message = "Warning: '%s' already in '%s' list, moving to the %s" % (
                    item, key, "top" if prepend else "bottom")
                arglist = rc_config[key] = [p for p in arglist if p != item]
                if not context.json:
                    print(message, file=sys.stderr)
                else:
                    json_warnings.append(message)
            arglist.insert(0 if prepend else len(arglist), item)

    # Set
    set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys)
    for key, item in args.set:
        # Check key and value
        if key in set_bools:
            rc_config[key] = boolify(item)
        elif key in set_strings:
            assert isinstance(item, string_types)
            rc_config[key] = item
        else:
            raise CondaValueError("Error key must be one of %s, not %s" %
                                  (', '.join(set_bools | set_strings), key))

    # Remove
    for key, item in args.remove:
        if key not in rc_config:
            if key != 'channels':
                raise CondaKeyError(key, "key %r is not in the config file" % key)
            rc_config[key] = ['defaults']
        if item not in rc_config[key]:
            raise CondaKeyError(key, "%r is not in the %r key of the config file" %
                                (item, key))
        rc_config[key] = [i for i in rc_config[key] if i != item]

    # Remove Key
    for key, in args.remove_key:
        if key not in rc_config:
            raise CondaKeyError(key, "key %r is not in the config file" %
                                key)
        del rc_config[key]

    # config.rc_keys
    with open(rc_path, 'w') as rc:
        rc.write(yaml_dump(rc_config))

    if context.json:
        stdout_json_success(
            rc_path=rc_path,
            warnings=json_warnings,
            get=json_get
        )
    return
conda/models/channel.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from itertools import chain
from logging import getLogger
from requests.packages.urllib3.util import Url
from ..base.constants import (DEFAULT_CHANNELS_UNIX, DEFAULT_CHANNELS_WIN, MAX_CHANNEL_PRIORITY,
UTF8)
from ..base.context import context
from ..common.compat import iteritems, odict, with_metaclass
from ..common.url import (has_scheme, is_url, is_windows_path, join_url, on_win, path_to_url,
split_conda_url_easy_parts, split_scheme_auth_token, urlparse)
try:
from cytoolz.functoolz import excepts
from cytoolz.itertoolz import concatv, topk
except ImportError:
from .._vendor.toolz.functoolz import excepts # NOQA
from .._vendor.toolz.itertoolz import concatv, topk # NOQA
log = getLogger(__name__)
# backward compatibility for conda-build
def get_conda_build_local_url():
    """Backward-compat shim for conda-build; note the trailing comma — the
    local build root is returned as a 1-tuple."""
    return context.local_build_root,
"""
scheme <> auth <> location <> token <> channel <> subchannel <> platform <> package_filename
channel <> subchannel <> namespace <> package_name
"""
def tokenized_startswith(test_iterable, startswith_iterable):
    """True when *test_iterable* begins with the tokens of
    *startswith_iterable*; zip truncates, so an empty prefix matches."""
    return all(t == sw for t, sw in zip(test_iterable, startswith_iterable))
def tokenized_conda_url_startswith(test_url, startswith_url):
    """True when *test_url* is at or below *startswith_url*: identical
    host/port and a '/'-token path prefix (so /a/bc does not match /a/b)."""
    test_url, startswith_url = urlparse(test_url), urlparse(startswith_url)
    if test_url.host != startswith_url.host or test_url.port != startswith_url.port:
        return False
    norm_url_path = lambda url: url.path.strip('/') or '/'
    return tokenized_startswith(norm_url_path(test_url).split('/'),
                                norm_url_path(startswith_url).split('/'))
def _get_channel_for_name(channel_name):
    """Resolve a bare channel name: walk the name up one path segment at a
    time through context.custom_channels, else fall back to channel_alias."""
    def _get_channel_for_name_helper(name):
        if name in context.custom_channels:
            return context.custom_channels[name]
        else:
            test_name = name.rsplit('/', 1)[0]  # progressively strip off path segments
            if test_name == name:
                return None
            return _get_channel_for_name_helper(test_name)

    channel = _get_channel_for_name_helper(channel_name)

    if channel is not None:
        # stripping off path threw information away from channel_name (i.e. any potential subname)
        # channel.name *should still be* channel_name
        channel.name = channel_name
        return channel
    else:
        ca = context.channel_alias
        return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                       name=channel_name)
def _read_channel_configuration(scheme, host, port, path):
    """Map a parsed URL onto configured channels; returns a 5-tuple
    (location, name, scheme, auth, token).  Steps run most-specific first
    and the first match wins."""
    # return location, name, scheme, auth, token
    path = path and path.rstrip('/')
    test_url = Url(host=host, port=port, path=path).url

    # Step 1. No path given; channel name is None
    if not path:
        return Url(host=host, port=port).url.rstrip('/'), None, scheme or None, None, None

    # Step 2. migrated_custom_channels matches
    for name, location in sorted(context.migrated_custom_channels.items(), reverse=True,
                                 key=lambda x: len(x[0])):
        location, _scheme, _auth, _token = split_scheme_auth_token(location)
        if tokenized_conda_url_startswith(test_url, join_url(location, name)):
            # translate location to new location, with new credentials
            subname = test_url.replace(join_url(location, name), '', 1).strip('/')
            channel_name = join_url(name, subname)
            channel = _get_channel_for_name(channel_name)
            return channel.location, channel_name, channel.scheme, channel.auth, channel.token

    # Step 3. migrated_channel_aliases matches
    for migrated_alias in context.migrated_channel_aliases:
        if test_url.startswith(migrated_alias.location):
            name = test_url.replace(migrated_alias.location, '', 1).strip('/')
            ca = context.channel_alias
            return ca.location, name, ca.scheme, ca.auth, ca.token

    # Step 4. custom_channels matches
    for name, channel in sorted(context.custom_channels.items(), reverse=True,
                                key=lambda x: len(x[0])):
        that_test_url = join_url(channel.location, channel.name)
        if test_url.startswith(that_test_url):
            subname = test_url.replace(that_test_url, '', 1).strip('/')
            return (channel.location, join_url(channel.name, subname), scheme,
                    channel.auth, channel.token)

    # Step 5. channel_alias match
    ca = context.channel_alias
    if ca.location and test_url.startswith(ca.location):
        name = test_url.replace(ca.location, '', 1).strip('/') or None
        return ca.location, name, scheme, ca.auth, ca.token

    # Step 6. not-otherwise-specified file://-type urls
    if host is None:
        # this should probably only happen with a file:// type url
        assert port is None
        location, name = test_url.rsplit('/', 1)
        if not location:
            location = '/'
        _scheme, _auth, _token = 'file', None, None
        return location, name, _scheme, _auth, _token

    # Step 7. fall through to host:port as channel_location and path as channel_name
    return (Url(host=host, port=port).url.rstrip('/'), path.strip('/') or None,
            scheme or None, None, None)
def parse_conda_channel_url(url):
    """Decompose *url* into its conda-specific parts and build a Channel.

    The raw url is split into scheme/auth/token/platform/filename plus
    host/port/path; the latter three are matched against the active
    configuration to recover the channel's location and name.
    """
    parts = split_conda_url_easy_parts(url)
    scheme, auth, token, platform, package_filename, host, port, path, query = parts
    # Match host/port/path against configured channels to recover the
    # channel location/name plus any configured credentials.
    configuration = _read_channel_configuration(scheme, host, port, path)
    location, name, cfg_scheme, cfg_auth, cfg_token = configuration
    # At least one of location/name must have been resolved.
    assert location is not None or name is not None
    # Credentials embedded in the url win over configured ones.
    return Channel(cfg_scheme or 'https',
                   auth or cfg_auth,
                   location,
                   token or cfg_token,
                   name,
                   platform,
                   package_filename)
class ChannelType(type):
    """Metaclass giving Channel single-argument construction with caching.

    ``Channel(value)`` with exactly one positional argument routes through
    ``Channel.from_value`` and memoizes the result in ``Channel._cache_``;
    any other call signature falls through to normal construction.
    """
    def __call__(cls, *args, **kwargs):
        # Multi-arg or keyword construction behaves like a normal class.
        if len(args) != 1 or kwargs:
            return super(ChannelType, cls).__call__(*args, **kwargs)
        value = args[0]
        if isinstance(value, Channel):
            # Already a Channel: return it untouched.
            return value
        if value in Channel._cache_:
            return Channel._cache_[value]
        channel = Channel.from_value(value)
        Channel._cache_[value] = channel
        return channel
@with_metaclass(ChannelType)
class Channel(object):
    """Parsed representation of a conda channel.

    A channel is decomposed into scheme, auth, location, token, name,
    platform, and package_filename.  Instances are interned by the
    ChannelType metaclass, so ``Channel(value)`` may return a cached object.
    """
    # Interning cache used by ChannelType; maps raw input values to Channels.
    _cache_ = dict()
    @staticmethod
    def _reset_state():
        # Drop all interned channels (e.g. after configuration changes).
        Channel._cache_ = dict()
    def __init__(self, scheme=None, auth=None, location=None, token=None, name=None,
                 platform=None, package_filename=None):
        self.scheme = scheme
        self.auth = auth
        self.location = location
        self.token = token
        self.name = name
        self.platform = platform
        self.package_filename = package_filename
    @property
    def channel_location(self):
        # Alias for .location.
        return self.location
    @property
    def channel_name(self):
        # Alias for .name.
        return self.name
    @staticmethod
    def from_url(url):
        """Build a Channel by fully parsing a conda url."""
        return parse_conda_channel_url(url)
    @staticmethod
    def from_channel_name(channel_name):
        """Build a Channel from a bare channel name using configuration."""
        return _get_channel_for_name(channel_name)
    @staticmethod
    def from_value(value):
        """Build a Channel from any supported value: None, bytes, a url,
        a local path, a tarball path/url, a multichannel name, or a
        channel name."""
        if value is None:
            return Channel(name="<unknown>")
        if hasattr(value, 'decode'):
            # bytes -> text
            value = value.decode(UTF8)
        if has_scheme(value):
            if value.startswith('file:') and on_win:
                # Normalize Windows backslashes inside file: urls.
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        elif value.startswith(('./', '..', '~', '/')) or is_windows_path(value):
            # A local filesystem path.
            return Channel.from_url(path_to_url(value))
        elif value.endswith('.tar.bz2'):
            if value.startswith('file:') and on_win:
                value = value.replace('\\', '/')
            return Channel.from_url(value)
        else:
            # at this point assume we don't have a bare (non-scheme) url
            # e.g. this would be bad: repo.continuum.io/pkgs/free
            if value in context.custom_multichannels:
                return MultiChannel(value, context.custom_multichannels[value])
            else:
                return Channel.from_channel_name(value)
    @staticmethod
    def make_simple_channel(channel_alias, channel_url, name=None):
        """Build a Channel for *channel_url*, borrowing scheme/auth/token
        from *channel_alias* when the url carries no scheme of its own."""
        ca = channel_alias
        test_url, scheme, auth, token = split_scheme_auth_token(channel_url)
        if name and scheme:
            return Channel(scheme=scheme, auth=auth, location=test_url, token=token,
                           name=name.strip('/'))
        if scheme:
            if ca.location and test_url.startswith(ca.location):
                # Url lives under the alias: the remainder is the name.
                location, name = ca.location, test_url.replace(ca.location, '', 1)
            else:
                url_parts = urlparse(test_url)
                location, name = Url(host=url_parts.host, port=url_parts.port).url, url_parts.path
            return Channel(scheme=scheme, auth=auth, location=location, token=token,
                           name=name.strip('/'))
        else:
            # No scheme at all: treat channel_url as a name under the alias.
            return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token,
                           name=name and name.strip('/') or channel_url.strip('/'))
    @property
    def canonical_name(self):
        """The user-facing name: a multichannel name, a custom channel
        name, the bare name under a known alias, or the full base url."""
        for multiname, channels in iteritems(context.custom_multichannels):
            for channel in channels:
                if self.name == channel.name:
                    return multiname
        for that_name in context.custom_channels:
            if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')):
                return self.name
        if any(c.location == self.location
               for c in concatv((context.channel_alias,), context.migrated_channel_aliases)):
            return self.name
        # fall back to the equivalent of self.base_url
        # re-defining here because base_url for MultiChannel is None
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))
    def urls(self, with_credentials=False, platform=None):
        """Return the platform-specific and noarch urls for this channel."""
        base = [self.location]
        if with_credentials and self.token:
            # Tokens are embedded in the path as .../t/<token>/...
            base.extend(['t', self.token])
        base.append(self.name)
        base = join_url(*base)
        def _platforms():
            # Requested platform (or configured subdir) plus noarch.
            p = platform or self.platform or context.subdir
            return (p, 'noarch') if p != 'noarch' else ('noarch',)
        bases = (join_url(base, p) for p in _platforms())
        if with_credentials and self.auth:
            return ["%s://%s@%s" % (self.scheme, self.auth, b) for b in bases]
        else:
            return ["%s://%s" % (self.scheme, b) for b in bases]
    def url(self, with_credentials=False):
        """Return a single url for this channel (down to the package file,
        when platform/package_filename are set)."""
        base = [self.location]
        if with_credentials and self.token:
            base.extend(['t', self.token])
        base.append(self.name)
        if self.platform:
            base.append(self.platform)
            if self.package_filename:
                base.append(self.package_filename)
        else:
            # No explicit platform: fall back to the configured subdir.
            base.append(context.subdir)
        base = join_url(*base)
        if with_credentials and self.auth:
            return "%s://%s@%s" % (self.scheme, self.auth, base)
        else:
            return "%s://%s" % (self.scheme, base)
    @property
    def base_url(self):
        # scheme://location/name, without platform or credentials.
        return "%s://%s" % (self.scheme, join_url(self.location, self.name))
    def __str__(self):
        return self.base_url
    def __repr__(self):
        # Credentials are masked so reprs are safe to log.
        return ("Channel(scheme=%s, auth=%s, location=%s, token=%s, name=%s, platform=%s, "
                "package_filename=%s)" % (self.scheme,
                                          self.auth and "%s:<PASSWORD>" % self.auth.split(':')[0],
                                          self.location,
                                          self.token and "<TOKEN>",
                                          self.name,
                                          self.platform,
                                          self.package_filename))
    def __eq__(self, other):
        # Channel identity is (location, name); must stay in sync with __hash__.
        if isinstance(other, Channel):
            return self.location == other.location and self.name == other.name
        else:
            return False
    def __hash__(self):
        return hash((self.location, self.name))
    def __nonzero__(self):
        # Truthy when at least a location or a name is known.
        return any((self.location, self.name))
    def __bool__(self):
        return self.__nonzero__()
    def __json__(self):
        # Serialization hook: expose all attributes.
        return self.__dict__
    @property
    def url_channel_wtf(self):
        # Legacy (url, canonical_name) pair used by older call sites.
        return self.base_url, self.canonical_name
class MultiChannel(Channel):
    """A named bundle of Channels (e.g. 'defaults') presented as one channel."""

    def __init__(self, name, channels):
        self.name = name
        self._channels = channels
        # A multichannel has no single location/scheme/credentials of its own.
        self.location = None
        self.scheme = None
        self.auth = None
        self.token = None
        self.platform = None
        self.package_filename = None

    @property
    def channel_location(self):
        # Always None: a multichannel spans several locations.
        return self.location

    @property
    def canonical_name(self):
        return self.name

    def urls(self, with_credentials=False, platform=None):
        """Flatten the member channels' urls, preserving member order."""
        members = self._channels
        if platform and platform != context.subdir and self.name == 'defaults':
            # Different platforms ship different default channels, so rebuild
            # the member list for the requested platform.
            defaults = DEFAULT_CHANNELS_WIN if 'win' in platform else DEFAULT_CHANNELS_UNIX
            alias = context.channel_alias
            members = tuple(Channel.make_simple_channel(alias, u) for u in defaults)
        result = []
        for member in members:
            result.extend(member.urls(with_credentials, platform))
        return result

    @property
    def base_url(self):
        # No single base url exists for a multichannel.
        return None

    def url(self, with_credentials=False):
        # No single url exists for a multichannel.
        return None
def prioritize_channels(channels, with_credentials=True, platform=None):
    """Map each channel url to a (canonical_name, priority) pair.

    Priority follows the input order, capped at MAX_CHANNEL_PRIORITY - 1;
    the first channel to claim a url wins.  Example entry:
    ('https://conda.anaconda.org/conda-forge/osx-64/', ('conda-forge', 1))
    """
    prioritized = odict()
    for priority, entry in enumerate(channels):
        channel = Channel(entry)
        capped = min(priority, MAX_CHANNEL_PRIORITY - 1)
        for url in channel.urls(with_credentials, platform):
            if url not in prioritized:
                prioritized[url] = (channel.canonical_name, capped)
    return prioritized
def offline_keep(url):
    """In offline mode, keep only non-url values and local file:/ urls."""
    if not context.offline:
        # Online: everything is usable.
        return True
    return not is_url(url) or url.startswith('file:/')
| conda/cli/main_config.py
--- a/conda/cli/main_config.py
+++ b/conda/cli/main_config.py
@@ -13,6 +13,7 @@
from .common import (Completer, add_parser_json, stdout_json_success)
from .. import CondaError
from .._vendor.auxlib.compat import isiterable
+from .._vendor.auxlib.entity import EntityEncoder
from .._vendor.auxlib.type_coercion import boolify
from ..base.context import context
from ..common.configuration import pretty_list, pretty_map
@@ -303,7 +304,8 @@ def execute_config(args, parser):
'verbosity',
)))
if context.json:
- print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': ')))
+ print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': '),
+ cls=EntityEncoder))
else:
print('\n'.join(format_dict(d)))
context.validate_configuration()
conda/models/channel.py
--- a/conda/models/channel.py
+++ b/conda/models/channel.py
@@ -335,6 +335,9 @@ def __nonzero__(self):
def __bool__(self):
return self.__nonzero__()
+ def __json__(self):
+ return self.__dict__
+
@property
def url_channel_wtf(self):
return self.base_url, self.canonical_name |
local install creates a `file:` folder
Hi,
A folder named `file:` is always created when I install a conda package locally.
`conda install /local/path/bz2`
related to: https://github.com/conda/conda/issues/3770
```bash
$ conda info
Current conda install:
platform : linux-64
conda version : 4.2.11
conda is private : False
conda-env version : 4.2.11
conda-build version : 2.0.7
python version : 3.5.1.final.0
requests version : 2.9.1
root environment : /home/haichit/anaconda3 (writable)
default environment : /home/haichit/anaconda3
envs directories : /home/haichit/anaconda3/envs
package cache : /home/haichit/anaconda3/pkgs
channel URLs : https://conda.anaconda.org/conda-canary/linux-64
https://conda.anaconda.org/conda-canary/noarch
https://repo.continuum.io/pkgs/free/linux-64
https://repo.continuum.io/pkgs/free/noarch
https://repo.continuum.io/pkgs/pro/linux-64
https://repo.continuum.io/pkgs/pro/noarch
https://conda.anaconda.org/conda-forge/linux-64
https://conda.anaconda.org/conda-forge/noarch
https://conda.anaconda.org/ambermd/linux-64
https://conda.anaconda.org/ambermd/noarch
config file : /home/haichit/.condarc
offline mode : False
```
| conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import, unicode_literals
import os
import re
import shutil
import sys
from collections import defaultdict
from conda.base.constants import DEFAULTS
from conda.core.package_cache import cached_url, is_fetched, is_extracted, find_new_location
from conda.models.dist import Dist
from conda.models.record import Record
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from conda.core.index import get_index
from .base.context import context
from .common.url import path_to_url, is_url
from .compat import iteritems, itervalues
from .models.channel import Channel
from .exceptions import (CondaFileNotFoundError, ParseError, MD5MismatchError,
PackageNotFoundError, CondaRuntimeError)
from .core.linked_data import linked as install_linked, is_linked, linked_data
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, on_win
from .common.disk import rm_rf
from .install import LINK_COPY, LINK_HARD, LINK_SOFT
from .install import try_hard_link
def conda_installed_files(prefix, exclude_self_build=False):
    """Return the set of files conda has installed into *prefix*.

    When *exclude_self_build* is true, packages carrying a 'file_hash'
    entry in their metadata (locally built packages) are skipped.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            continue
        # set.update accepts any iterable directly.
        files.update(meta.get('files', ()))
    return files
# Matches explicit specs of the form "(url-or-path/)name.tar.bz2(#md5)?".
# NOTE(review): '(:?#' also tolerates a ':' before the '#'; likely a typo
# for the non-capturing '(?:#', but kept as-is for compatibility with the
# "(url|path)(:#md5)?" format documented in explicit().
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
    """Install explicitly-listed package tarballs/urls into *prefix*.

    Each spec has the form '(url|path)(:#md5)?'; the literal '@EXPLICIT'
    marker line is skipped.  Builds a plan of fetch/extract/unlink/link
    actions, verifies md5s when available, and executes the plan.

    Bug fix: the loop used to rebind the *prefix* parameter (the target
    environment path) to the package-cache channel prefix, so later uses
    such as try_hard_link() received the wrong value (and a stray 'file:'
    directory could be created for local installs).  The channel prefix
    is now held in the separate local ``url_prefix``.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
    # Currently linked package names -> Dist, used to unlink on upgrade.
    linked = {dist.dist_name: dist for dist in install_linked(prefix)}
    index_args = index_args or {}
    index = index or {}
    verifies = []  # List[Tuple(filename, md5)]
    channels = set()
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            raise ParseError('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            # Local path: default to the current directory and require it to exist.
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                raise CondaFileNotFoundError(join(url_p, fn))
            url_p = path_to_url(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)
        # is_local: if the tarball is stored locally (file://)
        # is_cache: if the tarball is sitting in our cache
        is_local = not is_url(url) or url.startswith('file://')
        # url_prefix deliberately does NOT reuse the name `prefix`, which
        # still refers to the target environment below (see docstring).
        url_prefix = cached_url(url) if is_local else None
        is_cache = url_prefix is not None
        if is_cache:
            # Channel information from the cache
            schannel = DEFAULTS if url_prefix == '' else url_prefix[:-2]
        else:
            # Channel information from the URL
            channel, schannel = Channel(url).url_channel_wtf
            url_prefix = '' if schannel == DEFAULTS else schannel + '::'
        fn = url_prefix + fn
        dist = Dist(fn[:-8])
        # Add explicit file to index so we'll be sure to see it later
        if is_local:
            index[dist] = Record(**{
                'fn': dist.to_filename(),
                'url': url,
                'md5': md5,
                'build': dist.quad[2],
                'build_number': dist.build_number(),
                'name': dist.quad[0],
                'version': dist.quad[1],
            })
            verifies.append((fn, md5))
        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache, unless the path *is* to the cache
        if pkg_path and not is_cache and (is_local or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                pkg_path, conflict = find_new_location(dist)
                pkg_path = join(pkg_path, dist.to_filename())
                if conflict:
                    actions[RM_FETCHED].append(Dist(conflict))
                if not is_local:
                    if dist not in index or index[dist].get('not_fetched'):
                        channels.add(schannel)
                    verifies.append((dist.to_filename(), md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = dist.dist_name
        if name in linked:
            actions[UNLINK].append(linked[name])
        ######################################
        # copied from conda/plan.py   TODO: refactor
        ######################################
        # check for link action
        fetched_dist = dir_path or pkg_path[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not dir_path:
                # If not already extracted, create some dummy
                # data to test with
                rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if context.always_copy:
                lt = LINK_COPY
            elif try_hard_link(fetched_dir, prefix, dist):
                # `prefix` here is the target environment, as intended.
                lt = LINK_HARD
            elif context.allow_softlinks and not on_win:
                lt = LINK_SOFT
            else:
                lt = LINK_COPY
            actions[LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            actions[LINK].append('%s %d' % (dist, LINK_COPY))
        finally:
            if not dir_path:
                # Remove the dummy data
                try:
                    rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass
        ######################################
        # ^^^^^^^^^^ copied from conda/plan.py
        ######################################
    # Pull the repodata for channels we are using
    if channels:
        index_args = index_args or {}
        index_args = index_args.copy()
        index_args['prepend'] = False
        index_args['channel_urls'] = list(channels)
        index.update(get_index(**index_args))
    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(Dist(fn))
        if info is None:
            raise PackageNotFoundError(fn, "no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            raise MD5MismatchError('MD5 mismatch for: %s\n   spec: %s\n   repo: %s'
                                   % (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix*, optionally with '/' separators."""
    relative = path[len(prefix) + 1:]
    if on_win and windows_forward_slashes:
        # Normalize to forward slashes on Windows when requested.
        relative = relative.replace('\\', '/')
    return relative
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are relative to *prefix*.  When *ignore_predefined_files* is
    true, well-known conda-managed top-level entries (and conda's entry
    points directly inside bin/) are skipped.  Symlinked directories are
    recorded as entries themselves rather than descended into.
    """
    res = set()
    prefix = abspath(prefix)
    # Top-level entries that belong to conda itself, not the environment.
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # conda's own entry points in bin/.
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            # Plain top-level file.
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # Only filter binignore entries directly inside bin/.
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # Record symlinked dirs themselves; os.walk does not follow them.
                    res.add(relpath(path, prefix))
    if on_win and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """Return the set of files in *prefix* that conda did not install.

    Editor backup files, macOS .DS_Store files, and .pyc files whose .py
    source is conda-managed are filtered out of the result.
    """
    managed = conda_installed_files(prefix, exclude_self_build)

    def _ignorable(path):
        # Noise, rather than genuinely untracked content.
        if path.endswith('~'):
            return True
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            return True
        return path.endswith('.pyc') and path[:-1] in managed

    return {path for path in walk_prefix(prefix) - managed if not _ignorable(path)}
def which_prefix(path):
    """Return the conda environment prefix containing *path*, or None.

    Walks upward from *path* looking for a directory that contains a
    'conda-meta' subdirectory, which marks an environment root.
    """
    current = abspath(path)
    while True:
        if isdir(join(current, 'conda-meta')):
            # Found the environment root.
            return current
        parent = dirname(current)
        if parent == current:
            # Reached the filesystem root without finding a prefix.
            return None
        current = parent
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if not on_win:
        return
    if not exists(join(context.root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    # Touch an empty marker file; only its existence matters.
    with open(join(prefix, '.nonadmin'), 'w') as marker:
        marker.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        registry = join(conda_dir, 'environments.txt')
        with open(registry, 'a') as fh:
            fh.write('%s\n' % prefix)
    except IOError:
        # Recording the environment is optional; never fail the caller.
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
    """
    clone existing prefix1 into new prefix2

    Returns (actions, untracked_files): the executed install plan and the
    set of non-conda-managed files copied over.
    """
    untracked_files = untracked(prefix1)
    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    # Fixed-point loop: keep removing packages whose dependencies were removed.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                # NOTE(review): pkg is a Dist value; ' - ' + pkg assumes Dist
                # is str-like -- confirm, otherwise this raises TypeError.
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        index_args = index_args or {}
        index = get_index(**index_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = dist.dist_name
            fn = dist.to_filename()
            # Index entries sharing the exact same tarball filename.
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                del drecs[dist]
                # Prefer the highest-versioned match.
                dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[Dist(dist_str)] = r.index[dist_str]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, context.subdir, notfound)
        raise CondaRuntimeError(msg)
    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist
        if fkey not in index:
            # Entry came from linked data, not the repodata index.
            info['not_fetched'] = True
            index[fkey] = info
            # Invalidate r so it is rebuilt over the augmented index below.
            r = None
        urls[dist] = info['url']
    if r is None:
        r = Resolve(index)
    # Install in dependency order.
    dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
    urls = [urls[d] for d in dists]
    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))
    # Copy untracked files over, rewriting embedded prefix paths in text files.
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            rm_rf(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # Recreate symlinks rather than copying their targets.
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue
        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)
    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, index_args=index_args)
    return actions, untracked_files
def make_icon_url(info):
    """Derive the icon url for a package record, or '' if it has none."""
    channel = info.get('channel')
    icon_fn = info.get('icon')
    if not (channel and icon_fn):
        return ''
    # Icons live in an 'icons' directory next to the platform subdir.
    return '%s/icons/%s' % (dirname(channel), icon_fn)
def list_prefixes():
    """Yield every environment prefix conda knows about, root prefix last.

    Scans each configured envs directory for non-hidden subdirectories.
    (Fixes a harmless redundancy: the original recomputed
    join(envs_dir, dn) a second time inside the isdir branch.)
    """
    for envs_dir in context.envs_dirs:
        if not isdir(envs_dir):
            continue
        for dn in sorted(os.listdir(envs_dir)):
            if dn.startswith('.'):
                # Hidden entries are not environments.
                continue
            prefix = join(envs_dir, dn)
            if isdir(prefix):
                yield prefix
    yield context.root_dir
<|code_end|>
| conda/misc.py
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import print_function, division, absolute_import, unicode_literals
import os
import re
import shutil
import sys
from collections import defaultdict
from conda.base.constants import DEFAULTS
from conda.core.package_cache import cached_url, is_fetched, is_extracted, find_new_location
from conda.models.dist import Dist
from conda.models.record import Record
from os.path import (abspath, dirname, expanduser, exists,
isdir, isfile, islink, join, relpath, curdir)
from conda.core.index import get_index
from .base.context import context
from .common.url import path_to_url, is_url
from .compat import iteritems, itervalues
from .models.channel import Channel
from .exceptions import (CondaFileNotFoundError, ParseError, MD5MismatchError,
PackageNotFoundError, CondaRuntimeError)
from .core.linked_data import linked as install_linked, is_linked, linked_data
from .instructions import RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
from .plan import execute_actions
from .resolve import Resolve, MatchSpec
from .utils import md5_file, on_win
from .common.disk import rm_rf
from .install import LINK_COPY, LINK_HARD, LINK_SOFT
from .install import try_hard_link
def conda_installed_files(prefix, exclude_self_build=False):
    """Return the set of files conda has installed into *prefix*.

    When *exclude_self_build* is true, packages carrying a 'file_hash'
    entry in their metadata (locally built packages) are skipped.
    """
    files = set()
    for dist in install_linked(prefix):
        meta = is_linked(prefix, dist)
        if exclude_self_build and 'file_hash' in meta:
            continue
        # set.update accepts any iterable directly.
        files.update(meta.get('files', ()))
    return files
# Matches explicit specs of the form "(url-or-path/)name.tar.bz2(#md5)?".
# NOTE(review): '(:?#' also tolerates a ':' before the '#'; likely a typo
# for the non-capturing '(?:#', but kept as-is for compatibility with the
# "(url|path)(:#md5)?" format documented in explicit().
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
    """Install explicitly-listed package tarballs/urls into *prefix*.

    Each spec has the form '(url|path)(:#md5)?'; the literal '@EXPLICIT'
    marker line is skipped.  Builds a plan of fetch/extract/unlink/link
    actions, verifies md5s when available, and executes the plan.
    """
    actions = defaultdict(list)
    actions['PREFIX'] = prefix
    actions['op_order'] = RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT, UNLINK, LINK, SYMLINK_CONDA
    # Currently linked package names -> Dist, used to unlink on upgrade.
    linked = {dist.dist_name: dist for dist in install_linked(prefix)}
    index_args = index_args or {}
    index = index or {}
    verifies = []  # List[Tuple(filename, md5)]
    channels = set()
    for spec in specs:
        if spec == '@EXPLICIT':
            continue
        # Format: (url|path)(:#md5)?
        m = url_pat.match(spec)
        if m is None:
            raise ParseError('Could not parse explicit URL: %s' % spec)
        url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5')
        if not is_url(url_p):
            # Local path: default to the current directory and require it to exist.
            if url_p is None:
                url_p = curdir
            elif not isdir(url_p):
                raise CondaFileNotFoundError(join(url_p, fn))
            url_p = path_to_url(url_p).rstrip('/')
        url = "{0}/{1}".format(url_p, fn)
        # is_local: if the tarball is stored locally (file://)
        # is_cache: if the tarball is sitting in our cache
        is_local = not is_url(url) or url.startswith('file://')
        # url_prefix is the channel prefix for the cache entry; it is kept
        # separate from `prefix`, which is the target environment path.
        url_prefix = cached_url(url) if is_local else None
        is_cache = url_prefix is not None
        if is_cache:
            # Channel information from the cache
            schannel = DEFAULTS if url_prefix == '' else url_prefix[:-2]
        else:
            # Channel information from the URL
            channel, schannel = Channel(url).url_channel_wtf
            url_prefix = '' if schannel == DEFAULTS else schannel + '::'
        fn = url_prefix + fn
        dist = Dist(fn[:-8])
        # Add explicit file to index so we'll be sure to see it later
        if is_local:
            index[dist] = Record(**{
                'fn': dist.to_filename(),
                'url': url,
                'md5': md5,
                'build': dist.quad[2],
                'build_number': dist.build_number(),
                'name': dist.quad[0],
                'version': dist.quad[1],
            })
            verifies.append((fn, md5))
        pkg_path = is_fetched(dist)
        dir_path = is_extracted(dist)
        # Don't re-fetch unless there is an MD5 mismatch
        # Also remove explicit tarballs from cache, unless the path *is* to the cache
        if pkg_path and not is_cache and (is_local or md5 and md5_file(pkg_path) != md5):
            # This removes any extracted copies as well
            actions[RM_FETCHED].append(dist)
            pkg_path = dir_path = None
        # Don't re-extract unless forced, or if we can't check the md5
        if dir_path and (force_extract or md5 and not pkg_path):
            actions[RM_EXTRACTED].append(dist)
            dir_path = None
        if not dir_path:
            if not pkg_path:
                pkg_path, conflict = find_new_location(dist)
                pkg_path = join(pkg_path, dist.to_filename())
                if conflict:
                    actions[RM_FETCHED].append(Dist(conflict))
                if not is_local:
                    if dist not in index or index[dist].get('not_fetched'):
                        channels.add(schannel)
                    verifies.append((dist.to_filename(), md5))
                actions[FETCH].append(dist)
            actions[EXTRACT].append(dist)
        # unlink any installed package with that name
        name = dist.dist_name
        if name in linked:
            actions[UNLINK].append(linked[name])
        ######################################
        # copied from conda/plan.py   TODO: refactor
        ######################################
        # check for link action
        fetched_dist = dir_path or pkg_path[:-8]
        fetched_dir = dirname(fetched_dist)
        try:
            # Determine what kind of linking is necessary
            if not dir_path:
                # If not already extracted, create some dummy
                # data to test with
                rm_rf(fetched_dist)
                ppath = join(fetched_dist, 'info')
                os.makedirs(ppath)
                index_json = join(ppath, 'index.json')
                with open(index_json, 'w'):
                    pass
            if context.always_copy:
                lt = LINK_COPY
            elif try_hard_link(fetched_dir, prefix, dist):
                lt = LINK_HARD
            elif context.allow_softlinks and not on_win:
                lt = LINK_SOFT
            else:
                lt = LINK_COPY
            actions[LINK].append('%s %d' % (dist, lt))
        except (OSError, IOError):
            # Probing failed; copying always works.
            actions[LINK].append('%s %d' % (dist, LINK_COPY))
        finally:
            if not dir_path:
                # Remove the dummy data
                try:
                    rm_rf(fetched_dist)
                except (OSError, IOError):
                    pass
        ######################################
        # ^^^^^^^^^^ copied from conda/plan.py
        ######################################
    # Pull the repodata for channels we are using
    if channels:
        index_args = index_args or {}
        index_args = index_args.copy()
        index_args['prepend'] = False
        index_args['channel_urls'] = list(channels)
        index.update(get_index(**index_args))
    # Finish the MD5 verification
    for fn, md5 in verifies:
        info = index.get(Dist(fn))
        if info is None:
            raise PackageNotFoundError(fn, "no package '%s' in index" % fn)
        if md5 and 'md5' not in info:
            sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        if md5 and info['md5'] != md5:
            raise MD5MismatchError('MD5 mismatch for: %s\n   spec: %s\n   repo: %s'
                                   % (fn, md5, info['md5']))
    execute_actions(actions, index=index, verbose=verbose)
    return actions
def rel_path(prefix, path, windows_forward_slashes=True):
    """Return *path* relative to *prefix*, optionally with '/' separators."""
    relative = path[len(prefix) + 1:]
    if on_win and windows_forward_slashes:
        # Normalize to forward slashes on Windows when requested.
        relative = relative.replace('\\', '/')
    return relative
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
    """
    Return the set of all files in a given prefix directory.

    Paths are relative to *prefix*.  When *ignore_predefined_files* is
    true, well-known conda-managed top-level entries (and conda's entry
    points directly inside bin/) are skipped.  Symlinked directories are
    recorded as entries themselves rather than descended into.
    """
    res = set()
    prefix = abspath(prefix)
    # Top-level entries that belong to conda itself, not the environment.
    ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
              'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
              '.unionfs', '.nonadmin'}
    # conda's own entry points in bin/.
    binignore = {'conda', 'activate', 'deactivate'}
    if sys.platform == 'darwin':
        ignore.update({'python.app', 'Launcher.app'})
    for fn in os.listdir(prefix):
        if ignore_predefined_files and fn in ignore:
            continue
        if isfile(join(prefix, fn)):
            # Plain top-level file.
            res.add(fn)
            continue
        for root, dirs, files in os.walk(join(prefix, fn)):
            # Only filter binignore entries directly inside bin/.
            should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
            for fn2 in files:
                if should_ignore and fn2 in binignore:
                    continue
                res.add(relpath(join(root, fn2), prefix))
            for dn in dirs:
                path = join(root, dn)
                if islink(path):
                    # Record symlinked dirs themselves; os.walk does not follow them.
                    res.add(relpath(path, prefix))
    if on_win and windows_forward_slashes:
        return {path.replace('\\', '/') for path in res}
    else:
        return res
def untracked(prefix, exclude_self_build=False):
    """Return the set of files in *prefix* that conda did not install.

    Editor backup files, macOS .DS_Store files, and .pyc files whose .py
    source is conda-managed are filtered out of the result.
    """
    managed = conda_installed_files(prefix, exclude_self_build)

    def _ignorable(path):
        # Noise, rather than genuinely untracked content.
        if path.endswith('~'):
            return True
        if sys.platform == 'darwin' and path.endswith('.DS_Store'):
            return True
        return path.endswith('.pyc') and path[:-1] in managed

    return {path for path in walk_prefix(prefix) - managed if not _ignorable(path)}
def which_prefix(path):
    """Return the conda environment prefix containing *path*, or None.

    Walks upward from *path* looking for a directory that contains a
    'conda-meta' subdirectory, which marks an environment root.
    """
    current = abspath(path)
    while True:
        if isdir(join(current, 'conda-meta')):
            # Found the environment root.
            return current
        parent = dirname(current)
        if parent == current:
            # Reached the filesystem root without finding a prefix.
            return None
        current = parent
def touch_nonadmin(prefix):
    """
    Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
    """
    if not on_win:
        return
    if not exists(join(context.root_dir, '.nonadmin')):
        return
    if not isdir(prefix):
        os.makedirs(prefix)
    # Touch an empty marker file; only its existence matters.
    with open(join(prefix, '.nonadmin'), 'w') as marker:
        marker.write('')
def append_env(prefix):
    """Best-effort: record *prefix* in ~/.conda/environments.txt."""
    conda_dir = abspath(expanduser('~/.conda'))
    try:
        if not isdir(conda_dir):
            os.mkdir(conda_dir)
        registry = join(conda_dir, 'environments.txt')
        with open(registry, 'a') as fh:
            fh.write('%s\n' % prefix)
    except IOError:
        # Recording the environment is optional; never fail the caller.
        pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
    """
    clone existing prefix1 into new prefix2

    Returns (actions, untracked_files): the executed install plan and the
    set of non-conda-managed files copied over.
    """
    untracked_files = untracked(prefix1)
    # Discard conda and any package that depends on it
    drecs = linked_data(prefix1)
    filter = {}
    found = True
    # Fixed-point loop: keep removing packages whose dependencies were removed.
    while found:
        found = False
        for dist, info in iteritems(drecs):
            name = info['name']
            if name in filter:
                continue
            if name == 'conda':
                filter['conda'] = dist
                found = True
                break
            for dep in info.get('depends', []):
                if MatchSpec(dep).name in filter:
                    filter[name] = dist
                    found = True
    if filter:
        if not quiet:
            print('The following packages cannot be cloned out of the root environment:')
            for pkg in itervalues(filter):
                # NOTE(review): pkg is a Dist value; ' - ' + pkg assumes Dist
                # is str-like -- confirm, otherwise this raises TypeError.
                print(' - ' + pkg)
        drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
    # Resolve URLs for packages that do not have URLs
    r = None
    index = {}
    unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
    notfound = []
    if unknowns:
        index_args = index_args or {}
        index = get_index(**index_args)
        r = Resolve(index, sort=True)
        for dist in unknowns:
            name = dist.dist_name
            fn = dist.to_filename()
            # Index entries sharing the exact same tarball filename.
            fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
            if fkeys:
                del drecs[dist]
                # Prefer the highest-versioned match.
                dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
                drecs[Dist(dist_str)] = r.index[dist_str]
            else:
                notfound.append(fn)
    if notfound:
        what = "Package%s " % ('' if len(notfound) == 1 else 's')
        notfound = '\n'.join(' - ' + fn for fn in notfound)
        msg = '%s missing in current %s channels:%s' % (what, context.subdir, notfound)
        raise CondaRuntimeError(msg)
    # Assemble the URL and channel list
    urls = {}
    for dist, info in iteritems(drecs):
        fkey = dist
        if fkey not in index:
            # Entry came from linked data, not the repodata index.
            info['not_fetched'] = True
            index[fkey] = info
            # Invalidate r so it is rebuilt over the augmented index below.
            r = None
        urls[dist] = info['url']
    if r is None:
        r = Resolve(index)
    # Install in dependency order.
    dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
    urls = [urls[d] for d in dists]
    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))
    # Copy untracked files over, rewriting embedded prefix paths in text files.
    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            rm_rf(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            # Recreate symlinks rather than copying their targets.
            os.symlink(os.readlink(src), dst)
            continue
        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue
        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass
        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)
    actions = explicit(urls, prefix2, verbose=not quiet, index=index,
                       force_extract=False, index_args=index_args)
    return actions, untracked_files
def make_icon_url(info):
    """Build the URL of a package's icon, or '' when none is available.

    The icon lives in an ``icons`` directory alongside the channel's
    platform subdirectory.
    """
    channel_url = info.get('channel')
    icon_fn = info.get('icon')
    if channel_url and icon_fn:
        return '%s/icons/%s' % (dirname(channel_url), icon_fn)
    return ''
def list_prefixes():
    """Yield every environment prefix conda knows about, root env last."""
    for envs_dir in context.envs_dirs:
        if not isdir(envs_dir):
            continue
        for entry in sorted(os.listdir(envs_dir)):
            # Hidden entries (e.g. '.pkgs') are not environments.
            if entry.startswith('.'):
                continue
            candidate = join(envs_dir, entry)
            if isdir(candidate):
                yield candidate
    yield context.root_dir
| conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -77,17 +77,17 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None,
# is_local: if the tarball is stored locally (file://)
# is_cache: if the tarball is sitting in our cache
is_local = not is_url(url) or url.startswith('file://')
- prefix = cached_url(url) if is_local else None
- is_cache = prefix is not None
+ url_prefix = cached_url(url) if is_local else None
+ is_cache = url_prefix is not None
if is_cache:
# Channel information from the cache
- schannel = DEFAULTS if prefix == '' else prefix[:-2]
+ schannel = DEFAULTS if url_prefix == '' else url_prefix[:-2]
else:
# Channel information from the URL
channel, schannel = Channel(url).url_channel_wtf
- prefix = '' if schannel == DEFAULTS else schannel + '::'
+ url_prefix = '' if schannel == DEFAULTS else schannel + '::'
- fn = prefix + fn
+ fn = url_prefix + fn
dist = Dist(fn[:-8])
# Add explicit file to index so we'll be sure to see it later
if is_local: |
Make http timeouts configurable
http://docs.python-requests.org/en/master/user/advanced/#timeouts
Also set the default timeouts to twice their current values.
CC @quasiben
| conda/base/context.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import sys
from collections import Sequence
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.decorators import memoizedproperty
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.path import expand
from ..common.compat import iteritems, odict
from ..common.configuration import (Configuration, LoadError, MapParameter, PrimitiveParameter,
SequenceParameter, ValidationError)
from ..common.disk import try_write, conda_bld_ensure_dir
from ..common.url import has_scheme, path_to_url, split_scheme_auth_token, urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
try:
from cytoolz.itertoolz import concat, concatv
except ImportError:
from .._vendor.toolz.itertoolz import concat, concatv
log = getLogger(__name__)
try:
import cio_test # NOQA
except ImportError:
log.info("No cio_test package found.")
# Map sys.platform values onto conda's canonical platform names.
_platform_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
}

# Linux machine names (from platform.machine()) that are not x86-based;
# these get dedicated subdir names such as 'linux-ppc64le'.
non_x86_linux_machines = {
    'armv6l',
    'armv7l',
    'ppc64le',
}

# Pointer-size (bits) to architecture-name mapping for x86 platforms.
_arch_names = {
    32: 'x86',
    64: 'x86_64',
}
def channel_alias_validation(value):
    """Validate a channel_alias setting.

    Returns True for an empty value or one carrying a scheme; otherwise an
    error-message string (the str-or-True convention of the config layer).
    """
    if not value or has_scheme(value):
        return True
    return "channel_alias value '%s' must have scheme/protocol." % value
class Context(Configuration):
    """Global conda configuration, merged from files, env vars and CLI args.

    The class-level Parameter descriptors declare every recognized setting
    (names prefixed with '_' are exposed under their alias). The @property
    and @memoizedproperty members derive values — paths, platform info,
    Channel objects — from those raw settings.
    """

    add_pip_as_python_dependency = PrimitiveParameter(True)
    allow_softlinks = PrimitiveParameter(True)
    auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
    changeps1 = PrimitiveParameter(True)
    create_default_packages = SequenceParameter(string_types)
    disallow = SequenceParameter(string_types)
    force_32bit = PrimitiveParameter(False)
    track_features = SequenceParameter(string_types)
    use_pip = PrimitiveParameter(True)
    _root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))

    # connection details
    ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
    client_ssl_cert = PrimitiveParameter('', aliases=('client_cert',))
    client_ssl_cert_key = PrimitiveParameter('', aliases=('client_cert_key',))
    proxy_servers = MapParameter(string_types)

    add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
    _channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS,
                                        aliases=('channel_alias',),
                                        validation=channel_alias_validation)

    # channels
    channels = SequenceParameter(string_types, default=('defaults',))
    _migrated_channel_aliases = SequenceParameter(string_types,
                                                  aliases=('migrated_channel_aliases',))  # TODO: also take a list of strings # NOQA
    _default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS,
                                          aliases=('default_channels',))
    _custom_channels = MapParameter(string_types, aliases=('custom_channels',))
    migrated_custom_channels = MapParameter(string_types)  # TODO: also take a list of strings
    _custom_multichannels = MapParameter(Sequence, aliases=('custom_multichannels',))

    # command line
    always_copy = PrimitiveParameter(False, aliases=('copy',))
    always_yes = PrimitiveParameter(False, aliases=('yes',))
    channel_priority = PrimitiveParameter(True)
    debug = PrimitiveParameter(False)
    json = PrimitiveParameter(False)
    offline = PrimitiveParameter(False)
    quiet = PrimitiveParameter(False)
    shortcuts = PrimitiveParameter(True)
    show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
    update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
    verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)

    # conda_build
    bld_path = PrimitiveParameter('')
    binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
                                        parameter_type=(bool, NoneType))
    _croot = PrimitiveParameter('', aliases=('croot',))
    conda_build = MapParameter(string_types, aliases=('conda-build',))

    @property
    def croot(self):
        """This is where source caches and work folders live"""
        # Precedence: explicit croot > bld_path > conda-build config
        # > root-level conda-bld (when writable) > ~/conda-bld
        if self._croot:
            return abspath(expanduser(self._croot))
        elif self.bld_path:
            return abspath(expanduser(self.bld_path))
        elif 'root-dir' in self.conda_build:
            return abspath(expanduser(self.conda_build['root-dir']))
        elif self.root_writable:
            return join(self.root_dir, 'conda-bld')
        else:
            return abspath(expanduser('~/conda-bld'))

    @property
    def src_cache(self):
        # conda-build source tarball cache; created on first access.
        path = join(self.croot, 'src_cache')
        conda_bld_ensure_dir(path)
        return path

    @property
    def git_cache(self):
        path = join(self.croot, 'git_cache')
        conda_bld_ensure_dir(path)
        return path

    @property
    def hg_cache(self):
        path = join(self.croot, 'hg_cache')
        conda_bld_ensure_dir(path)
        return path

    @property
    def svn_cache(self):
        path = join(self.croot, 'svn_cache')
        conda_bld_ensure_dir(path)
        return path

    def post_build_validation(self):
        # Cross-parameter checks run after the configuration is assembled.
        errors = []
        if self.client_ssl_cert_key and not self.client_ssl_cert:
            error = ValidationError('client_ssl_cert', self.client_ssl_cert, "<<merged>>",
                                    "'client_ssl_cert' is required when 'client_ssl_cert_key' "
                                    "is defined")
            errors.append(error)
        return errors

    _envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs', 'envs_path'),
                                   string_delimiter=os.pathsep)

    @property
    def default_python(self):
        # 'major.minor' of the interpreter running conda.
        ver = sys.version_info
        return '%d.%d' % (ver.major, ver.minor)

    @property
    def arch_name(self):
        m = machine()
        if self.platform == 'linux' and m in non_x86_linux_machines:
            return m
        else:
            return _arch_names[self.bits]

    @property
    def platform(self):
        return _platform_map.get(sys.platform, 'unknown')

    @property
    def subdir(self):
        # Repo subdirectory name, e.g. 'linux-64', 'osx-64', 'linux-ppc64le'.
        m = machine()
        if m in non_x86_linux_machines:
            return 'linux-%s' % m
        else:
            return '%s-%d' % (self.platform, self.bits)

    @property
    def bits(self):
        if self.force_32bit:
            return 32
        else:
            # Pointer size of the running interpreter.
            return 8 * tuple.__itemsize__

    @property
    def local_build_root(self):
        # TODO: import from conda_build, and fall back to something incredibly simple
        if self.bld_path:
            return expand(self.bld_path)
        elif self.root_writable:
            return join(self.conda_prefix, 'conda-bld')
        else:
            return expand('~/conda-bld')

    @property
    def root_dir(self):
        # root_dir is an alias for root_prefix, we prefer the name "root_prefix"
        # because it is more consistent with other names
        return abspath(expanduser(self._root_dir))

    @property
    def root_writable(self):
        return try_write(self.root_dir)

    @property
    def envs_dirs(self):
        # When root isn't writable, fall back to ~/.conda/envs first.
        return tuple(abspath(expanduser(p))
                     for p in concatv(self._envs_dirs,
                                      (join(self.root_dir, 'envs'), )
                                      if self.root_writable
                                      else ('~/.conda/envs', join(self.root_dir, 'envs'))))

    @property
    def pkgs_dirs(self):
        return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]

    @property
    def default_prefix(self):
        _default_env = os.getenv('CONDA_DEFAULT_ENV')
        if _default_env in (None, ROOT_ENV_NAME):
            return self.root_dir
        elif os.sep in _default_env:
            # CONDA_DEFAULT_ENV holds a path, not a name.
            return abspath(_default_env)
        else:
            for envs_dir in self.envs_dirs:
                default_prefix = join(envs_dir, _default_env)
                if isdir(default_prefix):
                    return default_prefix
            return join(self.envs_dirs[0], _default_env)

    @property
    def prefix(self):
        return get_prefix(self, self._argparse_args, False)

    @property
    def prefix_w_legacy_search(self):
        return get_prefix(self, self._argparse_args, True)

    @property
    def clone_src(self):
        assert self._argparse_args.clone is not None
        return locate_prefix_by_name(self, self._argparse_args.clone)

    @property
    def conda_in_root(self):
        return not conda_in_private_env()

    @property
    def conda_private(self):
        return conda_in_private_env()

    @property
    def root_prefix(self):
        # In a private '_conda' env the root install is two levels up.
        return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix

    @property
    def conda_prefix(self):
        return sys.prefix

    @memoizedproperty
    def channel_alias(self):
        from ..models.channel import Channel
        location, scheme, auth, token = split_scheme_auth_token(self._channel_alias)
        return Channel(scheme=scheme, auth=auth, location=location, token=token)

    @property
    def migrated_channel_aliases(self):
        from ..models.channel import Channel
        return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token)
                     for location, scheme, auth, token in
                     (split_scheme_auth_token(c) for c in self._migrated_channel_aliases))

    @memoizedproperty
    def default_channels(self):
        # the format for 'default_channels' is a list of strings that either
        #   - start with a scheme
        #   - are meant to be prepended with channel_alias
        from ..models.channel import Channel
        return tuple(Channel.make_simple_channel(self.channel_alias, v)
                     for v in self._default_channels)

    @memoizedproperty
    def local_build_root_channel(self):
        from ..models.channel import Channel
        url_parts = urlparse(path_to_url(self.local_build_root))
        location, name = url_parts.path.rsplit('/', 1)
        if not location:
            location = '/'
        return Channel(scheme=url_parts.scheme, location=location, name=name)

    @memoizedproperty
    def custom_multichannels(self):
        from ..models.channel import Channel
        # User-configured multichannels override the built-in ones.
        default_custom_multichannels = {
            'defaults': self.default_channels,
            'local': (self.local_build_root_channel,),
        }
        all_channels = default_custom_multichannels, self._custom_multichannels
        return odict((name, tuple(Channel(v) for v in c))
                     for name, c in concat(map(iteritems, all_channels)))

    @memoizedproperty
    def custom_channels(self):
        from ..models.channel import Channel
        custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
                           for name, url in iteritems(self._custom_channels))
        all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
        all_channels = (ch for ch in concat(all_sources))
        # Later sources win on name collision.
        return odict((x.name, x) for x in all_channels)
def conda_in_private_env():
    """True when conda runs from a private env named '_conda' under 'envs'."""
    prefix = sys.prefix
    return basename(prefix) == '_conda' and basename(dirname(prefix)) == 'envs'
def reset_context(search_path=SEARCH_PATH, argparse_args=None):
    """Re-initialize the global `context` singleton in place.

    Mutating the existing object (rather than rebinding the name) keeps
    references held elsewhere valid; Channel caches are reset to match.
    """
    context.__init__(search_path, conda, argparse_args)
    from ..models.channel import Channel
    Channel._reset_state()
    return context
def pkgs_dir_from_envs_dir(envs_dir):
    """Return the package cache directory paired with *envs_dir*."""
    root_envs = abspath(join(context.root_dir, 'envs'))
    if abspath(envs_dir) == root_envs:
        # The root envs dir shares the root-level cache
        # ('pkgs32' when 32-bit mode is forced).
        cache_name = 'pkgs32' if context.force_32bit else 'pkgs'
        return join(context.root_dir, cache_name)
    return join(envs_dir, '.pkgs')
def get_help_dict():
    # this is a function so that most of the time it's not evaluated and loaded into memory
    # NOTE(review): many entries below are empty dals() blocks — help text
    # was apparently never written for them.
    return {
        'add_pip_as_python_dependency': dals("""
            """),
        'always_yes': dals("""
            """),
        'always_copy': dals("""
            """),
        'changeps1': dals("""
            """),
        'use_pip': dals("""
            Use pip when listing packages with conda list. Note that this does not affect any
            conda command or functionality other than the output of the command conda list.
            """),
        'binstar_upload': dals("""
            """),
        'allow_softlinks': dals("""
            """),
        'self_update': dals("""
            """),
        'show_channel_urls': dals("""
            # show channel URLs when displaying what is going to be downloaded
            # None means letting conda decide
            """),
        'update_dependencies': dals("""
            """),
        'channel_priority': dals("""
            """),
        'ssl_verify': dals("""
            # ssl_verify can be a boolean value or a filename string
            """),
        'client_ssl_cert': dals("""
            # client_ssl_cert can be a path pointing to a single file
            # containing the private key and the certificate (e.g. .pem),
            # or use 'client_ssl_cert_key' in conjuction with 'client_ssl_cert' for
            # individual files
            """),
        'client_ssl_cert_key': dals("""
            # used in conjunction with 'client_ssl_cert' for a matching key file
            """),
        'track_features': dals("""
            """),
        'channels': dals("""
            """),
        'disallow': dals("""
            # set packages disallowed to be installed
            """),
        'create_default_packages': dals("""
            # packages which are added to a newly created environment by default
            """),
        'envs_dirs': dals("""
            """),
        'default_channels': dals("""
            """),
        'proxy_servers': dals("""
            """),
        'force_32bit': dals("""
            CONDA_FORCE_32BIT should only be used when running conda-build (in order
            to build 32-bit packages on a 64-bit system).  We don't want to mention it
            in the documentation, because it can mess up a lot of things.
            """)
    }
def get_prefix(ctx, args, search=True):
    """Resolve the environment prefix selected by the command-line args.

    Args:
        ctx: the context of conda
        args: the argparse args from the command line (uses .name, .prefix)
        search: when a name is given, whether to search existing env dirs

    Returns: the prefix

    Raises: CondaValueError when the name contains '/'; errors from
        locate_prefix_by_name may propagate when searching.
    """
    if args.name:
        if '/' in args.name:
            raise CondaValueError("'/' not allowed in environment name: %s" %
                                  args.name, getattr(args, 'json', False))
        if args.name == ROOT_ENV_NAME:
            return ctx.root_dir
        if search:
            return locate_prefix_by_name(ctx, args.name)
        return join(ctx.envs_dirs[0], args.name)
    if args.prefix:
        return abspath(expanduser(args.prefix))
    return ctx.default_prefix
def locate_prefix_by_name(ctx, name):
    """Find the location of a prefix given a conda env name.

    Args:
        ctx (Context): the context object
        name (str): the name of the prefix to find

    Returns:
        str: the location of the prefix found

    Raises:
        CondaEnvironmentNotFoundError: when no prefix with that name exists
        (the previous docstring incorrectly said CondaValueError)
    """
    if name == ROOT_ENV_NAME:
        return ctx.root_dir
    # Look for a directory named `name` in all envs_dirs AND in CWD.
    # (Plain tuple concatenation already yields one iterable; the former
    # itertools.chain wrapper around it was redundant.)
    for envs_dir in ctx.envs_dirs + (os.getcwd(),):
        prefix = join(envs_dir, name)
        if isdir(prefix):
            return prefix
    raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
    # Abort `command` with the read-only-root help message when the target
    # prefix lives under a root installation the user cannot write to.
    if inroot_notwritable(prefix):
        from conda.cli.help import root_read_only
        root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
    """
    return True if the prefix is under root and root is not writeable
    """
    under_root = abspath(prefix).startswith(context.root_dir)
    return under_root and not context.root_writable
# Build the single global configuration object at import time.  A LoadError
# (e.g. a malformed .condarc) is reported directly to stderr because the
# normal exception machinery is not available this early.
try:
    context = Context(SEARCH_PATH, conda, None)
except LoadError as e:
    print(e, file=sys.stderr)
    # Exception handler isn't loaded so use sys.exit
    sys.exit(1)
<|code_end|>
conda/core/index.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import bz2
import hashlib
import json
import requests
import warnings
from functools import wraps
from logging import DEBUG, getLogger
from os import makedirs
from os.path import dirname, join
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from .linked_data import linked_data
from .package_cache import package_cache
from .._vendor.auxlib.entity import EntityEncoder
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.logz import stringify
from ..base.constants import CONDA_HOMEPAGE_URL, DEFAULTS, MAX_CHANNEL_PRIORITY
from ..base.context import context
from ..common.compat import iteritems, itervalues
from ..common.url import join_url, url_to_path
from ..connection import CondaSession
from ..exceptions import CondaHTTPError, CondaRuntimeError
from ..lock import FileLock
from ..models.channel import Channel, offline_keep, prioritize_channels
from ..models.dist import Dist
from ..models.record import EMPTY_LINK, Record
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')

# When True, a connection failure raises before the final fallback raise
# in fetch_repodata (see the connection-error handler there).
fail_unknown_host = False
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=False, prefix=False):
    """
    Return the index of packages available on the channels

    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    if prepend:
        channel_urls += context.channels
    channel_urls = prioritize_channels(channel_urls, platform=platform)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)

    # supplement index with information from prefix/conda-meta
    if prefix:
        priorities = {chnl: prrty for chnl, prrty in itervalues(channel_urls)}
        maxp = max(itervalues(priorities)) + 1 if priorities else 1
        for dist, info in iteritems(linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # Bug fix: this used to rebind `prefix`, clobbering the function
            # argument (the env path) on the first loop iteration — the same
            # shadowing defect fixed in conda/misc.py's explicit().
            url_prefix = '' if schannel == DEFAULTS else schannel + '::'
            priority = priorities.get(schannel, maxp)
            key = Dist(url_prefix + fn)
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key] = index[key].copy()
                index[key]['link'] = info.get('link') or EMPTY_LINK
            else:
                # only if the package in not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                # If the schannel is known but the package is not in the index, it is
                # because 1) the channel is unavailable offline or 2) the package has
                # been removed from that channel.  Either way, we should prefer any
                # other version of the package to this one.
                info['priority'] = MAX_CHANNEL_PRIORITY if schannel in priorities else priority
                index[key] = info
    return index
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
def __init__(self, msg):
self.msg = msg
def __call__(self, f):
@wraps(f)
def func(*args, **kwargs):
res = f(*args, **kwargs)
dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
return res
return func
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch (or load from local cache) the repodata for one channel URL.

    Returns the repodata dict, ``{'packages': {}}`` for channels filtered
    out in offline mode, or ``None`` for an empty result (e.g. a missing
    noarch subdir).  Raises CondaHTTPError / CondaRuntimeError on hard
    failures.
    """
    if not offline_keep(url):
        return {'packages': {}}
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        log.debug("Opening repodata cache for %s at %s", url, cache_path)
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # No usable cache file; start from an empty skeleton.
        cache = {'packages': {}}

    if use_cache:
        return cache

    if not context.ssl_verify:
        warnings.simplefilter('ignore', InsecureRequestWarning)

    session = session or CondaSession()

    headers = {}
    # Conditional-request headers let the server answer 304 Not Modified
    # and spare a full download.
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]

    if 'repo.continuum.io' in url or url.startswith("file://"):
        filename = 'repodata.json.bz2'
        headers['Accept-Encoding'] = 'identity'
    else:
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'

    try:
        # timeout is requests' (connect, read) pair, in seconds
        resp = session.get(join_url(url, filename), headers=headers, proxies=session.proxies,
                           timeout=(3.05, 60))
        if log.isEnabledFor(DEBUG):
            log.debug(stringify(resp))
        resp.raise_for_status()

        if resp.status_code != 304:
            def get_json_str(filename, resp_content):
                if filename.endswith('.bz2'):
                    return bz2.decompress(resp_content).decode('utf-8')
                else:
                    return resp_content.decode('utf-8')

            if url.startswith('file://'):
                file_path = url_to_path(url)
                # Lock local channels so concurrent conda processes don't
                # read a half-written repodata file.
                with FileLock(dirname(file_path)):
                    json_str = get_json_str(filename, resp.content)
            else:
                json_str = get_json_str(filename, resp.content)

            cache = json.loads(json_str)
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')

    except ValueError as e:
        raise CondaRuntimeError("Invalid index file: {0}: {1}".format(join_url(url, filename), e))

    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            if url.endswith('/noarch'):  # noarch directory might not exist
                return None
            help_message = dals("""
                The remote server could not find the channel you requested.

                You will need to adjust your conda configuration to proceed.
                Use `conda config --show` to view your configuration's current state.
                Further configuration help can be found at <%s>.
                """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))

        elif e.response.status_code == 403:
            if url.endswith('/noarch'):
                return None
            else:
                help_message = dals("""
                    The channel you requested is not available on the remote server.

                    You will need to adjust your conda configuration to proceed.
                    Use `conda config --show` to view your configuration's current state.
                    Further configuration help can be found at <%s>.
                    """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))

        elif e.response.status_code == 401:
            channel = Channel(url)
            if channel.token:
                help_message = dals("""
                    The token '%s' given for the URL is invalid.

                    If this token was pulled from anaconda-client, you will need to use
                    anaconda-client to reauthenticate.

                    If you supplied this token to conda directly, you will need to adjust your
                    conda configuration to proceed.

                    Use `conda config --show` to view your configuration's current state.
                    Further configuration help can be found at <%s>.
                    """ % (channel.token, join_url(CONDA_HOMEPAGE_URL, 'docs/config.html')))

            elif context.channel_alias.location in url:
                # Note, this will not trigger if the binstar configured url does
                # not match the conda configured one.
                help_message = dals("""
                    The remote server has indicated you are using invalid credentials for this channel.

                    If the remote site is anaconda.org or follows the Anaconda Server API, you
                    will need to
                      (a) login to the site with `anaconda login`, or
                      (b) provide conda with a valid token directly.

                    Further configuration help can be found at <%s>.
                    """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))

            else:
                help_message = dals("""
                    The credentials you have provided for this URL are invalid.

                    You will need to modify your conda configuration to proceed.
                    Use `conda config --show` to view your configuration's current state.
                    Further configuration help can be found at <%s>.
                    """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))

        elif 500 <= e.response.status_code < 600:
            help_message = dals("""
                An remote server error occurred when trying to retrieve this URL.

                A 500-type error (e.g. 500, 501, 502, 503, etc.) indicates the server failed to
                fulfill a valid request.  The problem may be spurious, and will resolve itself if you
                try your request again.  If the problem persists, consider notifying the maintainer
                of the remote server.
                """)

        else:
            help_message = "An HTTP error occurred when trying to retrieve this URL."

        raise CondaHTTPError(help_message, e.response.url, e.response.status_code,
                             e.response.reason)

    except requests.exceptions.SSLError as e:
        # SSL failures are logged but do NOT raise: execution falls through
        # to the cache-write below, returning whatever cache we already had.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)

    except requests.exceptions.ConnectionError as e:
        msg = "Connection error: %s: %s\n" % (e, url)
        stderrlog.info('Could not connect to %s\n' % url)
        log.debug(msg)
        if fail_unknown_host:
            raise CondaRuntimeError(msg)
        # NOTE(review): this unconditional raise makes the fail_unknown_host
        # branch above redundant — confirm whether falling back to the cached
        # repodata was intended here instead.
        raise CondaRuntimeError(msg)

    # Reached on success (200/304) and after an SSLError; persist the cache
    # best-effort (an unwritable cache dir is not fatal).
    cache['_url'] = url
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True, cls=EntityEncoder)
    except IOError:
        pass

    return cache or None
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """Fetch repodata for every channel and flatten it into a Dist->Record index.

    channel_urls: mapping of url -> (canonical_name, priority), as produced
    by prioritize_channels().
    """
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    if not context.json:
        stdoutlog.info("Fetching package metadata ...")
    # In offline mode only file:// channels survive this filter.
    urls = tuple(filter(offline_keep, channel_urls))
    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError) as e:
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        log.debug(repr(e))
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in urls]
    else:
        try:
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        except RuntimeError as e:
            # Cannot start new thread, then give up parallel execution
            log.debug(repr(e))
            session = CondaSession()
            repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                         for url in urls]
        finally:
            executor.shutdown(wait=True)

    def make_index(repodatas):
        # Flatten per-channel repodata into one Dist -> Record mapping,
        # annotating each record with its channel, priority and full URL.
        result = dict()
        for channel_url, repodata in repodatas:
            if repodata is None:
                continue
            canonical_name, priority = channel_urls[channel_url]
            channel = Channel(channel_url)
            for fn, info in iteritems(repodata['packages']):
                full_url = join_url(channel_url, fn)
                info.update(dict(fn=fn,
                                 schannel=canonical_name,
                                 channel=channel_url,
                                 priority=priority,
                                 url=full_url,
                                 auth=channel.auth,
                                 ))
                # 'defaults' packages are keyed by bare filename; all other
                # channels use the 'channel::filename' form.
                key = Dist(canonical_name + '::' + fn if canonical_name != 'defaults' else fn)
                result[key] = Record(**info)
        return result

    index = make_index(repodatas)

    if not context.json:
        stdoutlog.info('\n')
    if unknown:
        # Fold in locally-cached packages absent from the channels.
        add_unknown(index, channel_urls)
    if context.add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def cache_fn_url(url):
    """Map a channel URL to its local repodata cache filename.

    Uses the first 8 hex digits of the URL's MD5 so distinct channels get
    distinct, filesystem-safe names.
    """
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy response header *http_key* into *d* under *dict_key*, if set.

    Missing or empty headers leave *d* untouched.
    """
    header_value = resp.headers.get(http_key)
    if header_value:
        d[dict_key] = header_value
def add_unknown(index, priorities):
    """Add packages found only in the local package cache to *index*.

    priorities: mapping whose values are (schannel, priority) pairs; cached
    packages from channels not in that mapping get priority maxp (one above
    the current maximum).
    """
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        # schannel, dname = dist2pair(dist)
        fname = dist.to_filename()
        # fkey = dist + '.tar.bz2'
        # Only extracted packages missing from the index are candidates.
        if dist in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = Record(**json.load(fi))
        except IOError:
            continue
        # Reconstruct the package URL from the best available source.
        if info['urls']:
            url = info['urls'][0]
        elif meta.get('url'):
            url = meta['url']
        elif meta.get('channel'):
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        if url.rsplit('/', 1)[-1] != fname:
            # URL does not actually point at this filename; skip it.
            continue
        channel, schannel2 = Channel(url).url_channel_wtf
        if schannel2 != dist.channel:
            continue
        priority = priorities.get(dist.channel, maxp)
        if 'link' in meta:
            # 'link' is prefix-specific state; it must not leak into the index.
            del meta['link']
        meta.update({'fn': fname,
                     'url': url,
                     'channel': channel,
                     'schannel': dist.channel,
                     'priority': priority,
                     })
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % dist)
        index[dist] = meta
def add_pip_dependency(index):
    """Append 'pip' to the depends of every python 2.x/3.x record in *index*."""
    for record in index.values():
        is_python = record['name'] == 'python'
        if is_python and record['version'].startswith(('2.', '3.')):
            record['depends'] = record['depends'] + ('pip',)
def create_cache_dir():
    """Return the repodata cache directory, creating it when absent."""
    path = join(context.pkgs_dirs[0], 'cache')
    try:
        makedirs(path)
    except OSError:
        pass  # already present, or unwritable — callers cope with IO errors
    return path
<|code_end|>
conda/core/package_cache.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import hashlib
import os
import requests
import sys
import tarfile
import warnings
from logging import getLogger
from os.path import basename, dirname, exists, isdir, isfile, join
from ..base.constants import DEFAULTS
from ..base.context import context
from ..common.disk import exp_backoff_fn, rm_rf
from ..common.url import path_to_url, maybe_add_auth
from ..connection import CondaSession, RETRIES
from ..exceptions import CondaRuntimeError, CondaSignatureError, MD5MismatchError
from ..lock import FileLock
from ..models.channel import Channel, offline_keep
from ..models.dist import Dist
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# In-memory cache: Dist -> {'files': [...], 'dirs': [...], 'urls': [...]}.
package_cache_ = {}
# Maps both a package's path and its file:// URL to its channel prefix string.
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the module-level tables are initialized
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        # A bare filename was passed (directory scan); no URL is known.
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    xdir = join(pdir, dist)
    # An extracted copy only counts when its essential metadata files exist.
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = url  # NOTE(review): no-op; likely a remnant of removed logic
    # make dist
    schannel = Channel(url).canonical_name
    prefix = '' if schannel == DEFAULTS else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # Register both the filesystem path and its file:// form for lookups.
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    dist = Dist(fkey)
    rec = package_cache_.get(dist)
    if rec is None:
        rec = package_cache_[dist] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Best-effort persistence; failing to record the URL is non-fatal.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Stops recursion: add_cached_package() calls back into this function,
    # and the '@' sentinel makes package_cache_ truthy so the early return
    # above fires on re-entry.
    package_cache_['@'] = None
    for pdir in context.pkgs_dirs:
        try:
            # NOTE(review): this file handle is never explicitly closed.
            data = open(join(pdir, 'urls.txt')).read()
            # iterate newest-first so the most recent URL registers first
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # pick up packages present on disk but missing from urls.txt
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the schannel prefix recorded for *url*, or None if unknown."""
    package_cache()  # ensure the lookup tables are populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # already cached: reuse whichever directory holds it
        return dirname((rec['files'] or rec['dirs'])[0]), None
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, dist.to_filename())
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                # on pass 1 (p == 1) report the conflicting package's name
                return pkg_dir, prefix + dist.dist_name if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the (set of canonical names) of all fetched packages
    """
    cache = package_cache()
    return {dist for dist, rec in cache.items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # drop both lookup keys (path and file:// URL) for this package
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    # forget the package entirely, even if removal partially failed above
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    return the (set of canonical names) of all extracted packages
    """
    cache = package_cache()
    return {dist for dist, rec in cache.items() if rec['dirs']}
def is_extracted(dist):
    """
    returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # a fetched tarball remains, so keep the cache entry alive
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]  # strip the '.tar.bz2' suffix
    with FileLock(path):
        # extract into a temp dir first so a partial extraction never
        # masquerades as a complete package
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        # NOTE(review): extractall() without member sanitization trusts the
        # archive contents.
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
    add_cached_package(pkgs_dir, url, overwrite=True)
def read_url(dist):
    """Return the first URL recorded for *dist* in the cache, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    session = session or CondaSession()
    fn = info['fn']
    url = info.get('url') or info['channel'] + '/' + fn
    url = maybe_add_auth(url, info.get('auth'))
    log.debug("url=%r" % url)
    if dst_dir is None:
        dst_dir = find_new_location(Dist(fn))[0]
    path = join(dst_dir, fn)
    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        from ..signature import verify
        fn2 = fn + '.sig'
        # a sig value of '.' means the signature lives in the package's own channel
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except CondaSignatureError:
            raise
        # verify() returned falsy without raising: treat as invalid signature
        raise CondaSignatureError("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None):
    # Download `url` to `dst_path` via a '.part' temp file, optionally
    # verifying its MD5. Retries are implemented by recursing with a
    # decremented `retries` count.
    assert "::" not in str(dst_path), str(dst_path)
    if not offline_keep(url):
        raise RuntimeError("Cannot download in offline mode: %s" % (url,))
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()
    if not context.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)
    if retries is None:
        retries = RETRIES
    with FileLock(dst_path):
        rm_rf(dst_path)
        try:
            resp = session.get(url, stream=True, proxies=session.proxies, timeout=(3.05, 27))
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise CondaRuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise CondaRuntimeError(msg)
        except IOError as e:
            raise CondaRuntimeError("Could not open '%s': %s" % (url, e))
        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                index = 0
                for chunk in resp.iter_content(2**14):
                    index += len(chunk)
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise CondaRuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # report progress while the byte count stays in range
                    if size and 0 <= index <= size:
                        getLogger('fetch.update').info(index)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise CondaRuntimeError("Could not open %r for writing (%s)." % (pp, e))
        if size:
            getLogger('fetch.stop').info(None)
        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise MD5MismatchError("MD5 sums mismatch for download: %s (%s != %s)"
                                   % (url, h.hexdigest(), md5))
        # atomically move the verified '.part' file into place
        try:
            exp_backoff_fn(os.rename, pp, dst_path)
        except OSError as e:
            raise CondaRuntimeError("Could not rename %r to %r: %r" %
                                    (pp, dst_path, e))
        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
<|code_end|>
| conda/base/context.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import sys
from collections import Sequence
from itertools import chain
from logging import getLogger
from os.path import abspath, basename, dirname, expanduser, isdir, join
from platform import machine
from .constants import DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME, SEARCH_PATH, conda
from .._vendor.auxlib.compat import NoneType, string_types
from .._vendor.auxlib.decorators import memoizedproperty
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.path import expand
from ..common.compat import iteritems, odict
from ..common.configuration import (Configuration, LoadError, MapParameter, PrimitiveParameter,
SequenceParameter, ValidationError)
from ..common.disk import try_write, conda_bld_ensure_dir
from ..common.url import has_scheme, path_to_url, split_scheme_auth_token, urlparse
from ..exceptions import CondaEnvironmentNotFoundError, CondaValueError
try:
from cytoolz.itertoolz import concat, concatv
except ImportError:
from .._vendor.toolz.itertoolz import concat, concatv
log = getLogger(__name__)
try:
import cio_test # NOQA
except ImportError:
log.info("No cio_test package found.")
# Map sys.platform values onto conda's canonical platform names.
_platform_map = {
    'linux2': 'linux',
    'linux': 'linux',
    'darwin': 'osx',
    'win32': 'win',
}
# platform.machine() values for Linux machines that are not x86-based.
non_x86_linux_machines = {
    'armv6l',
    'armv7l',
    'ppc64le',
}
# Pointer size in bits -> architecture name.
_arch_names = {
    32: 'x86',
    64: 'x86_64',
}
def channel_alias_validation(value):
    """Require any non-empty channel_alias to carry an explicit URL scheme.

    Returns True when valid, or an error-message string otherwise.
    """
    if not value or has_scheme(value):
        return True
    return "channel_alias value '%s' must have scheme/protocol." % value
class Context(Configuration):
    """Global conda configuration, merged from config files, environment
    variables, and command-line arguments (see Configuration)."""
    add_pip_as_python_dependency = PrimitiveParameter(True)
    allow_softlinks = PrimitiveParameter(True)
    auto_update_conda = PrimitiveParameter(True, aliases=('self_update',))
    changeps1 = PrimitiveParameter(True)
    create_default_packages = SequenceParameter(string_types)
    disallow = SequenceParameter(string_types)
    force_32bit = PrimitiveParameter(False)
    track_features = SequenceParameter(string_types)
    use_pip = PrimitiveParameter(True)
    _root_dir = PrimitiveParameter(sys.prefix, aliases=('root_dir',))
    # connection details
    ssl_verify = PrimitiveParameter(True, parameter_type=string_types + (bool,))
    client_ssl_cert = PrimitiveParameter('', aliases=('client_cert',))
    client_ssl_cert_key = PrimitiveParameter('', aliases=('client_cert_key',))
    proxy_servers = MapParameter(string_types)
    add_anaconda_token = PrimitiveParameter(True, aliases=('add_binstar_token',))
    _channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS,
                                        aliases=('channel_alias',),
                                        validation=channel_alias_validation)
    http_connect_timeout_secs = PrimitiveParameter(6.1)
    http_read_timeout_secs = PrimitiveParameter(60.)
    # channels
    channels = SequenceParameter(string_types, default=('defaults',))
    _migrated_channel_aliases = SequenceParameter(string_types,
                                                  aliases=('migrated_channel_aliases',)) # TODO: also take a list of strings # NOQA
    _default_channels = SequenceParameter(string_types, DEFAULT_CHANNELS,
                                          aliases=('default_channels',))
    _custom_channels = MapParameter(string_types, aliases=('custom_channels',))
    migrated_custom_channels = MapParameter(string_types)  # TODO: also take a list of strings
    _custom_multichannels = MapParameter(Sequence, aliases=('custom_multichannels',))
    # command line
    always_copy = PrimitiveParameter(False, aliases=('copy',))
    always_yes = PrimitiveParameter(False, aliases=('yes',))
    channel_priority = PrimitiveParameter(True)
    debug = PrimitiveParameter(False)
    json = PrimitiveParameter(False)
    offline = PrimitiveParameter(False)
    quiet = PrimitiveParameter(False)
    shortcuts = PrimitiveParameter(True)
    show_channel_urls = PrimitiveParameter(None, parameter_type=(bool, NoneType))
    update_dependencies = PrimitiveParameter(True, aliases=('update_deps',))
    verbosity = PrimitiveParameter(0, aliases=('verbose',), parameter_type=int)
    # conda_build
    bld_path = PrimitiveParameter('')
    binstar_upload = PrimitiveParameter(None, aliases=('anaconda_upload',),
                                        parameter_type=(bool, NoneType))
    _croot = PrimitiveParameter('', aliases=('croot',))
    conda_build = MapParameter(string_types, aliases=('conda-build',))
    @property
    def croot(self):
        """This is where source caches and work folders live"""
        # resolution order: explicit croot > bld_path > conda-build config
        # > root prefix (if writable) > user home
        if self._croot:
            return abspath(expanduser(self._croot))
        elif self.bld_path:
            return abspath(expanduser(self.bld_path))
        elif 'root-dir' in self.conda_build:
            return abspath(expanduser(self.conda_build['root-dir']))
        elif self.root_writable:
            return join(self.root_dir, 'conda-bld')
        else:
            return abspath(expanduser('~/conda-bld'))
    @property
    def src_cache(self):
        """Directory for downloaded source archives (created on demand)."""
        path = join(self.croot, 'src_cache')
        conda_bld_ensure_dir(path)
        return path
    @property
    def git_cache(self):
        """Directory for cached git checkouts (created on demand)."""
        path = join(self.croot, 'git_cache')
        conda_bld_ensure_dir(path)
        return path
    @property
    def hg_cache(self):
        """Directory for cached mercurial checkouts (created on demand)."""
        path = join(self.croot, 'hg_cache')
        conda_bld_ensure_dir(path)
        return path
    @property
    def svn_cache(self):
        """Directory for cached subversion checkouts (created on demand)."""
        path = join(self.croot, 'svn_cache')
        conda_bld_ensure_dir(path)
        return path
    def post_build_validation(self):
        """Cross-parameter validation; returns a list of ValidationErrors."""
        errors = []
        if self.client_ssl_cert_key and not self.client_ssl_cert:
            error = ValidationError('client_ssl_cert', self.client_ssl_cert, "<<merged>>",
                                    "'client_ssl_cert' is required when 'client_ssl_cert_key' "
                                    "is defined")
            errors.append(error)
        return errors
    _envs_dirs = SequenceParameter(string_types, aliases=('envs_dirs', 'envs_path'),
                                   string_delimiter=os.pathsep)
    @property
    def default_python(self):
        """The running interpreter's 'major.minor' version string."""
        ver = sys.version_info
        return '%d.%d' % (ver.major, ver.minor)
    @property
    def arch_name(self):
        m = machine()
        if self.platform == 'linux' and m in non_x86_linux_machines:
            return m
        else:
            return _arch_names[self.bits]
    @property
    def platform(self):
        return _platform_map.get(sys.platform, 'unknown')
    @property
    def subdir(self):
        """Repository subdirectory for this platform, e.g. 'linux-64'."""
        m = machine()
        if m in non_x86_linux_machines:
            return 'linux-%s' % m
        else:
            return '%s-%d' % (self.platform, self.bits)
    @property
    def bits(self):
        if self.force_32bit:
            return 32
        else:
            # pointer size of the running interpreter, in bits
            return 8 * tuple.__itemsize__
    @property
    def local_build_root(self):
        # TODO: import from conda_build, and fall back to something incredibly simple
        if self.bld_path:
            return expand(self.bld_path)
        elif self.root_writable:
            return join(self.conda_prefix, 'conda-bld')
        else:
            return expand('~/conda-bld')
    @property
    def root_dir(self):
        # root_dir is an alias for root_prefix, we prefer the name "root_prefix"
        # because it is more consistent with other names
        return abspath(expanduser(self._root_dir))
    @property
    def root_writable(self):
        return try_write(self.root_dir)
    @property
    def envs_dirs(self):
        # configured dirs first; a non-writable root prepends ~/.conda/envs
        # ahead of the root-based default
        return tuple(abspath(expanduser(p))
                     for p in concatv(self._envs_dirs,
                                      (join(self.root_dir, 'envs'), )
                                      if self.root_writable
                                      else ('~/.conda/envs', join(self.root_dir, 'envs'))))
    @property
    def pkgs_dirs(self):
        return [pkgs_dir_from_envs_dir(envs_dir) for envs_dir in self.envs_dirs]
    @property
    def default_prefix(self):
        _default_env = os.getenv('CONDA_DEFAULT_ENV')
        if _default_env in (None, ROOT_ENV_NAME):
            return self.root_dir
        elif os.sep in _default_env:
            # CONDA_DEFAULT_ENV holds a path rather than a name
            return abspath(_default_env)
        else:
            for envs_dir in self.envs_dirs:
                default_prefix = join(envs_dir, _default_env)
                if isdir(default_prefix):
                    return default_prefix
            return join(self.envs_dirs[0], _default_env)
    @property
    def prefix(self):
        return get_prefix(self, self._argparse_args, False)
    @property
    def prefix_w_legacy_search(self):
        return get_prefix(self, self._argparse_args, True)
    @property
    def clone_src(self):
        assert self._argparse_args.clone is not None
        return locate_prefix_by_name(self, self._argparse_args.clone)
    @property
    def conda_in_root(self):
        return not conda_in_private_env()
    @property
    def conda_private(self):
        return conda_in_private_env()
    @property
    def root_prefix(self):
        # a private '_conda' env lives two levels under the real root
        return abspath(join(sys.prefix, '..', '..')) if conda_in_private_env() else sys.prefix
    @property
    def conda_prefix(self):
        return sys.prefix
    @memoizedproperty
    def channel_alias(self):
        from ..models.channel import Channel
        location, scheme, auth, token = split_scheme_auth_token(self._channel_alias)
        return Channel(scheme=scheme, auth=auth, location=location, token=token)
    @property
    def migrated_channel_aliases(self):
        from ..models.channel import Channel
        return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token)
                     for location, scheme, auth, token in
                     (split_scheme_auth_token(c) for c in self._migrated_channel_aliases))
    @memoizedproperty
    def default_channels(self):
        # the format for 'default_channels' is a list of strings that either
        #   - start with a scheme
        #   - are meant to be prepended with channel_alias
        from ..models.channel import Channel
        return tuple(Channel.make_simple_channel(self.channel_alias, v)
                     for v in self._default_channels)
    @memoizedproperty
    def local_build_root_channel(self):
        from ..models.channel import Channel
        url_parts = urlparse(path_to_url(self.local_build_root))
        location, name = url_parts.path.rsplit('/', 1)
        if not location:
            location = '/'
        return Channel(scheme=url_parts.scheme, location=location, name=name)
    @memoizedproperty
    def custom_multichannels(self):
        from ..models.channel import Channel
        default_custom_multichannels = {
            'defaults': self.default_channels,
            'local': (self.local_build_root_channel,),
        }
        # user-configured multichannels override the built-in defaults
        all_channels = default_custom_multichannels, self._custom_multichannels
        return odict((name, tuple(Channel(v) for v in c))
                     for name, c in concat(map(iteritems, all_channels)))
    @memoizedproperty
    def custom_channels(self):
        from ..models.channel import Channel
        custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
                           for name, url in iteritems(self._custom_channels))
        all_sources = self.default_channels, (self.local_build_root_channel,), custom_channels
        all_channels = (ch for ch in concat(all_sources))
        return odict((x.name, x) for x in all_channels)
def conda_in_private_env():
    """True when conda runs from its own private '_conda' environment."""
    # conda is located in its own private environment named '_conda'
    prefix = sys.prefix
    return basename(dirname(prefix)) == 'envs' and basename(prefix) == '_conda'
def reset_context(search_path=SEARCH_PATH, argparse_args=None):
    """Re-initialize the global context object in place and return it."""
    context.__init__(search_path, conda, argparse_args)
    # Channel keeps module-level state derived from context; reset it too.
    from ..models.channel import Channel
    Channel._reset_state()
    return context
def pkgs_dir_from_envs_dir(envs_dir):
    """Map an envs directory to its associated package cache directory."""
    if abspath(envs_dir) == abspath(join(context.root_dir, 'envs')):
        # the root envs dir shares the root prefix's pkgs dir
        return join(context.root_dir, 'pkgs32' if context.force_32bit else 'pkgs')
    else:
        return join(envs_dir, '.pkgs')
def get_help_dict():
    """Return help/annotation text for user-facing configuration parameters."""
    # this is a function so that most of the time it's not evaluated and loaded into memory
    return {
        'add_pip_as_python_dependency': dals("""
            """),
        'always_yes': dals("""
            """),
        'always_copy': dals("""
            """),
        'changeps1': dals("""
            """),
        'use_pip': dals("""
            Use pip when listing packages with conda list. Note that this does not affect any
            conda command or functionality other than the output of the command conda list.
            """),
        'binstar_upload': dals("""
            """),
        'allow_softlinks': dals("""
            """),
        'self_update': dals("""
            """),
        'show_channel_urls': dals("""
            # show channel URLs when displaying what is going to be downloaded
            # None means letting conda decide
            """),
        'update_dependencies': dals("""
            """),
        'channel_priority': dals("""
            """),
        'ssl_verify': dals("""
            # ssl_verify can be a boolean value or a filename string
            """),
        'client_ssl_cert': dals("""
            # client_ssl_cert can be a path pointing to a single file
            # containing the private key and the certificate (e.g. .pem),
            # or use 'client_ssl_cert_key' in conjuction with 'client_ssl_cert' for
            # individual files
            """),
        'client_ssl_cert_key': dals("""
            # used in conjunction with 'client_ssl_cert' for a matching key file
            """),
        'track_features': dals("""
            """),
        'channels': dals("""
            """),
        'disallow': dals("""
            # set packages disallowed to be installed
            """),
        'create_default_packages': dals("""
            # packages which are added to a newly created environment by default
            """),
        'envs_dirs': dals("""
            """),
        'default_channels': dals("""
            """),
        'proxy_servers': dals("""
            """),
        'force_32bit': dals("""
            CONDA_FORCE_32BIT should only be used when running conda-build (in order
            to build 32-bit packages on a 64-bit system).  We don't want to mention it
            in the documentation, because it can mess up a lot of things.
            """)
    }
def get_prefix(ctx, args, search=True):
    """Get the prefix to operate in
    Args:
        ctx: the context of conda
        args: the argparse args from the command line
        search: whether search for prefix
    Returns: the prefix
    Raises: CondaEnvironmentNotFoundError if the prefix is invalid
    """
    if args.name:
        if '/' in args.name:
            raise CondaValueError("'/' not allowed in environment name: %s" %
                                  args.name, getattr(args, 'json', False))
        if args.name == ROOT_ENV_NAME:
            return ctx.root_dir
        if search:
            return locate_prefix_by_name(ctx, args.name)
        else:
            # without search, assume the first envs dir without checking disk
            return join(ctx.envs_dirs[0], args.name)
    elif args.prefix:
        return abspath(expanduser(args.prefix))
    else:
        return ctx.default_prefix
def locate_prefix_by_name(ctx, name):
    """Find the location of a prefix given a conda env name.

    Args:
        ctx (Context): the context object
        name (str): the name of prefix to find
    Returns:
        str: the location of the prefix found
    Raises:
        CondaValueError: when no prefix is found
    """
    if name == ROOT_ENV_NAME:
        return ctx.root_dir
    # search every configured envs directory, then the current working directory
    search_dirs = ctx.envs_dirs + (os.getcwd(),)
    for candidate_dir in search_dirs:
        candidate = join(candidate_dir, name)
        if isdir(candidate):
            return candidate
    raise CondaEnvironmentNotFoundError(name)
def check_write(command, prefix, json=False):
    """Show the read-only-root help for *command* when *prefix* is under an
    unwritable root prefix."""
    if inroot_notwritable(prefix):
        # local import — presumably avoids a circular import; verify
        from conda.cli.help import root_read_only
        root_read_only(command, prefix, json=json)
def inroot_notwritable(prefix):
    """
    return True if the prefix is under root and root is not writeable
    """
    # NOTE(review): plain startswith() on a path can also match sibling
    # directories that merely share the root prefix as a string prefix.
    return (abspath(prefix).startswith(context.root_dir) and
            not context.root_writable)
# Build the global context singleton at import time; a configuration load
# error is fatal because nothing else can run without a context.
try:
    context = Context(SEARCH_PATH, conda, None)
except LoadError as e:
    print(e, file=sys.stderr)
    # Exception handler isn't loaded so use sys.exit
    sys.exit(1)
conda/core/index.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import bz2
import hashlib
import json
import requests
import warnings
from functools import wraps
from logging import DEBUG, getLogger
from os import makedirs
from os.path import dirname, join
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from .linked_data import linked_data
from .package_cache import package_cache
from .._vendor.auxlib.entity import EntityEncoder
from .._vendor.auxlib.ish import dals
from .._vendor.auxlib.logz import stringify
from ..base.constants import CONDA_HOMEPAGE_URL, DEFAULTS, MAX_CHANNEL_PRIORITY
from ..base.context import context
from ..common.compat import iteritems, itervalues
from ..common.url import join_url, url_to_path
from ..connection import CondaSession
from ..exceptions import CondaHTTPError, CondaRuntimeError
from ..lock import FileLock
from ..models.channel import Channel, offline_keep, prioritize_channels
from ..models.dist import Dist
from ..models.record import EMPTY_LINK, Record
log = getLogger(__name__)
dotlog = getLogger('dotupdate')
stdoutlog = getLogger('stdoutlog')
stderrlog = getLogger('stderrlog')
fail_unknown_host = False
def get_index(channel_urls=(), prepend=True, platform=None,
              use_local=False, use_cache=False, unknown=False, prefix=False):
    """
    Return the index of packages available on the channels
    If prepend=False, only the channels passed in as arguments are used.
    If platform=None, then the current platform is used.
    If prefix is supplied, then the packages installed in that prefix are added.
    """
    if use_local:
        channel_urls = ['local'] + list(channel_urls)
    if prepend:
        channel_urls += context.channels
    channel_urls = prioritize_channels(channel_urls, platform=platform)
    index = fetch_index(channel_urls, use_cache=use_cache, unknown=unknown)
    # supplement index with information from prefix/conda-meta
    if prefix:
        priorities = {chnl: prrty for chnl, prrty in itervalues(channel_urls)}
        maxp = max(itervalues(priorities)) + 1 if priorities else 1
        for dist, info in iteritems(linked_data(prefix)):
            fn = info['fn']
            schannel = info['schannel']
            # NOTE(review): 'prefix' (the function argument) is reused here as
            # the channel-name prefix string; safe only because the argument is
            # not read again after linked_data(prefix) above.
            prefix = '' if schannel == DEFAULTS else schannel + '::'
            priority = priorities.get(schannel, maxp)
            key = Dist(prefix + fn)
            if key in index:
                # Copy the link information so the resolver knows this is installed
                index[key] = index[key].copy()
                index[key]['link'] = info.get('link') or EMPTY_LINK
            else:
                # only if the package is not in the repodata, use local
                # conda-meta (with 'depends' defaulting to [])
                info.setdefault('depends', [])
                # If the schannel is known but the package is not in the index, it is
                # because 1) the channel is unavailable offline or 2) the package has
                # been removed from that channel. Either way, we should prefer any
                # other version of the package to this one.
                info['priority'] = MAX_CHANNEL_PRIORITY if schannel in priorities else priority
                index[key] = info
    return index
# We need a decorator so that the dot gets printed *after* the repodata is fetched
class dotlog_on_return(object):
    """Decorator: log *msg* plus call args on the dotupdate logger after
    the wrapped function returns."""
    def __init__(self, msg):
        self.msg = msg
    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            dotlog.debug("%s args %s kwargs %s" % (self.msg, args, kwargs))
            return result
        return wrapper
@dotlog_on_return("fetching repodata:")
def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
    """Fetch repodata.json for *url*, honoring the on-disk cache via HTTP
    Etag/Last-Modified validators. Returns the parsed repodata dict (the
    cached copy on a 304), or None for a missing noarch directory."""
    if not offline_keep(url):
        return {'packages': {}}
    cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url))
    try:
        log.debug("Opening repodata cache for %s at %s", url, cache_path)
        with open(cache_path) as f:
            cache = json.load(f)
    except (IOError, ValueError):
        # missing or corrupt cache file: start from an empty repodata
        cache = {'packages': {}}
    if use_cache:
        return cache
    if not context.ssl_verify:
        warnings.simplefilter('ignore', InsecureRequestWarning)
    session = session or CondaSession()
    headers = {}
    # send cache validators so the server can answer 304 Not Modified
    if "_etag" in cache:
        headers["If-None-Match"] = cache["_etag"]
    if "_mod" in cache:
        headers["If-Modified-Since"] = cache["_mod"]
    if 'repo.continuum.io' in url or url.startswith("file://"):
        filename = 'repodata.json.bz2'
        headers['Accept-Encoding'] = 'identity'
    else:
        headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
        headers['Content-Type'] = 'application/json'
        filename = 'repodata.json'
    try:
        timeout = context.http_connect_timeout_secs, context.http_read_timeout_secs
        resp = session.get(join_url(url, filename), headers=headers, proxies=session.proxies,
                           timeout=timeout)
        if log.isEnabledFor(DEBUG):
            log.debug(stringify(resp))
        resp.raise_for_status()
        if resp.status_code != 304:
            def get_json_str(filename, resp_content):
                # .bz2 payloads arrive compressed because of Accept-Encoding: identity
                if filename.endswith('.bz2'):
                    return bz2.decompress(resp_content).decode('utf-8')
                else:
                    return resp_content.decode('utf-8')
            if url.startswith('file://'):
                file_path = url_to_path(url)
                with FileLock(dirname(file_path)):
                    json_str = get_json_str(filename, resp.content)
            else:
                json_str = get_json_str(filename, resp.content)
            cache = json.loads(json_str)
            add_http_value_to_dict(resp, 'Etag', cache, '_etag')
            add_http_value_to_dict(resp, 'Last-Modified', cache, '_mod')
    except ValueError as e:
        raise CondaRuntimeError("Invalid index file: {0}: {1}".format(join_url(url, filename), e))
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            if url.endswith('/noarch'):  # noarch directory might not exist
                return None
            help_message = dals("""
            The remote server could not find the channel you requested.
            You will need to adjust your conda configuration to proceed.
            Use `conda config --show` to view your configuration's current state.
            Further configuration help can be found at <%s>.
            """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))
        elif e.response.status_code == 403:
            if url.endswith('/noarch'):
                return None
            else:
                help_message = dals("""
                The channel you requested is not available on the remote server.
                You will need to adjust your conda configuration to proceed.
                Use `conda config --show` to view your configuration's current state.
                Further configuration help can be found at <%s>.
                """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))
        elif e.response.status_code == 401:
            channel = Channel(url)
            if channel.token:
                help_message = dals("""
                The token '%s' given for the URL is invalid.
                If this token was pulled from anaconda-client, you will need to use
                anaconda-client to reauthenticate.
                If you supplied this token to conda directly, you will need to adjust your
                conda configuration to proceed.
                Use `conda config --show` to view your configuration's current state.
                Further configuration help can be found at <%s>.
                """ % (channel.token, join_url(CONDA_HOMEPAGE_URL, 'docs/config.html')))
            elif context.channel_alias.location in url:
                # Note, this will not trigger if the binstar configured url does
                # not match the conda configured one.
                help_message = dals("""
                The remote server has indicated you are using invalid credentials for this channel.
                If the remote site is anaconda.org or follows the Anaconda Server API, you
                will need to
                (a) login to the site with `anaconda login`, or
                (b) provide conda with a valid token directly.
                Further configuration help can be found at <%s>.
                """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))
            else:
                help_message = dals("""
                The credentials you have provided for this URL are invalid.
                You will need to modify your conda configuration to proceed.
                Use `conda config --show` to view your configuration's current state.
                Further configuration help can be found at <%s>.
                """ % join_url(CONDA_HOMEPAGE_URL, 'docs/config.html'))
        elif 500 <= e.response.status_code < 600:
            help_message = dals("""
            An remote server error occurred when trying to retrieve this URL.
            A 500-type error (e.g. 500, 501, 502, 503, etc.) indicates the server failed to
            fulfill a valid request.  The problem may be spurious, and will resolve itself if you
            try your request again.  If the problem persists, consider notifying the maintainer
            of the remote server.
            """)
        else:
            help_message = "An HTTP error occurred when trying to retrieve this URL."
        raise CondaHTTPError(help_message, e.response.url, e.response.status_code,
                             e.response.reason)
    except requests.exceptions.SSLError as e:
        # NOTE(review): SSL errors are logged but not raised — execution
        # falls through and the stale cache is written out and returned below.
        msg = "SSL Error: %s\n" % e
        stderrlog.info("SSL verification error: %s\n" % e)
        log.debug(msg)
    except requests.exceptions.ConnectionError as e:
        msg = "Connection error: %s: %s\n" % (e, url)
        stderrlog.info('Could not connect to %s\n' % url)
        log.debug(msg)
        # NOTE(review): this guard is redundant — the same exception is
        # raised unconditionally on the next line.
        if fail_unknown_host:
            raise CondaRuntimeError(msg)
        raise CondaRuntimeError(msg)
    cache['_url'] = url
    # best-effort write-back of the refreshed cache
    try:
        with open(cache_path, 'w') as fo:
            json.dump(cache, fo, indent=2, sort_keys=True, cls=EntityEncoder)
    except IOError:
        pass
    return cache or None
def fetch_index(channel_urls, use_cache=False, unknown=False, index=None):
    """Fetch repodata for every channel URL and fold it into a single
    {Dist: Record} index. *channel_urls* maps url -> (canonical_name,
    priority)."""
    log.debug('channel_urls=' + repr(channel_urls))
    # pool = ThreadPool(5)
    if index is None:
        index = {}
    if not context.json:
        stdoutlog.info("Fetching package metadata ...")
    urls = tuple(filter(offline_keep, channel_urls))
    try:
        import concurrent.futures
        executor = concurrent.futures.ThreadPoolExecutor(10)
    except (ImportError, RuntimeError) as e:
        # concurrent.futures is only available in Python >= 3.2 or if futures is installed
        # RuntimeError is thrown if number of threads are limited by OS
        log.debug(repr(e))
        session = CondaSession()
        repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                     for url in urls]
    else:
        try:
            futures = tuple(executor.submit(fetch_repodata, url, use_cache=use_cache,
                                            session=CondaSession()) for url in urls)
            repodatas = [(u, f.result()) for u, f in zip(urls, futures)]
        except RuntimeError as e:
            # Cannot start new thread, then give up parallel execution
            log.debug(repr(e))
            session = CondaSession()
            repodatas = [(url, fetch_repodata(url, use_cache=use_cache, session=session))
                         for url in urls]
        finally:
            executor.shutdown(wait=True)
    def make_index(repodatas):
        # fold every channel's repodata into one {Dist: Record} mapping
        result = dict()
        for channel_url, repodata in repodatas:
            if repodata is None:
                continue
            canonical_name, priority = channel_urls[channel_url]
            channel = Channel(channel_url)
            for fn, info in iteritems(repodata['packages']):
                full_url = join_url(channel_url, fn)
                info.update(dict(fn=fn,
                                 schannel=canonical_name,
                                 channel=channel_url,
                                 priority=priority,
                                 url=full_url,
                                 auth=channel.auth,
                                 ))
                key = Dist(canonical_name + '::' + fn if canonical_name != 'defaults' else fn)
                result[key] = Record(**info)
        return result
    # NOTE(review): this rebinding discards the dict created from the 'index'
    # parameter above — the parameter is effectively unused.
    index = make_index(repodatas)
    if not context.json:
        stdoutlog.info('\n')
    if unknown:
        add_unknown(index, channel_urls)
    if context.add_pip_as_python_dependency:
        add_pip_dependency(index)
    return index
def cache_fn_url(url):
    """Return a short, stable cache filename derived from *url*'s MD5."""
    digest = hashlib.md5(url.encode('utf-8')).hexdigest()
    return digest[:8] + '.json'
def add_http_value_to_dict(resp, http_key, d, dict_key):
    """Copy HTTP header *http_key* from *resp* into d[dict_key], but only
    when the header is present and non-empty."""
    header_value = resp.headers.get(http_key)
    if header_value:
        d[dict_key] = header_value
def add_unknown(index, priorities):
    """Add locally cached, extracted packages missing from *index*.

    *priorities* maps url -> (canonical_name, priority); packages whose
    channel is not among the known ones are assigned a priority one higher
    than the current maximum.
    """
    # canonical channel name -> priority
    priorities = {p[0]: p[1] for p in itervalues(priorities)}
    maxp = max(itervalues(priorities)) + 1 if priorities else 1
    for dist, info in iteritems(package_cache()):
        # schannel, dname = dist2pair(dist)
        fname = dist.to_filename()
        # fkey = dist + '.tar.bz2'
        # Only consider extracted packages not already present in the index.
        if dist in index or not info['dirs']:
            continue
        try:
            with open(join(info['dirs'][0], 'info', 'index.json')) as fi:
                meta = Record(**json.load(fi))
        except IOError:
            continue
        # Best available source for the package URL, in decreasing preference:
        # the urls.txt record, the metadata's own url, or its channel + fname.
        if info['urls']:
            url = info['urls'][0]
        elif meta.get('url'):
            url = meta['url']
        elif meta.get('channel'):
            url = meta['channel'].rstrip('/') + '/' + fname
        else:
            url = '<unknown>/' + fname
        # Sanity checks: URL must end in the expected filename and agree on channel.
        if url.rsplit('/', 1)[-1] != fname:
            continue
        channel, schannel2 = Channel(url).url_channel_wtf
        if schannel2 != dist.channel:
            continue
        priority = priorities.get(dist.channel, maxp)
        if 'link' in meta:
            del meta['link']
        meta.update({'fn': fname,
                     'url': url,
                     'channel': channel,
                     'schannel': dist.channel,
                     'priority': priority,
                     })
        meta.setdefault('depends', [])
        log.debug("adding cached pkg to index: %s" % dist)
        index[dist] = meta
def add_pip_dependency(index):
    """Make every python 2.x / 3.x record in *index* depend on pip."""
    for record in index.values():
        if record['name'] == 'python' and record['version'].startswith(('2.', '3.')):
            record['depends'] = record['depends'] + ('pip',)
def create_cache_dir():
    """Return the path of the repodata cache dir (under the first pkgs dir),
    creating it when missing."""
    path = join(context.pkgs_dirs[0], 'cache')
    try:
        makedirs(path)
    except OSError:
        # Directory already exists (or cannot be created); caller proceeds anyway.
        pass
    return path
conda/core/package_cache.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import hashlib
import os
import requests
import sys
import tarfile
import warnings
from logging import getLogger
from os.path import basename, dirname, exists, isdir, isfile, join
from ..base.constants import DEFAULTS
from ..base.context import context
from ..common.disk import exp_backoff_fn, rm_rf
from ..common.url import path_to_url, maybe_add_auth
from ..connection import CondaSession, RETRIES
from ..exceptions import CondaRuntimeError, CondaSignatureError, MD5MismatchError
from ..lock import FileLock
from ..models.channel import Channel, offline_keep
from ..models.dist import Dist
log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
# Dist -> {'files': [...], 'dirs': [...], 'urls': [...]} for every package found on disk.
package_cache_ = {}
# Maps both a package's path and its file:// URL to the package's channel prefix string.
fname_table_ = {}
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.
    """
    package_cache()  # make sure the cache tables are initialized
    # A bare filename (no '/') carries no channel information.
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    if not isfile(xpkg):
        xpkg = None
    # An extracted copy only counts when its essential metadata files exist.
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = url  # NOTE(review): no-op; presumably left over from a removed URL transformation
    # make dist
    schannel = Channel(url).canonical_name
    prefix = '' if schannel == DEFAULTS else schannel + '::'
    xkey = xpkg or (xdir + '.tar.bz2')
    # Record the channel prefix under both the path key and the file:// URL key.
    fname_table_[xkey] = fname_table_[path_to_url(xkey)] = prefix
    fkey = prefix + dist
    dist = Dist(fkey)
    rec = package_cache_.get(dist)
    if rec is None:
        rec = package_cache_[dist] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Best-effort append; a read-only cache dir must not be fatal here.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
      - urls: the URLs used to refer to that package
      - files: the full pathnames to fetched copies of that package
      - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Sentinel entry: makes the cache truthy so re-entrant calls from
    # add_cached_package() return immediately instead of recursing.
    package_cache_['@'] = None
    for pkgs_dir in context.pkgs_dirs:
        try:
            recorded = open(join(pkgs_dir, 'urls.txt')).read().split()
        except IOError:
            recorded = []
        # Newest entries win, so walk urls.txt back-to-front.
        for recorded_url in reversed(recorded):
            if '/' in recorded_url:
                add_cached_package(pkgs_dir, recorded_url)
        if isdir(pkgs_dir):
            for entry in os.listdir(pkgs_dir):
                add_cached_package(pkgs_dir, entry)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded in the cache for *url*, or None."""
    package_cache()  # ensure the lookup table is populated
    return fname_table_.get(url)
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse the directory holding the existing copy.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    for p in range(2):
        for pkg_dir in context.pkgs_dirs:
            pkg_path = join(pkg_dir, dist.to_filename())
            prefix = fname_table_.get(pkg_path)
            # p == 0: accept only a dir with no same-named package.
            # p == 1: accept the first dir and report the conflicting name.
            if p or prefix is None:
                return pkg_dir, prefix + dist.dist_name if p else None
# ------- package cache ----- fetched
def fetched():
    """
    Returns the set of Dists for which a fetched tarball exists in the cache.
    """
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """
    Returns the full path of the fetched package, or None if it is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Drop both lookup keys (plain path and file:// URL) before deleting.
        del fname_table_[fname]
        del fname_table_[path_to_url(fname)]
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("File not removed during RM_FETCHED instruction: %s", fname)
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_FETCHED instruction: %s", fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """
    Returns the set of Dists for which an extracted copy exists in the cache.
    """
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """
    Returns the full path of the extracted data for the requested package,
    or None if that package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with FileLock(fname):
            rm_rf(fname)
            if exists(fname):
                log.warn("Directory not removed during RM_EXTRACTED instruction: %s", fname)
    if rec['files']:
        # The tarball is still cached; just forget the extracted copies.
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    path = fname[:-8]  # strip the '.tar.bz2' suffix
    with FileLock(path):
        # Extract into a temp dir first, then rename into place so a
        # half-written extraction is never visible under the final path.
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore ownership
            # of extracted files. However, we want root to be the owner
            # (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
def read_url(dist):
    """Return the first URL recorded for *dist* in the cache, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def fetch_pkg(info, dst_dir=None, session=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`

    When the record carries a 'sig' entry, the detached signature is also
    downloaded and verified; CondaSignatureError is raised on failure.
    '''
    session = session or CondaSession()
    fn = info['fn']
    url = info.get('url') or info['channel'] + '/' + fn
    url = maybe_add_auth(url, info.get('auth'))
    log.debug("url=%r" % url)
    if dst_dir is None:
        dst_dir = find_new_location(Dist(fn))[0]
    path = join(dst_dir, fn)

    download(url, path, session=session, md5=info['md5'], urlstxt=True)
    if info.get('sig'):
        # 'sig' == '.' means the signature lives alongside the package
        # in its own channel; otherwise it names the signature's base URL.
        from ..signature import verify

        fn2 = fn + '.sig'
        url = (info['channel'] if info['sig'] == '.' else
               info['sig'].rstrip('/')) + '/' + fn2
        log.debug("signature url=%r" % url)
        download(url, join(dst_dir, fn2), session=session)
        try:
            if verify(path):
                return
        except CondaSignatureError:
            raise
        raise CondaSignatureError("Error: Signature for '%s' is invalid." % (basename(path)))
def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None):
    """Stream *url* into *dst_path*, optionally verifying its MD5 checksum.

    The payload is written to '<dst_path>.part' and renamed into place on
    success.  Connection resets and MD5 mismatches are retried up to
    *retries* times (default RETRIES).  Raises CondaRuntimeError on network
    or filesystem failure and MD5MismatchError on a final checksum mismatch.
    """
    assert "::" not in str(dst_path), str(dst_path)
    if not offline_keep(url):
        raise RuntimeError("Cannot download in offline mode: %s" % (url,))

    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not context.ssl_verify:
        # The user explicitly disabled TLS verification; silence urllib3's warning.
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    with FileLock(dst_path):
        rm_rf(dst_path)
        try:
            timeout = context.http_connect_timeout_secs, context.http_read_timeout_secs
            resp = session.get(url, stream=True, proxies=session.proxies, timeout=timeout)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise CondaRuntimeError(msg)
        except requests.exceptions.ConnectionError as e:
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise CondaRuntimeError(msg)
        except IOError as e:
            raise CondaRuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                index = 0
                for chunk in resp.iter_content(2**14):
                    index += len(chunk)
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise CondaRuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # Progress reporting through the 'fetch.update' logger.
                    if size and 0 <= index <= size:
                        getLogger('fetch.update').info(index)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise CondaRuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise MD5MismatchError("MD5 sums mismatch for download: %s (%s != %s)"
                                   % (url, h.hexdigest(), md5))

        try:
            exp_backoff_fn(os.rename, pp, dst_path)
        except OSError as e:
            raise CondaRuntimeError("Could not rename %r to %r: %r" %
                                    (pp, dst_path, e))

        if urlstxt:
            add_cached_package(dst_dir, url, overwrite=True, urlstxt=True)
| conda/base/context.py
--- a/conda/base/context.py
+++ b/conda/base/context.py
@@ -79,6 +79,8 @@ class Context(Configuration):
_channel_alias = PrimitiveParameter(DEFAULT_CHANNEL_ALIAS,
aliases=('channel_alias',),
validation=channel_alias_validation)
+ http_connect_timeout_secs = PrimitiveParameter(6.1)
+ http_read_timeout_secs = PrimitiveParameter(60.)
# channels
channels = SequenceParameter(string_types, default=('defaults',))
conda/core/index.py
--- a/conda/core/index.py
+++ b/conda/core/index.py
@@ -130,8 +130,9 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None):
filename = 'repodata.json'
try:
+ timeout = context.http_connect_timeout_secs, context.http_read_timeout_secs
resp = session.get(join_url(url, filename), headers=headers, proxies=session.proxies,
- timeout=(3.05, 60))
+ timeout=timeout)
if log.isEnabledFor(DEBUG):
log.debug(stringify(resp))
resp.raise_for_status()
conda/core/package_cache.py
--- a/conda/core/package_cache.py
+++ b/conda/core/package_cache.py
@@ -321,7 +321,8 @@ def download(url, dst_path, session=None, md5=None, urlstxt=False, retries=None)
with FileLock(dst_path):
rm_rf(dst_path)
try:
- resp = session.get(url, stream=True, proxies=session.proxies, timeout=(3.05, 27))
+ timeout = context.http_connect_timeout_secs, context.http_read_timeout_secs
+ resp = session.get(url, stream=True, proxies=session.proxies, timeout=timeout)
resp.raise_for_status()
except requests.exceptions.HTTPError as e:
msg = "HTTPError: %s: %s\n" % (e, url) |
Regression: cannot install from explicit conda package filenames
This command used to work, but now it gives the following error/traceback:
Example: `conda install bzip2-1.0.6-vc14_3.tar.bz2 --dry-run`
```
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Current conda install:
platform : win-64
conda version : 4.2.12
conda is private : False
conda-env version : 4.2.12
conda-build version : 2.0.7
python version : 3.5.2.final.0
requests version : 2.10.0
root environment : C:\Miniconda3 (writable)
default environment : C:\Miniconda3\envs\test_conda
envs directories : C:\Miniconda3\envs
package cache : C:\Miniconda3\pkgs
channel URLs : https://repo.continuum.io/pkgs/free/win-64
https://repo.continuum.io/pkgs/free/noarch
https://repo.continuum.io/pkgs/pro/win-64
https://repo.continuum.io/pkgs/pro/noarch
https://repo.continuum.io/pkgs/msys2/win-64
https://repo.continuum.io/pkgs/msys2/noarch
config file : None
offline mode : False
`$ C:\Miniconda3\Scripts\conda-script.py install bzip2-1.0.6-vc14_3.tar.bz2 --dry-run`
Traceback (most recent call last):
File "C:\Miniconda3\lib\site-packages\conda\exceptions.py", line 479, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Miniconda3\lib\site-packages\conda\cli\main.py", line 145, in _main
exit_code = args.func(args, p)
File "C:\Miniconda3\lib\site-packages\conda\cli\main_install.py", line 80, in execute
install(args, parser, 'install')
File "C:\Miniconda3\lib\site-packages\conda\cli\install.py", line 209, in install
explicit(args.packages, prefix, verbose=not context.quiet)
File "C:\Miniconda3\lib\site-packages\conda\misc.py", line 66, in explicit
if not is_url(url_p):
File "C:\Miniconda3\lib\site-packages\conda\common\url.py", line 72, in is_url
p = urlparse(url)
File "C:\Miniconda3\lib\site-packages\conda\_vendor\auxlib\decorators.py", line 56, in _memoized_func
result = func(*args, **kwargs)
File "C:\Miniconda3\lib\site-packages\conda\common\url.py", line 55, in urlparse
if on_win and url.startswith('file:'):
AttributeError: 'NoneType' object has no attribute 'startswith'
```
| conda/common/url.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import socket
import sys
from getpass import getpass
from logging import getLogger
from os.path import abspath, expanduser
try:
# Python 3
from urllib.parse import (quote, quote_plus, unquote, unquote_plus, # NOQA
urlunparse as stdlib_urlparse, urljoin) # NOQA
from urllib.request import pathname2url # NOQA
except ImportError:
# Python 2
from urllib import quote, quote_plus, unquote, unquote_plus, pathname2url # NOQA
from urlparse import urlunparse as stdlib_urlparse, urljoin # NOQA
from requests.packages.urllib3.exceptions import LocationParseError
from requests.packages.urllib3.util.url import Url, parse_url
from .._vendor.auxlib.decorators import memoize
log = getLogger(__name__)
on_win = bool(sys.platform == "win32")
@memoize
def path_to_url(path):
    """Convert a filesystem *path* (with ~ expanded, made absolute) to a file:// URL."""
    absolute = abspath(expanduser(path))
    file_url = urljoin('file:', pathname2url(absolute))
    log.debug("%s converted to %s", path, file_url)
    return file_url
def url_to_path(url):  # NOQA
    """Convert a file:// URL to a path."""
    assert url.startswith('file:'), "You can only turn file: urls into filenames (not %r)" % url
    path = unquote(url[len('file:'):].lstrip('/'))
    if re.match('^([a-z])[:|]', path, re.I):
        # Windows drive spec, possibly in 'c|' form: normalize to 'c:'.
        path = path[0] + ':' + path[2:]
    elif not path.startswith(r'\\'):
        # POSIX path (not a Windows UNC share): restore the leading slash.
        path = '/' + path
    return path
@memoize
def urlparse(url):
    """Parse *url* with urllib3's parser, normalizing backslashes in
    Windows file: URLs first.

    Bug fix: str.replace returns a new string; the original discarded the
    result, so backslashes were never actually normalized.  Rebind *url*
    so the replacement takes effect.
    """
    if on_win and url.startswith('file:'):
        url = url.replace('\\', '/')
    return parse_url(url)
def url_to_s3_info(url):
    """
    Convert a S3 url to a tuple of bucket and key
    """
    parsed = parse_url(url)
    assert parsed.scheme == 's3', "You can only use s3: urls (not %r)" % url
    return parsed.host, parsed.path
def is_url(url):
    """Return True when *url* parses as a URL (has a netloc or a file: scheme).

    Bug fix: a falsy value (None or '') is not a URL.  Guarding here
    prevents an AttributeError inside urlparse() when callers pass None
    (e.g. installing from a bare local package filename).
    """
    if not url:
        return False
    try:
        p = urlparse(url)
        return p.netloc is not None or p.scheme == "file"
    except LocationParseError:
        log.debug("Could not parse url ({0}).".format(url))
        return False
def is_ipv4_address(string_ip):
    """Return True when *string_ip* is a full dotted-quad IPv4 address.

    Examples:
        >>> [is_ipv4_address(ip) for ip in ('8.8.8.8', '192.168.10.10', '255.255.255.255')]
        [True, True, True]
        >>> [is_ipv4_address(ip) for ip in ('8.8.8', '192.168.10.10.20', '256.255.255.255', '::1')]
        [False, False, False, False]
    """
    # inet_aton accepts short forms like '8.8.8'; require four dotted parts.
    if string_ip.count('.') != 3:
        return False
    try:
        socket.inet_aton(string_ip)
    except socket.error:
        return False
    return True
def is_ipv6_address(string_ip):
    """Return True when *string_ip* is a valid IPv6 address.

    Examples:
        >>> [is_ipv6_address(ip) for ip in ('::1', '2001:db8:85a3::370:7334')]
        [True, True]
        >>> [is_ipv6_address(ip) for ip in ('192.168.10.10', '1234:'*8+'1234')]
        [False, False]
    """
    try:
        socket.inet_pton(socket.AF_INET6, string_ip)
        return True
    except socket.error:
        return False
def is_ip_address(string_ip):
    """Return True when *string_ip* is a valid IPv4 or IPv6 address.

    Examples:
        >>> is_ip_address('192.168.10.10')
        True
        >>> is_ip_address('::1')
        True
        >>> is_ip_address('www.google.com')
        False
    """
    if is_ipv4_address(string_ip):
        return True
    return is_ipv6_address(string_ip)
def join(*args):
    """Join URL path segments with single slashes; keep a leading '/' only
    when the first segment is empty or already starts with one."""
    leading = '/' if not args[0] or args[0].startswith('/') else ''
    stripped = (piece.strip('/') for piece in args if piece)
    return leading + '/'.join(piece for piece in stripped if piece)

join_url = join
def has_scheme(value):
return re.match(r'[a-z][a-z0-9]{0,11}://', value)
def strip_scheme(url):
    """Drop a leading 'scheme://' from *url*; return *url* unchanged when absent."""
    head, sep, tail = url.partition('://')
    return tail if sep else url
def mask_anaconda_token(url):
    """Replace an embedded anaconda token in *url* with '<TOKEN>'."""
    _, token = split_anaconda_token(url)
    if not token:
        return url
    return url.replace(token, "<TOKEN>", 1)
def split_anaconda_token(url):
    """Split an embedded '/t/<token>' segment out of *url*.

    Returns (url_without_token, token); token is None when no '/t/' segment
    is present.  The returned URL never carries a trailing slash.

    Examples:
        >>> split_anaconda_token("https://1.2.3.4/t/tk-123-456/path")
        (u'https://1.2.3.4/path', u'tk-123-456')
        >>> split_anaconda_token("https://1.2.3.4/t//path")
        (u'https://1.2.3.4/path', u'')
        >>> split_anaconda_token("https://1.2.3.4/path")
        (u'https://1.2.3.4/path', None)
        >>> split_anaconda_token("https://10.2.3.4:8080/conda/t/tk-123-45")
        (u'https://10.2.3.4:8080/conda', u'tk-123-45')
    """
    match = re.search(r'/t/([a-zA-Z0-9-]*)', url)
    if match is None:
        return url.rstrip('/'), None
    token = match.group(1)
    return url.replace('/t/' + token, '', 1).rstrip('/'), token
def split_platform(url):
    """Split a platform directory (e.g. 'osx-64') out of *url*.

    Examples:
        >>> split_platform("https://1.2.3.4/t/tk-123/osx-64/path")
        (u'https://1.2.3.4/t/tk-123/path', u'osx-64')
    """
    from conda.base.constants import PLATFORM_DIRECTORIES
    pattern = r'/(%s)/?' % '|'.join(PLATFORM_DIRECTORIES)
    match = re.search(pattern, url, re.IGNORECASE)
    if match is None:
        return url.rstrip('/'), None
    platform = match.group(1)
    return url.replace('/' + platform, '', 1).rstrip('/'), platform
def split_package_filename(url):
    """Split a trailing package filename ('.tar.bz2' or '.json') off *url*;
    the filename part is None when *url* does not name a package file."""
    if url.endswith(('.tar.bz2', '.json')):
        cleaned_url, package_filename = url.rsplit('/', 1)
    else:
        cleaned_url, package_filename = url, None
    return cleaned_url, package_filename
def split_scheme_auth_token(url):
    """Split *url* into (bare_url, scheme, auth, token); all None for a falsy url."""
    if not url:
        return None, None, None, None
    cleaned_url, token = split_anaconda_token(url)
    parts = urlparse(cleaned_url)
    bare_url = Url(host=parts.host, port=parts.port, path=parts.path,
                   query=parts.query).url
    return bare_url, parts.scheme, parts.auth, token
def split_conda_url_easy_parts(url):
    """Decompose a conda channel URL.

    Returns (scheme, auth, token, platform, package_filename, host, port,
    path, query).
    """
    cleaned_url, token = split_anaconda_token(url)
    cleaned_url, platform = split_platform(cleaned_url)
    cleaned_url, package_filename = split_package_filename(cleaned_url)

    # TODO: split out namespace using regex
    parts = urlparse(cleaned_url)
    return (parts.scheme, parts.auth, token, platform, package_filename,
            parts.host, parts.port, parts.path, parts.query)
def is_windows_path(value):
    """Return a match object when *value* starts like a Windows drive path
    ('c:/' or 'c:\\'), else None."""
    return re.match(r'[a-z]:[/\\]', value, re.IGNORECASE)
@memoize
def get_proxy_username_and_pass(scheme):
    """Interactively prompt for proxy credentials (memoized per scheme)."""
    username = input("\n%s proxy username: " % scheme)
    passwd = getpass("Password:")
    return username, passwd
def add_username_and_password(url, username, password):
    """Return *url* with 'username:password@' inserted; the password is
    percent-quoted so reserved characters survive."""
    parts = parse_url(url)._asdict()
    parts['auth'] = username + ':' + quote(password, '')
    return Url(**parts).url
def maybe_add_auth(url, auth, force=False):
    """add auth if the url doesn't currently have it"""
    if not auth:
        return url
    parts = urlparse(url)._asdict()
    if parts['auth'] and not force:
        # Existing credentials win unless the caller forces a replacement.
        return url
    parts['auth'] = auth
    return Url(**parts).url
if __name__ == "__main__":
import doctest
doctest.testmod()
<|code_end|>
| conda/common/url.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import re
import socket
import sys
from getpass import getpass
from logging import getLogger
from os.path import abspath, expanduser
try:
# Python 3
from urllib.parse import (quote, quote_plus, unquote, unquote_plus, # NOQA
urlunparse as stdlib_urlparse, urljoin) # NOQA
from urllib.request import pathname2url # NOQA
except ImportError:
# Python 2
from urllib import quote, quote_plus, unquote, unquote_plus, pathname2url # NOQA
from urlparse import urlunparse as stdlib_urlparse, urljoin # NOQA
from requests.packages.urllib3.exceptions import LocationParseError
from requests.packages.urllib3.util.url import Url, parse_url
from .._vendor.auxlib.decorators import memoize
log = getLogger(__name__)
on_win = bool(sys.platform == "win32")
@memoize
def path_to_url(path):
path = abspath(expanduser(path))
url = urljoin('file:', pathname2url(path))
log.debug("%s converted to %s", path, url)
return url
def url_to_path(url): # NOQA
"""Convert a file:// URL to a path."""
assert url.startswith('file:'), "You can only turn file: urls into filenames (not %r)" % url
path = url[len('file:'):].lstrip('/')
path = unquote(path)
if re.match('^([a-z])[:|]', path, re.I):
path = path[0] + ':' + path[2:]
elif not path.startswith(r'\\'):
# if not a Windows UNC path
path = '/' + path
return path
@memoize
def urlparse(url):
    """Parse *url* with urllib3's parser, normalizing backslashes in
    Windows file: URLs first.

    Bug fix: str.replace returns a new string; the original discarded the
    result, so backslashes were never actually normalized.  Rebind *url*
    so the replacement takes effect.
    """
    if on_win and url.startswith('file:'):
        url = url.replace('\\', '/')
    return parse_url(url)
def url_to_s3_info(url):
"""
Convert a S3 url to a tuple of bucket and key
"""
parsed_url = parse_url(url)
assert parsed_url.scheme == 's3', "You can only use s3: urls (not %r)" % url
bucket, key = parsed_url.host, parsed_url.path
return bucket, key
def is_url(url):
if not url:
return False
try:
p = urlparse(url)
return p.netloc is not None or p.scheme == "file"
except LocationParseError:
log.debug("Could not parse url ({0}).".format(url))
return False
def is_ipv4_address(string_ip):
"""
Examples:
>>> [is_ipv4_address(ip) for ip in ('8.8.8.8', '192.168.10.10', '255.255.255.255')]
[True, True, True]
>>> [is_ipv4_address(ip) for ip in ('8.8.8', '192.168.10.10.20', '256.255.255.255', '::1')]
[False, False, False, False]
"""
try:
socket.inet_aton(string_ip)
except socket.error:
return False
return string_ip.count('.') == 3
def is_ipv6_address(string_ip):
"""
Examples:
>>> [is_ipv6_address(ip) for ip in ('::1', '2001:db8:85a3::370:7334', '1234:'*7+'1234')]
[True, True, True]
>>> [is_ipv6_address(ip) for ip in ('192.168.10.10', '1234:'*8+'1234')]
[False, False]
"""
try:
socket.inet_pton(socket.AF_INET6, string_ip)
except socket.error:
return False
return True
def is_ip_address(string_ip):
"""
Examples:
>>> is_ip_address('192.168.10.10')
True
>>> is_ip_address('::1')
True
>>> is_ip_address('www.google.com')
False
"""
return is_ipv4_address(string_ip) or is_ipv6_address(string_ip)
def join(*args):
start = '/' if not args[0] or args[0].startswith('/') else ''
return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y)
join_url = join
def has_scheme(value):
return re.match(r'[a-z][a-z0-9]{0,11}://', value)
def strip_scheme(url):
return url.split('://', 1)[-1]
def mask_anaconda_token(url):
_, token = split_anaconda_token(url)
return url.replace(token, "<TOKEN>", 1) if token else url
def split_anaconda_token(url):
"""
Examples:
>>> split_anaconda_token("https://1.2.3.4/t/tk-123-456/path")
(u'https://1.2.3.4/path', u'tk-123-456')
>>> split_anaconda_token("https://1.2.3.4/t//path")
(u'https://1.2.3.4/path', u'')
>>> split_anaconda_token("https://some.domain/api/t/tk-123-456/path")
(u'https://some.domain/api/path', u'tk-123-456')
>>> split_anaconda_token("https://1.2.3.4/conda/t/tk-123-456/path")
(u'https://1.2.3.4/conda/path', u'tk-123-456')
>>> split_anaconda_token("https://1.2.3.4/path")
(u'https://1.2.3.4/path', None)
>>> split_anaconda_token("https://10.2.3.4:8080/conda/t/tk-123-45")
(u'https://10.2.3.4:8080/conda', u'tk-123-45')
"""
_token_match = re.search(r'/t/([a-zA-Z0-9-]*)', url)
token = _token_match.groups()[0] if _token_match else None
cleaned_url = url.replace('/t/' + token, '', 1) if token is not None else url
return cleaned_url.rstrip('/'), token
def split_platform(url):
"""
Examples:
>>> split_platform("https://1.2.3.4/t/tk-123/osx-64/path")
(u'https://1.2.3.4/t/tk-123/path', u'osx-64')
"""
from conda.base.constants import PLATFORM_DIRECTORIES
_platform_match_regex = r'/(%s)/?' % r'|'.join(r'%s' % d for d in PLATFORM_DIRECTORIES)
_platform_match = re.search(_platform_match_regex, url, re.IGNORECASE)
platform = _platform_match.groups()[0] if _platform_match else None
cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url
return cleaned_url.rstrip('/'), platform
def split_package_filename(url):
cleaned_url, package_filename = (url.rsplit('/', 1) if url.endswith(('.tar.bz2', '.json'))
else (url, None))
return cleaned_url, package_filename
def split_scheme_auth_token(url):
if not url:
return None, None, None, None
cleaned_url, token = split_anaconda_token(url)
url_parts = urlparse(cleaned_url)
remainder_url = Url(host=url_parts.host, port=url_parts.port, path=url_parts.path,
query=url_parts.query).url
return remainder_url, url_parts.scheme, url_parts.auth, token
def split_conda_url_easy_parts(url):
# scheme, auth, token, platform, package_filename, host, port, path, query
cleaned_url, token = split_anaconda_token(url)
cleaned_url, platform = split_platform(cleaned_url)
cleaned_url, package_filename = split_package_filename(cleaned_url)
# TODO: split out namespace using regex
url_parts = urlparse(cleaned_url)
return (url_parts.scheme, url_parts.auth, token, platform, package_filename, url_parts.host,
url_parts.port, url_parts.path, url_parts.query)
def is_windows_path(value):
return re.match(r'[a-z]:[/\\]', value, re.IGNORECASE)
@memoize
def get_proxy_username_and_pass(scheme):
username = input("\n%s proxy username: " % scheme)
passwd = getpass("Password:")
return username, passwd
def add_username_and_password(url, username, password):
url_parts = parse_url(url)._asdict()
url_parts['auth'] = username + ':' + quote(password, '')
return Url(**url_parts).url
def maybe_add_auth(url, auth, force=False):
"""add auth if the url doesn't currently have it"""
if not auth:
return url
url_parts = urlparse(url)._asdict()
if url_parts['auth'] and not force:
return url
url_parts['auth'] = auth
return Url(**url_parts).url
if __name__ == "__main__":
import doctest
doctest.testmod()
| conda/common/url.py
--- a/conda/common/url.py
+++ b/conda/common/url.py
@@ -68,6 +68,8 @@ def url_to_s3_info(url):
def is_url(url):
+ if not url:
+ return False
try:
p = urlparse(url)
return p.netloc is not None or p.scheme == "file" |
Invalid requirement while trying to use pip options
Hi!
I have this line in the pip section of my `environment.yaml` file:
```- rep --install-option='--no-deps'```
While trying to update my environment, I get this error:
```Invalid requirement: 'rep --install-option='--no-deps''```
If I run `pip install -r requirements.txt` with that same line present in requirements.txt, it works.
| conda/egg_info.py
<|code_start|>
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from io import open
import os
from os.path import isdir, isfile, join
import re
import sys
from .common.compat import itervalues, on_win
from .core.linked_data import linked_data
from .misc import rel_path
from .models.dist import Dist
def get_site_packages_dir(installed_pkgs):
    """Return the prefix-relative site-packages path for the python record
    in *installed_pkgs*, or None when no python is installed."""
    for info in itervalues(installed_pkgs):
        if info['name'] != 'python':
            continue
        if on_win:
            stdlib_dir = 'Lib'
        else:
            stdlib_dir = 'lib/python%s' % info['version'][:3]
        return join(stdlib_dir, 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """Yield the metadata file path for every egg/dist-info entry in *sp_dir*."""
    for fn in os.listdir(sp_dir):
        if not fn.endswith(('.egg', '.egg-info', '.dist-info')):
            continue
        path = join(sp_dir, fn)
        if isfile(path):
            yield path
        elif isdir(path):
            # Directory-style metadata: yield whichever conventional
            # metadata files exist inside it.
            for candidate in (join(path, 'PKG-INFO'),
                              join(path, 'EGG-INFO', 'PKG-INFO'),
                              join(path, 'METADATA')):
                if isfile(candidate):
                    yield candidate
# 'Key: value' metadata line, e.g. 'Name: foo' or 'Version: 1.2'.
pat = re.compile(r'(\w+):\s*(\S+)', re.I)


def parse_egg_info(path):
    """
    Parse an .egg-info file and return its canonical distribution name
    ('<name>-<version>-<pip>'), or None when name/version are missing.
    """
    info = {}
    with open(path, encoding='utf-8') as fh:
        for line in fh:
            m = pat.match(line.strip())
            if m:
                info[m.group(1).lower()] = m.group(2)
    if 'name' in info and 'version' in info:
        return '%(name)s-%(version)s-<pip>' % info
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default, only untracked (not conda installed) .egg-info files are
    considered. Setting `all_pkgs` to True changes this.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        return set()

    # Files owned by conda packages; used to recognize conda-managed metadata.
    conda_files = set()
    for info in itervalues(installed_pkgs):
        conda_files.update(info.get('files', []))

    res = set()
    for path in get_egg_info_files(join(prefix, sp_dir)):
        f = rel_path(prefix, path)
        # Skip metadata belonging to a conda-managed package unless all_pkgs.
        if all_pkgs or f not in conda_files:
            try:
                dist = parse_egg_info(path)
            except UnicodeDecodeError:
                # Unreadable metadata is silently ignored.
                dist = None
            if dist:
                res.add(Dist(dist))
    return res
if __name__ == '__main__':
from pprint import pprint
pprint(get_egg_info(sys.prefix))
<|code_end|>
conda_env/installers/pip.py
<|code_start|>
from __future__ import absolute_import

import shlex
import subprocess

from conda.exceptions import CondaValueError
from conda_env.pip_util import pip_args
def install(prefix, specs, args, env, prune=False):
    """Run ``pip install`` inside *prefix* for the given *specs*.

    Raises CondaValueError when the pip subprocess exits non-zero.
    """
    command = pip_args(prefix) + ['install'] + specs
    pip_process = subprocess.Popen(command, universal_newlines=True)
    pip_process.communicate()
    if pip_process.returncode != 0:
        raise CondaValueError("pip returned an error.")
<|code_end|>
| conda/egg_info.py
"""
Functions related to core conda functionality that relates to manually
installed Python packages, e.g. using "python setup.py install", or "pip".
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from io import open
import os
from os.path import isdir, isfile, join
import re
import sys
from .common.compat import itervalues, on_win
from .core.linked_data import linked_data
from .misc import rel_path
from .models.dist import Dist
def get_site_packages_dir(installed_pkgs):
    """Locate the site-packages directory, relative to the prefix.

    Scans *installed_pkgs* for the linked ``python`` record; returns None
    when no python package is installed in the prefix.
    """
    for record in itervalues(installed_pkgs):
        if record['name'] == 'python':
            if on_win:
                return join('Lib', 'site-packages')
            # e.g. lib/python3.5/site-packages
            return join('lib/python%s' % record['version'][:3], 'site-packages')
    return None
def get_egg_info_files(sp_dir):
    """Yield paths of egg/dist metadata files found in *sp_dir*.

    ``*.egg-link`` files (created by ``pip install -e`` / ``setup.py
    develop``) are followed: their first line is the directory holding the
    actual metadata, which is scanned recursively.
    """
    for fn in os.listdir(sp_dir):
        if fn.endswith('.egg-link'):
            # close the link file before recursing instead of holding it open
            with open(join(sp_dir, fn), 'r') as reader:
                link_target = reader.readline().strip()
            # guard against stale links whose target no longer exists, which
            # would make the recursive os.listdir() raise
            if isdir(link_target):
                for egg in get_egg_info_files(link_target):
                    yield egg
            continue
        if not fn.endswith(('.egg', '.egg-info', '.dist-info')):
            continue
        path = join(sp_dir, fn)
        if isfile(path):
            yield path
        elif isdir(path):
            for path2 in [join(path, 'PKG-INFO'),
                          join(path, 'EGG-INFO', 'PKG-INFO'),
                          join(path, 'METADATA')]:
                if isfile(path2):
                    yield path2
pat = re.compile(r'(\w+):\s*(\S+)', re.I)
def parse_egg_info(path):
    """
    Parse an .egg-info file and return its canonical distribution name
    (``name-version-<pip>``), or None when name/version are missing.
    """
    info = {}
    # use a context manager so the metadata file is closed promptly instead
    # of leaking the handle until garbage collection
    with open(path, encoding='utf-8') as fh:
        for line in fh:
            line = line.strip()
            m = pat.match(line)
            if m:
                key = m.group(1).lower()
                info[key] = m.group(2)
    try:
        return '%(name)s-%(version)s-<pip>' % info
    except KeyError:
        pass
    return None
def get_egg_info(prefix, all_pkgs=False):
    """
    Return a set of canonical names of all Python packages (in `prefix`),
    by inspecting the .egg-info files inside site-packages.

    By default, only untracked (not conda installed) .egg-info files are
    considered. Setting `all_pkgs` to True changes this.
    """
    installed_pkgs = linked_data(prefix)
    sp_dir = get_site_packages_dir(installed_pkgs)
    if sp_dir is None:
        return set()

    # every file that belongs to some conda package in this prefix
    conda_files = set()
    for record in itervalues(installed_pkgs):
        conda_files.update(record.get('files', []))

    dists = set()
    for egg_path in get_egg_info_files(join(prefix, sp_dir)):
        relative = rel_path(prefix, egg_path)
        if not all_pkgs and relative in conda_files:
            continue
        try:
            name = parse_egg_info(egg_path)
        except UnicodeDecodeError:
            name = None
        if name:
            dists.add(Dist(name))
    return dists
if __name__ == '__main__':
    # Manual smoke test: pretty-print the egg-info distributions detected in
    # this interpreter's own prefix.
    from pprint import pprint
    pprint(get_egg_info(sys.prefix))
conda_env/installers/pip.py
from __future__ import absolute_import
import os
import os.path as op
import subprocess
import tempfile
from conda_env.pip_util import pip_args
from conda.exceptions import CondaValueError
def _pip_install_via_requirements(prefix, specs, args, *_):
    """
    Installs the pip dependencies in specs using a temporary pip requirements file.

    Args
    ----
    prefix: string
        The path to the python and pip executables.

    specs: iterable of strings
        Each element should be a valid pip dependency.
        See: https://pip.pypa.io/en/stable/user_guide/#requirements-files
             https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format

    Raises
    ------
    CondaValueError
        If the pip subprocess exits with a non-zero status.
    """
    try:
        # run pip from the directory of the environment file (when given) so
        # relative paths inside the requirements resolve as the user expects
        pip_workdir = op.dirname(op.abspath(args.file))
    except AttributeError:
        # args has no .file attribute; let tempfile pick its default dir
        pip_workdir = None
    requirements = None
    try:
        # Generate the temporary requirements file
        # delete=False because pip must be able to reopen the file by name
        # (Windows forbids reopening while NamedTemporaryFile holds it); the
        # file is removed manually in the finally block below.
        requirements = tempfile.NamedTemporaryFile(mode='w',
                                                   prefix='condaenv.',
                                                   suffix='.requirements.txt',
                                                   dir=pip_workdir,
                                                   delete=False)
        requirements.write('\n'.join(specs))
        requirements.close()
        # pip command line...
        pip_cmd = pip_args(prefix) + ['install', '-r', requirements.name]
        # ...run it
        process = subprocess.Popen(pip_cmd,
                                   cwd=pip_workdir,
                                   universal_newlines=True)
        process.communicate()
        if process.returncode != 0:
            raise CondaValueError("pip returned an error")
    finally:
        # Win/Appveyor does not like it if we use context manager + delete=True.
        # So we delete the temporary file in a finally block.
        if requirements is not None and op.isfile(requirements.name):
            os.remove(requirements.name)


# Conform to Installers API
install = _pip_install_via_requirements
| conda/egg_info.py
--- a/conda/egg_info.py
+++ b/conda/egg_info.py
@@ -30,6 +30,10 @@ def get_site_packages_dir(installed_pkgs):
def get_egg_info_files(sp_dir):
for fn in os.listdir(sp_dir):
+ if fn.endswith('.egg-link'):
+ with open(join(sp_dir, fn), 'r') as reader:
+ for egg in get_egg_info_files(reader.readline().strip()):
+ yield egg
if not fn.endswith(('.egg', '.egg-info', '.dist-info')):
continue
path = join(sp_dir, fn)
conda_env/installers/pip.py
--- a/conda_env/installers/pip.py
+++ b/conda_env/installers/pip.py
@@ -1,13 +1,56 @@
from __future__ import absolute_import
+
+import os
+import os.path as op
import subprocess
+import tempfile
from conda_env.pip_util import pip_args
from conda.exceptions import CondaValueError
-def install(prefix, specs, args, env, prune=False):
- pip_cmd = pip_args(prefix) + ['install', ] + specs
- process = subprocess.Popen(pip_cmd, universal_newlines=True)
- process.communicate()
+def _pip_install_via_requirements(prefix, specs, args, *_):
+ """
+ Installs the pip dependencies in specs using a temporary pip requirements file.
+
+ Args
+ ----
+ prefix: string
+ The path to the python and pip executables.
+
+ specs: iterable of strings
+ Each element should be a valid pip dependency.
+ See: https://pip.pypa.io/en/stable/user_guide/#requirements-files
+ https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format
+ """
+ try:
+ pip_workdir = op.dirname(op.abspath(args.file))
+ except AttributeError:
+ pip_workdir = None
+ requirements = None
+ try:
+ # Generate the temporary requirements file
+ requirements = tempfile.NamedTemporaryFile(mode='w',
+ prefix='condaenv.',
+ suffix='.requirements.txt',
+ dir=pip_workdir,
+ delete=False)
+ requirements.write('\n'.join(specs))
+ requirements.close()
+ # pip command line...
+ pip_cmd = pip_args(prefix) + ['install', '-r', requirements.name]
+ # ...run it
+ process = subprocess.Popen(pip_cmd,
+ cwd=pip_workdir,
+ universal_newlines=True)
+ process.communicate()
+ if process.returncode != 0:
+ raise CondaValueError("pip returned an error")
+ finally:
+ # Win/Appveyor does not like it if we use context manager + delete=True.
+ # So we delete the temporary file in a finally block.
+ if requirements is not None and op.isfile(requirements.name):
+ os.remove(requirements.name)
+
- if process.returncode != 0:
- raise CondaValueError("pip returned an error.")
+# Conform to Installers API
+install = _pip_install_via_requirements |
On Windows, conda 4.0.5-py35_0 cannot be updated to 4.3.0-py35_1
On a fresh install of the latest Miniconda on Windows, the following fails:
`conda update -c conda-canary --all`
Giving:
```
Fetching package metadata: ......
Solving package specifications: .........
Package plan for installation in environment C:\Users\ray\m2-x64-3.5:
The following packages will be downloaded:
package | build
---------------------------|-----------------
conda-env-2.6.0 | 0 498 B
vs2015_runtime-14.0.25123 | 0 1.9 MB
python-3.5.2 | 0 30.3 MB
pycosat-0.6.1 | py35_1 80 KB
pycrypto-2.6.1 | py35_4 481 KB
pywin32-220 | py35_1 10.4 MB
pyyaml-3.12 | py35_0 118 KB
requests-2.12.4 | py35_0 791 KB
ruamel_yaml-0.11.14 | py35_0 217 KB
setuptools-27.2.0 | py35_1 761 KB
menuinst-1.4.2 | py35_1 108 KB
pip-9.0.1 | py35_1 1.7 MB
conda-4.3.0 | py35_1 510 KB
------------------------------------------------------------
Total: 47.3 MB
The following NEW packages will be INSTALLED:
pywin32: 220-py35_1
ruamel_yaml: 0.11.14-py35_0
The following packages will be UPDATED:
conda: 4.0.5-py35_0 --> 4.3.0-py35_1
conda-env: 2.4.5-py35_0 --> 2.6.0-0
menuinst: 1.3.2-py35_0 --> 1.4.2-py35_1
pip: 8.1.1-py35_1 --> 9.0.1-py35_1
pycosat: 0.6.1-py35_0 --> 0.6.1-py35_1
pycrypto: 2.6.1-py35_3 --> 2.6.1-py35_4
python: 3.5.1-4 --> 3.5.2-0
pyyaml: 3.11-py35_3 --> 3.12-py35_0
requests: 2.9.1-py35_0 --> 2.12.4-py35_0
setuptools: 20.3-py35_0 --> 27.2.0-py35_1
vs2015_runtime: 14.00.23026.0-0 --> 14.0.25123-0
Proceed ([y]/n)? y
menuinst-1.4.2 100% |###############################| Time: 0:00:00 2.35 MB/s
Fetching packages ...
conda-env-2.6. 100% |###############################| Time: 0:00:00 0.00 B/s
vs2015_runtime 100% |###############################| Time: 0:00:00 9.24 MB/s
python-3.5.2-0 100% |###############################| Time: 0:00:02 11.57 MB/s
pycosat-0.6.1- 100% |###############################| Time: 0:00:00 2.61 MB/s
pycrypto-2.6.1 100% |###############################| Time: 0:00:00 4.51 MB/s
pywin32-220-py 100% |###############################| Time: 0:00:00 10.85 MB/s
pyyaml-3.12-py 100% |###############################| Time: 0:00:00 2.57 MB/s
requests-2.12. 100% |###############################| Time: 0:00:00 5.76 MB/s
ruamel_yaml-0. 100% |###############################| Time: 0:00:00 2.84 MB/s
setuptools-27. 100% |###############################| Time: 0:00:00 4.53 MB/s
pip-9.0.1-py35 100% |###############################| Time: 0:00:00 5.70 MB/s
conda-4.3.0-py 100% |###############################| Time: 0:00:00 530.91 kB/s
Extracting packages ...
[ COMPLETE ]|##################################################| 100%
Unlinking packages ...
[ COMPLETE ]|##################################################| 100%
Linking packages ...
[ COMPLETE ]|##################################################| 100%
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 516, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 84, in _main
from ..base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\Scripts\conda-script.py", line 5, in <module>
sys.exit(conda.cli.main())
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 152, in main
return conda_exception_handler(_main)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 532, in conda_exception_handler
print_unexpected_error_message(e)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 463, in print_unexpected_error_message
from conda.base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 516, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 84, in _main
from ..base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\Scripts\conda-script.py", line 5, in <module>
sys.exit(conda.cli.main())
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 152, in main
return conda_exception_handler(_main)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 532, in conda_exception_handler
print_unexpected_error_message(e)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 463, in print_unexpected_error_message
from conda.base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
```
On Windows, conda 4.0.5-py35_0 cannot be updated to 4.3.0-py35_1
On a fresh install of the latest Miniconda on Windows, the following fails:
`conda update -c conda-canary --all`
Giving:
```
Fetching package metadata: ......
Solving package specifications: .........
Package plan for installation in environment C:\Users\ray\m2-x64-3.5:
The following packages will be downloaded:
package | build
---------------------------|-----------------
conda-env-2.6.0 | 0 498 B
vs2015_runtime-14.0.25123 | 0 1.9 MB
python-3.5.2 | 0 30.3 MB
pycosat-0.6.1 | py35_1 80 KB
pycrypto-2.6.1 | py35_4 481 KB
pywin32-220 | py35_1 10.4 MB
pyyaml-3.12 | py35_0 118 KB
requests-2.12.4 | py35_0 791 KB
ruamel_yaml-0.11.14 | py35_0 217 KB
setuptools-27.2.0 | py35_1 761 KB
menuinst-1.4.2 | py35_1 108 KB
pip-9.0.1 | py35_1 1.7 MB
conda-4.3.0 | py35_1 510 KB
------------------------------------------------------------
Total: 47.3 MB
The following NEW packages will be INSTALLED:
pywin32: 220-py35_1
ruamel_yaml: 0.11.14-py35_0
The following packages will be UPDATED:
conda: 4.0.5-py35_0 --> 4.3.0-py35_1
conda-env: 2.4.5-py35_0 --> 2.6.0-0
menuinst: 1.3.2-py35_0 --> 1.4.2-py35_1
pip: 8.1.1-py35_1 --> 9.0.1-py35_1
pycosat: 0.6.1-py35_0 --> 0.6.1-py35_1
pycrypto: 2.6.1-py35_3 --> 2.6.1-py35_4
python: 3.5.1-4 --> 3.5.2-0
pyyaml: 3.11-py35_3 --> 3.12-py35_0
requests: 2.9.1-py35_0 --> 2.12.4-py35_0
setuptools: 20.3-py35_0 --> 27.2.0-py35_1
vs2015_runtime: 14.00.23026.0-0 --> 14.0.25123-0
Proceed ([y]/n)? y
menuinst-1.4.2 100% |###############################| Time: 0:00:00 2.35 MB/s
Fetching packages ...
conda-env-2.6. 100% |###############################| Time: 0:00:00 0.00 B/s
vs2015_runtime 100% |###############################| Time: 0:00:00 9.24 MB/s
python-3.5.2-0 100% |###############################| Time: 0:00:02 11.57 MB/s
pycosat-0.6.1- 100% |###############################| Time: 0:00:00 2.61 MB/s
pycrypto-2.6.1 100% |###############################| Time: 0:00:00 4.51 MB/s
pywin32-220-py 100% |###############################| Time: 0:00:00 10.85 MB/s
pyyaml-3.12-py 100% |###############################| Time: 0:00:00 2.57 MB/s
requests-2.12. 100% |###############################| Time: 0:00:00 5.76 MB/s
ruamel_yaml-0. 100% |###############################| Time: 0:00:00 2.84 MB/s
setuptools-27. 100% |###############################| Time: 0:00:00 4.53 MB/s
pip-9.0.1-py35 100% |###############################| Time: 0:00:00 5.70 MB/s
conda-4.3.0-py 100% |###############################| Time: 0:00:00 530.91 kB/s
Extracting packages ...
[ COMPLETE ]|##################################################| 100%
Unlinking packages ...
[ COMPLETE ]|##################################################| 100%
Linking packages ...
[ COMPLETE ]|##################################################| 100%
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 516, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 84, in _main
from ..base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\Scripts\conda-script.py", line 5, in <module>
sys.exit(conda.cli.main())
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 152, in main
return conda_exception_handler(_main)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 532, in conda_exception_handler
print_unexpected_error_message(e)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 463, in print_unexpected_error_message
from conda.base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 516, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 84, in _main
from ..base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\ray\m2-x64-3.5\Scripts\conda-script.py", line 5, in <module>
sys.exit(conda.cli.main())
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\cli\main.py", line 152, in main
return conda_exception_handler(_main)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 532, in conda_exception_handler
print_unexpected_error_message(e)
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\exceptions.py", line 463, in print_unexpected_error_message
from conda.base.context import context
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\context.py", line 12, in <module>
from .constants import (APP_NAME, DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, ROOT_ENV_NAME,
File "C:\Users\ray\m2-x64-3.5\lib\site-packages\conda\base\constants.py", line 15
PREFIX_PLACEHOLDER = 'C:\Users\ray\m2-x64-3.5'
^
SyntaxError: (unicode error) 'unicodeescape' codec can't decode bytes in position 2-3: truncated \UXXXXXXXX escape
```
| conda/base/constants.py
<|code_start|>
# -*- coding: utf-8 -*-
"""
This file should hold most string literals and magic numbers used throughout the code base.
The exception is if a literal is specifically meant to be private to and isolated within a module.
Think of this as a "more static" source of configuration information.
Another important source of "static" configuration is conda/models/enums.py.
"""
from __future__ import absolute_import, division, print_function, unicode_literals

import sys
from os.path import join

# True when running on Windows.
on_win = bool(sys.platform == "win32")

PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts, such that running
                      # this program on itself will leave it unchanged
                      'anaconda3')

# Pointer width of the running interpreter (32 or 64).
machine_bits = 8 * tuple.__itemsize__

APP_NAME = 'conda'

# Ordered list of locations searched for .condarc configuration.
SEARCH_PATH = (
    '/etc/conda/condarc',
    '/etc/conda/condarc.d/',
    '/var/lib/conda/condarc',
    '/var/lib/conda/condarc.d/',
    '$CONDA_ROOT/condarc',
    '$CONDA_ROOT/.condarc',
    '$CONDA_ROOT/condarc.d/',
    '~/.conda/condarc',
    '~/.conda/condarc.d/',
    '~/.condarc',
    '$CONDA_PREFIX/.condarc',
    '$CONDA_PREFIX/condarc.d/',
    '$CONDARC',
)

DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org'
CONDA_HOMEPAGE_URL = 'https://conda.pydata.org'
DEFAULTS = 'defaults'

PLATFORM_DIRECTORIES = ("linux-64",
                        "linux-32",
                        "win-64",
                        "win-32",
                        "osx-64",
                        "linux-ppc64le",
                        "linux-armv6l",
                        "linux-armv7l",
                        "zos-z",
                        "noarch",
                        )

RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')

DEFAULT_CHANNELS_UNIX = ('https://repo.continuum.io/pkgs/free',
                         'https://repo.continuum.io/pkgs/r',
                         'https://repo.continuum.io/pkgs/pro',
                         )

# Windows additionally gets the msys2 channel.
DEFAULT_CHANNELS_WIN = ('https://repo.continuum.io/pkgs/free',
                        'https://repo.continuum.io/pkgs/r',
                        'https://repo.continuum.io/pkgs/pro',
                        'https://repo.continuum.io/pkgs/msys2',
                        )

DEFAULT_CHANNELS = DEFAULT_CHANNELS_WIN if on_win else DEFAULT_CHANNELS_UNIX

ROOT_ENV_NAME = 'root'

# Packages that must never be removed from the root environment.
ROOT_NO_RM = (
    'python',
    'pycosat',
    'ruamel_yaml',
    'conda',
    'openssl',
    'requests',
)

# Maximum priority, reserved for packages we really want to remove
MAX_CHANNEL_PRIORITY = 10000

CONDA_TARBALL_EXTENSION = '.tar.bz2'

PRIVATE_ENVS = join(sys.prefix, "conda-meta/private_envs")

UNKNOWN_CHANNEL = "<unknown>"

INTERRUPT_SIGNALS = (
    'SIGABRT',
    'SIGINT',
    'SIGTERM',
    'SIGQUIT',
    'SIGBREAK',
)
<|code_end|>
| conda/base/constants.py
# -*- coding: utf-8 -*-
"""
This file should hold most string literals and magic numbers used throughout the code base.
The exception is if a literal is specifically meant to be private to and isolated within a module.
Think of this as a "more static" source of configuration information.
Another important source of "static" configuration is conda/models/enums.py.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from os.path import join
# True when running on Windows.
on_win = bool(sys.platform == "win32")
PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts, such that running
                      # this program on itself will leave it unchanged
                      'anaconda3')
# Pointer width of the running interpreter (32 or 64).
machine_bits = 8 * tuple.__itemsize__
APP_NAME = 'conda'
# Ordered list of locations searched for .condarc configuration.
SEARCH_PATH = (
    '/etc/conda/condarc',
    '/etc/conda/condarc.d/',
    '/var/lib/conda/condarc',
    '/var/lib/conda/condarc.d/',
    '$CONDA_ROOT/condarc',
    '$CONDA_ROOT/.condarc',
    '$CONDA_ROOT/condarc.d/',
    '~/.conda/condarc',
    '~/.conda/condarc.d/',
    '~/.condarc',
    '$CONDA_PREFIX/.condarc',
    '$CONDA_PREFIX/condarc.d/',
    '$CONDARC',
)
DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org'
CONDA_HOMEPAGE_URL = 'http://conda.pydata.org'
DEFAULTS = 'defaults'
PLATFORM_DIRECTORIES = ("linux-64",
                        "linux-32",
                        "win-64",
                        "win-32",
                        "osx-64",
                        "linux-ppc64le",
                        "linux-armv6l",
                        "linux-armv7l",
                        "zos-z",
                        "noarch",
                        )
RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file')
DEFAULT_CHANNELS_UNIX = ('https://repo.continuum.io/pkgs/free',
                         'https://repo.continuum.io/pkgs/r',
                         'https://repo.continuum.io/pkgs/pro',
                         )
# Windows additionally gets the msys2 channel.
DEFAULT_CHANNELS_WIN = ('https://repo.continuum.io/pkgs/free',
                        'https://repo.continuum.io/pkgs/r',
                        'https://repo.continuum.io/pkgs/pro',
                        'https://repo.continuum.io/pkgs/msys2',
                        )
DEFAULT_CHANNELS = DEFAULT_CHANNELS_WIN if on_win else DEFAULT_CHANNELS_UNIX
ROOT_ENV_NAME = 'root'
# Packages that must never be removed from the root environment.
ROOT_NO_RM = (
    'python',
    'pycosat',
    'ruamel_yaml',
    'conda',
    'openssl',
    'requests',
)
# Maximum priority, reserved for packages we really want to remove
MAX_CHANNEL_PRIORITY = 10000
CONDA_TARBALL_EXTENSION = '.tar.bz2'
PRIVATE_ENVS = join(sys.prefix, "conda-meta/private_envs")
UNKNOWN_CHANNEL = "<unknown>"
INTERRUPT_SIGNALS = (
    'SIGABRT',
    'SIGINT',
    'SIGTERM',
    'SIGQUIT',
    'SIGBREAK',
)
| conda/base/constants.py
--- a/conda/base/constants.py
+++ b/conda/base/constants.py
@@ -12,7 +12,10 @@
from os.path import join
on_win = bool(sys.platform == "win32")
-PREFIX_PLACEHOLDER = '/opt/anaconda1anaconda2anaconda3'
+PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2'
+ # this is intentionally split into parts, such that running
+ # this program on itself will leave it unchanged
+ 'anaconda3')
machine_bits = 8 * tuple.__itemsize__
|
conda env export failure
Under conda 4.3.1, `conda env export` returns the backtrace:
```Python
Traceback (most recent call last):
File "/home/alan/anaconda/lib/python3.5/site-packages/conda/exceptions.py", line 515, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/home/alan/anaconda/lib/python3.5/site-packages/conda_env/cli/main_export.py", line 94, in execute
ignore_channels=args.ignore_channels)
File "/home/alan/anaconda/lib/python3.5/site-packages/conda_env/env.py", line 62, in from_environment
for dist in installed:
AttributeError: 'str' object has no attribute 'channel'
```
My current conda information:
```
Current conda install:
platform : linux-64
conda version : 4.3.1
conda is private : False
conda-env version : 4.3.1
conda-build version : 2.0.12
python version : 3.5.2.final.0
requests version : 2.12.4
root environment : /home/alan/anaconda (writable)
default environment : /home/alan/anaconda/envs/labs
envs directories : /home/alan/anaconda/envs
package cache : /home/alan/anaconda/pkgs
channel URLs : https://conda.anaconda.org/conda-forge/linux-64
https://conda.anaconda.org/conda-forge/noarch
https://conda.anaconda.org/conda-canary/linux-64
https://conda.anaconda.org/conda-canary/noarch
https://repo.continuum.io/pkgs/free/linux-64
https://repo.continuum.io/pkgs/free/noarch
https://repo.continuum.io/pkgs/r/linux-64
https://repo.continuum.io/pkgs/r/noarch
https://repo.continuum.io/pkgs/pro/linux-64
https://repo.continuum.io/pkgs/pro/noarch
config file : /home/alan/.condarc
offline mode : False
user-agent : conda/4.3.1 requests/2.12.4 CPython/3.5.2 Linux/4.4.0-57-generic debian/stretch/sid glibc/2.23
UID:GID : 1000:1000
```
conda env export failure
Under conda 4.3.1, `conda env export` returns the backtrace:
```Python
Traceback (most recent call last):
File "/home/alan/anaconda/lib/python3.5/site-packages/conda/exceptions.py", line 515, in conda_exception_handler
return_value = func(*args, **kwargs)
File "/home/alan/anaconda/lib/python3.5/site-packages/conda_env/cli/main_export.py", line 94, in execute
ignore_channels=args.ignore_channels)
File "/home/alan/anaconda/lib/python3.5/site-packages/conda_env/env.py", line 62, in from_environment
for dist in installed:
AttributeError: 'str' object has no attribute 'channel'
```
My current conda information:
```
Current conda install:
platform : linux-64
conda version : 4.3.1
conda is private : False
conda-env version : 4.3.1
conda-build version : 2.0.12
python version : 3.5.2.final.0
requests version : 2.12.4
root environment : /home/alan/anaconda (writable)
default environment : /home/alan/anaconda/envs/labs
envs directories : /home/alan/anaconda/envs
package cache : /home/alan/anaconda/pkgs
channel URLs : https://conda.anaconda.org/conda-forge/linux-64
https://conda.anaconda.org/conda-forge/noarch
https://conda.anaconda.org/conda-canary/linux-64
https://conda.anaconda.org/conda-canary/noarch
https://repo.continuum.io/pkgs/free/linux-64
https://repo.continuum.io/pkgs/free/noarch
https://repo.continuum.io/pkgs/r/linux-64
https://repo.continuum.io/pkgs/r/noarch
https://repo.continuum.io/pkgs/pro/linux-64
https://repo.continuum.io/pkgs/pro/noarch
config file : /home/alan/.condarc
offline mode : False
user-agent : conda/4.3.1 requests/2.12.4 CPython/3.5.2 Linux/4.4.0-57-generic debian/stretch/sid glibc/2.23
UID:GID : 1000:1000
```
| conda_env/env.py
<|code_start|>
from __future__ import absolute_import, print_function
import os
from collections import OrderedDict
from conda.base.context import context
from conda.cli import common # TODO: this should never have to import form conda.cli
from conda.core.linked_data import linked
from copy import copy
from itertools import chain
from . import compat, exceptions, yaml
from .pip_util import add_pip_installed
def load_from_directory(directory):
    """Load and return an ``Environment`` from a given ``directory``"""
    filenames = ['environment.yml', 'environment.yaml']
    path = directory
    while True:
        for fname in filenames:
            try:
                return from_file(os.path.join(path, fname))
            except exceptions.EnvironmentFileNotFound:
                pass
        # climb one level; stop once dirname() no longer changes (fs root)
        next_path = os.path.dirname(path)
        if next_path == path:
            break
        path = next_path
    raise exceptions.EnvironmentFileNotFound(filenames[0])
# TODO This should lean more on conda instead of divining it from the outside
# TODO tests!!!
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
    """
    Get environment object from prefix

    Args:
        name: The name of environment
        prefix: The path of prefix
        no_builds: Whether to omit the build string from conda specs
        ignore_channels: whether ignore_channels

    Returns:     Environment object
    """
    installed = linked(prefix, ignore_channels=ignore_channels)
    conda_pkgs = copy(installed)
    # json=True hides the output, data is added to installed
    add_pip_installed(prefix, installed, json=True)

    pip_pkgs = sorted(installed - conda_pkgs)

    if no_builds:
        # name=version only; previously this branch duplicated the full
        # name=version=build spec, which made no_builds a no-op
        dependencies = ['='.join(a.quad[0:2]) for a in sorted(conda_pkgs)]
    else:
        dependencies = ['='.join(a.quad[0:3]) for a in sorted(conda_pkgs)]
    if len(pip_pkgs) > 0:
        dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
    # conda uses ruamel_yaml which returns a ruamel_yaml.comments.CommentedSeq
    # this doesn't dump correctly using pyyaml
    channels = list(context.channels)
    if not ignore_channels:
        # iterate conda_pkgs, not installed: add_pip_installed() mixes plain
        # strings into `installed`, and strings have no .channel attribute
        for dist in conda_pkgs:
            if dist.channel not in channels:
                channels.insert(0, dist.channel)
    return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
def from_yaml(yamlstr, **kwargs):
    """Load and return an ``Environment`` from a given ``yaml string``

    Any keyword arguments override the corresponding keys parsed from the
    YAML (e.g. ``filename=...``).
    """
    data = yaml.load(yamlstr)
    # **kwargs always binds to a dict (possibly empty) and can never be
    # None, so iterate it directly instead of the previous dead None check
    for key, value in kwargs.items():
        data[key] = value
    return Environment(**data)
def from_file(filename):
    """Load an ``Environment`` from the YAML file at *filename*.

    Raises EnvironmentFileNotFound when the file does not exist.
    """
    if not os.path.exists(filename):
        raise exceptions.EnvironmentFileNotFound(filename)
    with open(filename, 'r') as fp:
        return from_yaml(fp.read(), filename=filename)
# TODO test explicitly
class Dependencies(OrderedDict):
    """Ordered mapping from installer name ('conda', 'pip', ...) to the list
    of dependency specs parsed from an environment file's raw entries."""

    def __init__(self, raw, *args, **kwargs):
        super(Dependencies, self).__init__(*args, **kwargs)
        self.raw = raw
        self.parse()

    def parse(self):
        """(Re)build the mapping from ``self.raw``."""
        if not self.raw:
            return
        self['conda'] = []
        for entry in self.raw:
            if isinstance(entry, dict):
                # a nested installer section, e.g. {'pip': [...]}
                self.update(entry)
            else:
                self['conda'].append(common.arg2spec(entry))

    # TODO only append when it's not already present
    def add(self, package_name):
        self.raw.append(package_name)
        self.parse()
def unique(seq, key=None):
    """ Return only unique elements of a sequence
    >>> tuple(unique((1, 2, 3)))
    (1, 2, 3)
    >>> tuple(unique((1, 2, 1, 3)))
    (1, 2, 3)

    Uniqueness can be defined by key keyword
    >>> tuple(unique(['cat', 'mouse', 'dog', 'hen'], key=len))
    ('cat', 'mouse')
    """
    seen = set()
    for item in seq:
        # with no key, the item itself is its own uniqueness marker
        marker = item if key is None else key(item)
        if marker not in seen:
            seen.add(marker)
            yield item
class Environment(object):
    """In-memory representation of an environment.yml file."""

    def __init__(self, name=None, filename=None, channels=None,
                 dependencies=None, prefix=None):
        self.name = name
        self.filename = filename
        self.prefix = prefix
        self.dependencies = Dependencies(dependencies)
        self.channels = [] if channels is None else channels

    def add_channels(self, channels):
        # new channels take precedence; later duplicates are dropped
        self.channels = list(unique(chain.from_iterable((channels, self.channels))))

    def remove_channels(self):
        self.channels = []

    def to_dict(self):
        """Return the serializable dict form, omitting empty sections."""
        d = yaml.dict([('name', self.name)])
        if self.channels:
            d['channels'] = self.channels
        if self.dependencies:
            d['dependencies'] = self.dependencies.raw
        if self.prefix:
            d['prefix'] = self.prefix
        return d

    def to_yaml(self, stream=None):
        """Render as YAML; return the text, or write it to *stream*."""
        text = compat.u(yaml.dump(self.to_dict(), default_flow_style=False))
        if stream is None:
            return text
        stream.write(compat.b(text, encoding="utf-8"))

    def save(self):
        with open(self.filename, "wb") as fp:
            self.to_yaml(stream=fp)
<|code_end|>
| conda_env/env.py
from __future__ import absolute_import, print_function
import os
from collections import OrderedDict
from conda.base.context import context
from conda.cli import common # TODO: this should never have to import form conda.cli
from conda.core.linked_data import linked
from copy import copy
from itertools import chain
from . import compat, exceptions, yaml
from .pip_util import add_pip_installed
def load_from_directory(directory):
"""Load and return an ``Environment`` from a given ``directory``"""
files = ['environment.yml', 'environment.yaml']
while True:
for f in files:
try:
return from_file(os.path.join(directory, f))
except exceptions.EnvironmentFileNotFound:
pass
old_directory = directory
directory = os.path.dirname(directory)
if directory == old_directory:
break
raise exceptions.EnvironmentFileNotFound(files[0])
# TODO This should lean more on conda instead of divining it from the outside
# TODO tests!!!
def from_environment(name, prefix, no_builds=False, ignore_channels=False):
"""
Get environment object from prefix
Args:
name: The name of environment
prefix: The path of prefix
no_builds: Whether has build requirement
ignore_channels: whether ignore_channels
Returns: Environment object
"""
installed = linked(prefix, ignore_channels=ignore_channels)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
if no_builds:
dependencies = ['='.join(a.quad[0:3]) for a in sorted(conda_pkgs)]
else:
dependencies = ['='.join(a.quad[0:3]) for a in sorted(conda_pkgs)]
if len(pip_pkgs) > 0:
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
# conda uses ruamel_yaml which returns a ruamel_yaml.comments.CommentedSeq
# this doesn't dump correctly using pyyaml
channels = list(context.channels)
if not ignore_channels:
for dist in conda_pkgs:
if dist.channel not in channels:
channels.insert(0, dist.channel)
return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
def from_yaml(yamlstr, **kwargs):
"""Load and return a ``Environment`` from a given ``yaml string``"""
data = yaml.load(yamlstr)
if kwargs is not None:
for key, value in kwargs.items():
data[key] = value
return Environment(**data)
def from_file(filename):
if not os.path.exists(filename):
raise exceptions.EnvironmentFileNotFound(filename)
with open(filename, 'r') as fp:
yamlstr = fp.read()
return from_yaml(yamlstr, filename=filename)
# TODO test explicitly
class Dependencies(OrderedDict):
def __init__(self, raw, *args, **kwargs):
super(Dependencies, self).__init__(*args, **kwargs)
self.raw = raw
self.parse()
def parse(self):
if not self.raw:
return
self.update({'conda': []})
for line in self.raw:
if isinstance(line, dict):
self.update(line)
else:
self['conda'].append(common.arg2spec(line))
# TODO only append when it's not already present
def add(self, package_name):
self.raw.append(package_name)
self.parse()
def unique(seq, key=None):
""" Return only unique elements of a sequence
>>> tuple(unique((1, 2, 3)))
(1, 2, 3)
>>> tuple(unique((1, 2, 1, 3)))
(1, 2, 3)
Uniqueness can be defined by key keyword
>>> tuple(unique(['cat', 'mouse', 'dog', 'hen'], key=len))
('cat', 'mouse')
"""
seen = set()
seen_add = seen.add
if key is None:
for item in seq:
if item not in seen:
seen_add(item)
yield item
else: # calculate key
for item in seq:
val = key(item)
if val not in seen:
seen_add(val)
yield item
class Environment(object):
def __init__(self, name=None, filename=None, channels=None,
dependencies=None, prefix=None):
self.name = name
self.filename = filename
self.prefix = prefix
self.dependencies = Dependencies(dependencies)
if channels is None:
channels = []
self.channels = channels
def add_channels(self, channels):
self.channels = list(unique(chain.from_iterable((channels, self.channels))))
def remove_channels(self):
self.channels = []
def to_dict(self):
d = yaml.dict([('name', self.name)])
if self.channels:
d['channels'] = self.channels
if self.dependencies:
d['dependencies'] = self.dependencies.raw
if self.prefix:
d['prefix'] = self.prefix
return d
def to_yaml(self, stream=None):
d = self.to_dict()
out = compat.u(yaml.dump(d, default_flow_style=False))
if stream is None:
return out
stream.write(compat.b(out, encoding="utf-8"))
def save(self):
with open(self.filename, "wb") as fp:
self.to_yaml(stream=fp)
| conda_env/env.py
--- a/conda_env/env.py
+++ b/conda_env/env.py
@@ -36,10 +36,9 @@ def from_environment(name, prefix, no_builds=False, ignore_channels=False):
name: The name of environment
prefix: The path of prefix
no_builds: Whether has build requirement
- ignore_channels: whether ingore_channels
-
- Returns: Environment obejct
+ ignore_channels: whether ignore_channels
+ Returns: Environment object
"""
installed = linked(prefix, ignore_channels=ignore_channels)
conda_pkgs = copy(installed)
@@ -58,7 +57,7 @@ def from_environment(name, prefix, no_builds=False, ignore_channels=False):
# this doesn't dump correctly using pyyaml
channels = list(context.channels)
if not ignore_channels:
- for dist in installed:
+ for dist in conda_pkgs:
if dist.channel not in channels:
channels.insert(0, dist.channel)
return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix) |
non json output on root clone
I understand that cloning root env is probably a not very good idea... but even then with the json flag, we get this non json output
```
$ conda create -n rootclone --clone root --json`
The following packages cannot be cloned out of the root environment:
- conda-build-2.0.2-py35_0
- conda-4.2.13-py35_0
... (valid Json output)
```
Possible fixes according to @kalefranz
- Send that output to stderr
- Just eat that output
- Please fill in with your suggestion ...
| conda/misc.py
<|code_start|>
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import defaultdict
import os
from os.path import (abspath, dirname, exists, expanduser, isdir, isfile, islink, join,
relpath)
import re
import shutil
import sys
from ._vendor.auxlib.path import expand
from .base.context import context
from .common.compat import iteritems, iterkeys, itervalues, on_win
from .common.path import url_to_path, win_path_ok
from .common.url import is_url, join_url, path_to_url
from .core.index import get_index, supplement_index_with_cache
from .core.linked_data import linked_data
from .core.package_cache import PackageCache, ProgressiveFetchExtract
from .exceptions import CondaFileNotFoundError, CondaRuntimeError, ParseError
from .gateways.disk.delete import rm_rf
from .instructions import LINK, UNLINK
from .models.dist import Dist
from .models.index_record import IndexRecord
from .plan import execute_actions
from .resolve import MatchSpec, Resolve
def conda_installed_files(prefix, exclude_self_build=False):
"""
Return the set of files which have been installed (using conda) into
a given prefix.
"""
res = set()
for dist, meta in iteritems(linked_data(prefix)):
if exclude_self_build and 'file_hash' in meta:
continue
res.update(set(meta.get('files', ())))
return res
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
r'(?P<fn>[^/\\#]+\.tar\.bz2)'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
actions = defaultdict(list)
actions['PREFIX'] = prefix
fetch_recs = {}
for spec in specs:
if spec == '@EXPLICIT':
continue
if not is_url(spec):
spec = path_to_url(expand(spec))
# parse URL
m = url_pat.match(spec)
if m is None:
raise ParseError('Could not parse explicit URL: %s' % spec)
url_p, fn, md5sum = m.group('url_p'), m.group('fn'), m.group('md5')
url = join_url(url_p, fn)
# url_p is everything but the tarball_basename and the md5sum
# If the path points to a file in the package cache, we need to use
# the dist name that corresponds to that package. The MD5 may not
# match, but we will let PFE below worry about that
dist = None
if url.startswith('file:/'):
path = win_path_ok(url_to_path(url))
if dirname(path) in context.pkgs_dirs:
if not exists(path):
raise CondaFileNotFoundError(path)
pc_entry = PackageCache.tarball_file_in_cache(path)
dist = pc_entry.dist
url = dist.to_url() or pc_entry.get_urls_txt_value()
md5sum = md5sum or pc_entry.md5sum
dist = dist or Dist(url)
fetch_recs[dist] = {'md5': md5sum, 'url': url}
# perform any necessary fetches and extractions
if verbose:
from .console import setup_verbose_handlers
setup_verbose_handlers()
link_dists = tuple(iterkeys(fetch_recs))
pfe = ProgressiveFetchExtract(fetch_recs, link_dists)
pfe.execute()
# Now get the index---but the only index we need is the package cache
index = {}
supplement_index_with_cache(index, ())
# unlink any installed packages with same package name
link_names = {index[d]['name'] for d in link_dists}
actions[UNLINK].extend(d for d, r in iteritems(linked_data(prefix))
if r['name'] in link_names)
actions[LINK].extend(link_dists)
execute_actions(actions, index, verbose=verbose)
return actions
def rel_path(prefix, path, windows_forward_slashes=True):
res = path[len(prefix) + 1:]
if on_win and windows_forward_slashes:
res = res.replace('\\', '/')
return res
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
"""
Return the set of all files in a given prefix directory.
"""
res = set()
prefix = abspath(prefix)
ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
'.unionfs', '.nonadmin'}
binignore = {'conda', 'activate', 'deactivate'}
if sys.platform == 'darwin':
ignore.update({'python.app', 'Launcher.app'})
for fn in os.listdir(prefix):
if ignore_predefined_files and fn in ignore:
continue
if isfile(join(prefix, fn)):
res.add(fn)
continue
for root, dirs, files in os.walk(join(prefix, fn)):
should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
for fn2 in files:
if should_ignore and fn2 in binignore:
continue
res.add(relpath(join(root, fn2), prefix))
for dn in dirs:
path = join(root, dn)
if islink(path):
res.add(relpath(path, prefix))
if on_win and windows_forward_slashes:
return {path.replace('\\', '/') for path in res}
else:
return res
def untracked(prefix, exclude_self_build=False):
"""
Return (the set) of all untracked files for a given prefix.
"""
conda_files = conda_installed_files(prefix, exclude_self_build)
return {path for path in walk_prefix(prefix) - conda_files
if not (path.endswith('~') or
(sys.platform == 'darwin' and path.endswith('.DS_Store')) or
(path.endswith('.pyc') and path[:-1] in conda_files))}
def which_prefix(path):
"""
given the path (to a (presumably) conda installed file) return the
environment prefix in which the file in located
"""
prefix = abspath(path)
while True:
if isdir(join(prefix, 'conda-meta')):
# we found the it, so let's return it
return prefix
if prefix == dirname(prefix):
# we cannot chop off any more directories, so we didn't find it
return None
prefix = dirname(prefix)
def touch_nonadmin(prefix):
"""
Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
"""
if on_win and exists(join(context.root_dir, '.nonadmin')):
if not isdir(prefix):
os.makedirs(prefix)
with open(join(prefix, '.nonadmin'), 'w') as fo:
fo.write('')
def append_env(prefix):
dir_path = abspath(expanduser('~/.conda'))
try:
if not isdir(dir_path):
os.mkdir(dir_path)
with open(join(dir_path, 'environments.txt'), 'a') as f:
f.write('%s\n' % prefix)
except IOError:
pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
"""
clone existing prefix1 into new prefix2
"""
untracked_files = untracked(prefix1)
# Discard conda, conda-env and any package that depends on them
drecs = linked_data(prefix1)
filter = {}
found = True
while found:
found = False
for dist, info in iteritems(drecs):
name = info['name']
if name in filter:
continue
if name == 'conda':
filter['conda'] = dist
found = True
break
if name == "conda-env":
filter["conda-env"] = dist
found = True
break
for dep in info.get('depends', []):
if MatchSpec(dep).name in filter:
filter[name] = dist
found = True
if filter:
if not quiet:
print('The following packages cannot be cloned out of the root environment:')
for pkg in itervalues(filter):
print(' - ' + pkg.dist_name)
drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
# Resolve URLs for packages that do not have URLs
r = None
index = {}
unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
notfound = []
if unknowns:
index_args = index_args or {}
index = get_index(**index_args)
r = Resolve(index, sort=True)
for dist in unknowns:
name = dist.dist_name
fn = dist.to_filename()
fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
if fkeys:
del drecs[dist]
dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
drecs[Dist(dist_str)] = r.index[dist_str]
else:
notfound.append(fn)
if notfound:
what = "Package%s " % ('' if len(notfound) == 1 else 's')
notfound = '\n'.join(' - ' + fn for fn in notfound)
msg = '%s missing in current %s channels:%s' % (what, context.subdir, notfound)
raise CondaRuntimeError(msg)
# Assemble the URL and channel list
urls = {}
for dist, info in iteritems(drecs):
fkey = dist
if fkey not in index:
index[fkey] = IndexRecord.from_objects(info, not_fetched=True)
r = None
urls[dist] = info['url']
if r is None:
r = Resolve(index)
dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
urls = [urls[d] for d in dists]
if verbose:
print('Packages: %d' % len(dists))
print('Files: %d' % len(untracked_files))
for f in untracked_files:
src = join(prefix1, f)
dst = join(prefix2, f)
dst_dir = dirname(dst)
if islink(dst_dir) or isfile(dst_dir):
rm_rf(dst_dir)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if islink(src):
os.symlink(os.readlink(src), dst)
continue
try:
with open(src, 'rb') as fi:
data = fi.read()
except IOError:
continue
try:
s = data.decode('utf-8')
s = s.replace(prefix1, prefix2)
data = s.encode('utf-8')
except UnicodeDecodeError: # data is binary
pass
with open(dst, 'wb') as fo:
fo.write(data)
shutil.copystat(src, dst)
actions = explicit(urls, prefix2, verbose=not quiet, index=index,
force_extract=False, index_args=index_args)
return actions, untracked_files
def make_icon_url(info):
if info.get('channel') and info.get('icon'):
base_url = dirname(info['channel'])
icon_fn = info['icon']
# icon_cache_path = join(pkgs_dir, 'cache', icon_fn)
# if isfile(icon_cache_path):
# return url_path(icon_cache_path)
return '%s/icons/%s' % (base_url, icon_fn)
return ''
def list_prefixes():
# Lists all the prefixes that conda knows about.
for envs_dir in context.envs_dirs:
if not isdir(envs_dir):
continue
for dn in sorted(os.listdir(envs_dir)):
if dn.startswith('.'):
continue
prefix = join(envs_dir, dn)
if isdir(prefix):
prefix = join(envs_dir, dn)
yield prefix
yield context.root_dir
<|code_end|>
| conda/misc.py
# this module contains miscellaneous stuff which enventually could be moved
# into other places
from __future__ import absolute_import, division, print_function, unicode_literals
from collections import defaultdict
import os
from os.path import (abspath, dirname, exists, expanduser, isdir, isfile, islink, join,
relpath)
import re
import shutil
import sys
from ._vendor.auxlib.path import expand
from .base.context import context
from .common.compat import iteritems, iterkeys, itervalues, on_win
from .common.path import url_to_path, win_path_ok
from .common.url import is_url, join_url, path_to_url
from .core.index import get_index, supplement_index_with_cache
from .core.linked_data import linked_data
from .core.package_cache import PackageCache, ProgressiveFetchExtract
from .exceptions import CondaFileNotFoundError, CondaRuntimeError, ParseError
from .gateways.disk.delete import rm_rf
from .instructions import LINK, UNLINK
from .models.dist import Dist
from .models.index_record import IndexRecord
from .plan import execute_actions
from .resolve import MatchSpec, Resolve
def conda_installed_files(prefix, exclude_self_build=False):
"""
Return the set of files which have been installed (using conda) into
a given prefix.
"""
res = set()
for dist, meta in iteritems(linked_data(prefix)):
if exclude_self_build and 'file_hash' in meta:
continue
res.update(set(meta.get('files', ())))
return res
url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
r'(?P<fn>[^/\\#]+\.tar\.bz2)'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
actions = defaultdict(list)
actions['PREFIX'] = prefix
fetch_recs = {}
for spec in specs:
if spec == '@EXPLICIT':
continue
if not is_url(spec):
spec = path_to_url(expand(spec))
# parse URL
m = url_pat.match(spec)
if m is None:
raise ParseError('Could not parse explicit URL: %s' % spec)
url_p, fn, md5sum = m.group('url_p'), m.group('fn'), m.group('md5')
url = join_url(url_p, fn)
# url_p is everything but the tarball_basename and the md5sum
# If the path points to a file in the package cache, we need to use
# the dist name that corresponds to that package. The MD5 may not
# match, but we will let PFE below worry about that
dist = None
if url.startswith('file:/'):
path = win_path_ok(url_to_path(url))
if dirname(path) in context.pkgs_dirs:
if not exists(path):
raise CondaFileNotFoundError(path)
pc_entry = PackageCache.tarball_file_in_cache(path)
dist = pc_entry.dist
url = dist.to_url() or pc_entry.get_urls_txt_value()
md5sum = md5sum or pc_entry.md5sum
dist = dist or Dist(url)
fetch_recs[dist] = {'md5': md5sum, 'url': url}
# perform any necessary fetches and extractions
if verbose:
from .console import setup_verbose_handlers
setup_verbose_handlers()
link_dists = tuple(iterkeys(fetch_recs))
pfe = ProgressiveFetchExtract(fetch_recs, link_dists)
pfe.execute()
# Now get the index---but the only index we need is the package cache
index = {}
supplement_index_with_cache(index, ())
# unlink any installed packages with same package name
link_names = {index[d]['name'] for d in link_dists}
actions[UNLINK].extend(d for d, r in iteritems(linked_data(prefix))
if r['name'] in link_names)
actions[LINK].extend(link_dists)
execute_actions(actions, index, verbose=verbose)
return actions
def rel_path(prefix, path, windows_forward_slashes=True):
res = path[len(prefix) + 1:]
if on_win and windows_forward_slashes:
res = res.replace('\\', '/')
return res
def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=True):
"""
Return the set of all files in a given prefix directory.
"""
res = set()
prefix = abspath(prefix)
ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
'.unionfs', '.nonadmin'}
binignore = {'conda', 'activate', 'deactivate'}
if sys.platform == 'darwin':
ignore.update({'python.app', 'Launcher.app'})
for fn in os.listdir(prefix):
if ignore_predefined_files and fn in ignore:
continue
if isfile(join(prefix, fn)):
res.add(fn)
continue
for root, dirs, files in os.walk(join(prefix, fn)):
should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
for fn2 in files:
if should_ignore and fn2 in binignore:
continue
res.add(relpath(join(root, fn2), prefix))
for dn in dirs:
path = join(root, dn)
if islink(path):
res.add(relpath(path, prefix))
if on_win and windows_forward_slashes:
return {path.replace('\\', '/') for path in res}
else:
return res
def untracked(prefix, exclude_self_build=False):
"""
Return (the set) of all untracked files for a given prefix.
"""
conda_files = conda_installed_files(prefix, exclude_self_build)
return {path for path in walk_prefix(prefix) - conda_files
if not (path.endswith('~') or
(sys.platform == 'darwin' and path.endswith('.DS_Store')) or
(path.endswith('.pyc') and path[:-1] in conda_files))}
def which_prefix(path):
"""
given the path (to a (presumably) conda installed file) return the
environment prefix in which the file in located
"""
prefix = abspath(path)
while True:
if isdir(join(prefix, 'conda-meta')):
# we found the it, so let's return it
return prefix
if prefix == dirname(prefix):
# we cannot chop off any more directories, so we didn't find it
return None
prefix = dirname(prefix)
def touch_nonadmin(prefix):
"""
Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
"""
if on_win and exists(join(context.root_dir, '.nonadmin')):
if not isdir(prefix):
os.makedirs(prefix)
with open(join(prefix, '.nonadmin'), 'w') as fo:
fo.write('')
def append_env(prefix):
dir_path = abspath(expanduser('~/.conda'))
try:
if not isdir(dir_path):
os.mkdir(dir_path)
with open(join(dir_path, 'environments.txt'), 'a') as f:
f.write('%s\n' % prefix)
except IOError:
pass
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
"""
clone existing prefix1 into new prefix2
"""
untracked_files = untracked(prefix1)
# Discard conda, conda-env and any package that depends on them
drecs = linked_data(prefix1)
filter = {}
found = True
while found:
found = False
for dist, info in iteritems(drecs):
name = info['name']
if name in filter:
continue
if name == 'conda':
filter['conda'] = dist
found = True
break
if name == "conda-env":
filter["conda-env"] = dist
found = True
break
for dep in info.get('depends', []):
if MatchSpec(dep).name in filter:
filter[name] = dist
found = True
if filter:
if not quiet:
fh = sys.stderr if context.json else sys.stdout
print('The following packages cannot be cloned out of the root environment:', file=fh)
for pkg in itervalues(filter):
print(' - ' + pkg.dist_name, file=fh)
drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
# Resolve URLs for packages that do not have URLs
r = None
index = {}
unknowns = [dist for dist, info in iteritems(drecs) if not info.get('url')]
notfound = []
if unknowns:
index_args = index_args or {}
index = get_index(**index_args)
r = Resolve(index, sort=True)
for dist in unknowns:
name = dist.dist_name
fn = dist.to_filename()
fkeys = [d for d in r.index.keys() if r.index[d]['fn'] == fn]
if fkeys:
del drecs[dist]
dist_str = sorted(fkeys, key=r.version_key, reverse=True)[0]
drecs[Dist(dist_str)] = r.index[dist_str]
else:
notfound.append(fn)
if notfound:
what = "Package%s " % ('' if len(notfound) == 1 else 's')
notfound = '\n'.join(' - ' + fn for fn in notfound)
msg = '%s missing in current %s channels:%s' % (what, context.subdir, notfound)
raise CondaRuntimeError(msg)
# Assemble the URL and channel list
urls = {}
for dist, info in iteritems(drecs):
fkey = dist
if fkey not in index:
index[fkey] = IndexRecord.from_objects(info, not_fetched=True)
r = None
urls[dist] = info['url']
if r is None:
r = Resolve(index)
dists = r.dependency_sort({d.quad[0]: d for d in urls.keys()})
urls = [urls[d] for d in dists]
if verbose:
print('Packages: %d' % len(dists))
print('Files: %d' % len(untracked_files))
for f in untracked_files:
src = join(prefix1, f)
dst = join(prefix2, f)
dst_dir = dirname(dst)
if islink(dst_dir) or isfile(dst_dir):
rm_rf(dst_dir)
if not isdir(dst_dir):
os.makedirs(dst_dir)
if islink(src):
os.symlink(os.readlink(src), dst)
continue
try:
with open(src, 'rb') as fi:
data = fi.read()
except IOError:
continue
try:
s = data.decode('utf-8')
s = s.replace(prefix1, prefix2)
data = s.encode('utf-8')
except UnicodeDecodeError: # data is binary
pass
with open(dst, 'wb') as fo:
fo.write(data)
shutil.copystat(src, dst)
actions = explicit(urls, prefix2, verbose=not quiet, index=index,
force_extract=False, index_args=index_args)
return actions, untracked_files
def make_icon_url(info):
if info.get('channel') and info.get('icon'):
base_url = dirname(info['channel'])
icon_fn = info['icon']
# icon_cache_path = join(pkgs_dir, 'cache', icon_fn)
# if isfile(icon_cache_path):
# return url_path(icon_cache_path)
return '%s/icons/%s' % (base_url, icon_fn)
return ''
def list_prefixes():
# Lists all the prefixes that conda knows about.
for envs_dir in context.envs_dirs:
if not isdir(envs_dir):
continue
for dn in sorted(os.listdir(envs_dir)):
if dn.startswith('.'):
continue
prefix = join(envs_dir, dn)
if isdir(prefix):
prefix = join(envs_dir, dn)
yield prefix
yield context.root_dir
| conda/misc.py
--- a/conda/misc.py
+++ b/conda/misc.py
@@ -224,9 +224,10 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
if filter:
if not quiet:
- print('The following packages cannot be cloned out of the root environment:')
+ fh = sys.stderr if context.json else sys.stdout
+ print('The following packages cannot be cloned out of the root environment:', file=fh)
for pkg in itervalues(filter):
- print(' - ' + pkg.dist_name)
+ print(' - ' + pkg.dist_name, file=fh)
drecs = {dist: info for dist, info in iteritems(drecs) if info['name'] not in filter}
# Resolve URLs for packages that do not have URLs |
Conda 4.3.4 seems to be broken
```
bag@bag ~ % conda --version
conda 4.3.4
bag@bag ~ % conda install -y --file https://raw.githubusercontent.com/bioconda/bioconda-utils/master/bioconda_utils/bioconda_utils-requirements.txt
CondaError: Downloaded bytes did not match Content-Length
url: https://raw.githubusercontent.com/bioconda/bioconda-utils/master/bioconda_utils/bioconda_utils-requirements.txt
target_path: /tmp/tmpUp36pQ/bioconda_utils-requirements.txt
Content-Length: 195
downloaded bytes: 310
bag@bag ~ %
```
| conda/gateways/download.py
<|code_start|>
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import hashlib
from logging import getLogger
from os.path import exists, basename
from threading import Lock
import warnings
from requests.exceptions import ConnectionError, HTTPError, SSLError
from .. import CondaError
from .._vendor.auxlib.ish import dals
from ..base.context import context
from ..connection import CondaSession
from ..exceptions import BasicClobberError, CondaHTTPError, MD5MismatchError, maybe_raise
log = getLogger(__name__)
class SingleThreadCondaSession(CondaSession):
# according to http://stackoverflow.com/questions/18188044/is-the-session-object-from-pythons-requests-library-thread-safe # NOQA
# request's Session isn't thread-safe for us
_session = None
_mutex = Lock()
def __init__(self):
super(SingleThreadCondaSession, self).__init__()
def __enter__(self):
session = SingleThreadCondaSession._session
if session is None:
session = SingleThreadCondaSession._session = self
SingleThreadCondaSession._mutex.acquire()
return session
def __exit__(self, exc_type, exc_val, exc_tb):
SingleThreadCondaSession._mutex.release()
def disable_ssl_verify_warning():
try:
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
except ImportError:
pass
else:
warnings.simplefilter('ignore', InsecureRequestWarning)
def download(url, target_full_path, md5sum):
content_length = None
if exists(target_full_path):
maybe_raise(BasicClobberError(target_full_path, url, context))
if not context.ssl_verify:
disable_ssl_verify_warning()
try:
timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs
with SingleThreadCondaSession() as session:
resp = session.get(url, stream=True, proxies=session.proxies, timeout=timeout)
resp.raise_for_status()
content_length = int(resp.headers.get('Content-Length', 0))
if content_length:
getLogger('fetch.start').info((basename(target_full_path)[:14], content_length))
digest_builder = hashlib.new('md5')
try:
with open(target_full_path, 'wb') as fh:
streamed_bytes = 0
for chunk in resp.iter_content(2 ** 14):
streamed_bytes += len(chunk)
try:
fh.write(chunk)
except IOError as e:
message = "Failed to write to %(target_path)s\n errno: %(errno)d"
# TODO: make this CondaIOError
raise CondaError(message, target_path=target_full_path, errno=e.errno)
digest_builder.update(chunk)
if content_length and 0 <= streamed_bytes <= content_length:
getLogger('fetch.update').info(streamed_bytes)
if content_length and streamed_bytes != content_length:
# TODO: needs to be a more-specific error type
message = dals("""
Downloaded bytes did not match Content-Length
url: %(url)s
target_path: %(target_path)s
Content-Length: %(content_length)d
downloaded bytes: %(downloaded_bytes)d
""")
raise CondaError(message, url=url, target_path=target_full_path,
content_length=content_length,
downloaded_bytes=streamed_bytes)
except (IOError, OSError) as e:
if e.errno == 104:
# Connection reset by peer
log.debug("%s, trying again" % e)
raise
if md5sum and digest_builder.hexdigest() != md5sum:
log.debug("MD5 sums mismatch for download: %s (%s != %s), "
"trying again" % (url, digest_builder.hexdigest(), md5sum))
# TODO: refactor this exception
raise MD5MismatchError("MD5 sums mismatch for download: %s (%s != %s)"
% (url, digest_builder.hexdigest(), md5sum))
except (ConnectionError, HTTPError, SSLError) as e:
# status_code might not exist on SSLError
help_message = "An HTTP error occurred when trying to retrieve this URL.\n%r" % e
raise CondaHTTPError(help_message,
getattr(e.response, 'url', None),
getattr(e.response, 'status_code', None),
getattr(e.response, 'reason', None),
getattr(e.response, 'elapsed', None))
finally:
if content_length:
getLogger('fetch.stop').info(None)
<|code_end|>
| conda/gateways/download.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import hashlib
from logging import getLogger
from os.path import exists, basename
from threading import Lock
import warnings
from requests.exceptions import ConnectionError, HTTPError, SSLError
from .. import CondaError
from .._vendor.auxlib.ish import dals
from ..base.context import context
from ..connection import CondaSession
from ..exceptions import BasicClobberError, CondaHTTPError, MD5MismatchError, maybe_raise
log = getLogger(__name__)
class SingleThreadCondaSession(CondaSession):
# according to http://stackoverflow.com/questions/18188044/is-the-session-object-from-pythons-requests-library-thread-safe # NOQA
# request's Session isn't thread-safe for us
_session = None
_mutex = Lock()
def __init__(self):
super(SingleThreadCondaSession, self).__init__()
def __enter__(self):
session = SingleThreadCondaSession._session
if session is None:
session = SingleThreadCondaSession._session = self
SingleThreadCondaSession._mutex.acquire()
return session
def __exit__(self, exc_type, exc_val, exc_tb):
SingleThreadCondaSession._mutex.release()
def disable_ssl_verify_warning():
try:
from requests.packages.urllib3.connectionpool import InsecureRequestWarning
except ImportError:
pass
else:
warnings.simplefilter('ignore', InsecureRequestWarning)
def download(url, target_full_path, md5sum):
content_length = None
if exists(target_full_path):
maybe_raise(BasicClobberError(target_full_path, url, context))
if not context.ssl_verify:
disable_ssl_verify_warning()
try:
timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs
with SingleThreadCondaSession() as session:
resp = session.get(url, stream=True, proxies=session.proxies, timeout=timeout)
resp.raise_for_status()
content_length = int(resp.headers.get('Content-Length', 0))
if content_length:
getLogger('fetch.start').info((basename(target_full_path)[:14], content_length))
digest_builder = hashlib.new('md5')
try:
with open(target_full_path, 'wb') as fh:
streamed_bytes = 0
for chunk in resp.iter_content(2 ** 14):
streamed_bytes += len(chunk)
try:
fh.write(chunk)
except IOError as e:
message = "Failed to write to %(target_path)s\n errno: %(errno)d"
# TODO: make this CondaIOError
raise CondaError(message, target_path=target_full_path, errno=e.errno)
digest_builder.update(chunk)
if content_length and 0 <= streamed_bytes <= content_length:
getLogger('fetch.update').info(streamed_bytes)
if content_length and streamed_bytes != content_length:
# TODO: needs to be a more-specific error type
message = dals("""
Downloaded bytes did not match Content-Length
url: %(url)s
target_path: %(target_path)s
Content-Length: %(content_length)d
downloaded bytes: %(downloaded_bytes)d
""")
# raise CondaError(message, url=url, target_path=target_full_path,
# content_length=content_length,
# downloaded_bytes=streamed_bytes)
log.info(message)
except (IOError, OSError) as e:
if e.errno == 104:
# Connection reset by peer
log.debug("%s, trying again" % e)
raise
if md5sum and digest_builder.hexdigest() != md5sum:
log.debug("MD5 sums mismatch for download: %s (%s != %s), "
"trying again" % (url, digest_builder.hexdigest(), md5sum))
# TODO: refactor this exception
raise MD5MismatchError("MD5 sums mismatch for download: %s (%s != %s)"
% (url, digest_builder.hexdigest(), md5sum))
except (ConnectionError, HTTPError, SSLError) as e:
# status_code might not exist on SSLError
help_message = "An HTTP error occurred when trying to retrieve this URL.\n%r" % e
raise CondaHTTPError(help_message,
getattr(e.response, 'url', None),
getattr(e.response, 'status_code', None),
getattr(e.response, 'reason', None),
getattr(e.response, 'elapsed', None))
finally:
if content_length:
getLogger('fetch.stop').info(None)
| conda/gateways/download.py
--- a/conda/gateways/download.py
+++ b/conda/gateways/download.py
@@ -95,9 +95,10 @@ def download(url, target_full_path, md5sum):
Content-Length: %(content_length)d
downloaded bytes: %(downloaded_bytes)d
""")
- raise CondaError(message, url=url, target_path=target_full_path,
- content_length=content_length,
- downloaded_bytes=streamed_bytes)
+ # raise CondaError(message, url=url, target_path=target_full_path,
+ # content_length=content_length,
+ # downloaded_bytes=streamed_bytes)
+ log.info(message)
except (IOError, OSError) as e:
if e.errno == 104: |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.