Instruction stringlengths 6 217k | input_code stringlengths 0 1.21M | output_code stringlengths 10 1.22M | diff_patch stringlengths 0 759k |
|---|---|---|---|
Don't bail when a dependency can't be found
When a dependency for a package can't be found, conda bails completely, but this can happen e.g., just for some old builds of something. So we should just exclude any package like this from the solver.
| conda/resolve.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
# Module-level loggers. 'dotupdate', 'stdoutlog' and 'stderrlog' are conda's
# dedicated console/progress channels; setup_handlers() attaches handlers to
# them at import time.
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
    """Best-effort parse of *version* into a comparable NormalizedVersion.

    'rc' is rewritten to a high '.dev' suffix so release candidates sort
    before final releases.  If verlib cannot rationalize the resulting
    string, the (rewritten) string itself is returned as a fallback.
    """
    canonical = version.replace('rc', '.dev99999')
    if canonical.endswith('.dev'):
        canonical += '0'
    try:
        return verlib.NormalizedVersion(canonical)
    except verlib.IrrationalVersionError:
        # Not a rational version string; fall back to plain string compares.
        return canonical
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')


def ver_eval(version, constraint):
    """
    Return the Boolean result of a comparison between two versions, where
    the second argument includes the comparison operator.  For example,
    ver_eval('1.2', '>=1.1') will return True.

    Comparison is done on normalized versions first; if the normalized
    forms are not mutually comparable (TypeError), the raw strings are
    compared instead — exactly as each branch of the original if/elif
    chain did.

    Raises RuntimeError for an unparsable constraint or an operator other
    than ==, !=, >=, <=, > or <.
    """
    # Local import keeps this block self-contained without touching the
    # file-level import list.
    import operator
    ops = {'==': operator.eq, '!=': operator.ne,
           '>=': operator.ge, '<=': operator.le,
           '>': operator.gt, '<': operator.lt}
    m = const_pat.match(constraint)
    if m is None:
        raise RuntimeError("Did not recognize version specification: %r" %
                           constraint)
    op, b = m.groups()
    if op not in ops:
        raise RuntimeError("Did not recognize version comparison operator: %r" %
                           constraint)
    compare = ops[op]
    try:
        return compare(normalized_version(version), normalized_version(b))
    except TypeError:
        # Mixed NormalizedVersion/str operands: fall back to raw strings.
        return compare(version, b)
class VersionSpec(object):
    """One alternative of a version specification: either a comma-separated
    list of operator constraints (e.g. '>=1.1,<2') or a glob pattern
    (e.g. '1.7*')."""

    def __init__(self, spec):
        assert '|' not in spec
        if spec.startswith(('=', '<', '>', '!')):
            # Operator form: every comma-separated constraint must hold.
            self.regex = False
            self.constraints = spec.split(',')
        else:
            # Glob form: translate '.' and '*' into an anchored regex.
            self.regex = True
            escaped = spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(r'(%s)$' % escaped)

    def match(self, version):
        # True when *version* satisfies this spec alternative.
        if not self.regex:
            return all(ver_eval(version, c) for c in self.constraints)
        return bool(self.pat.match(version))
class MatchSpec(object):
    """A package requirement string of one to three space-separated parts:
    'name', 'name version-spec' ('|'-separated alternatives), or the fully
    explicit 'name version build'.  `strictness` is the number of parts."""

    def __init__(self, spec):
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3
        self.name = parts[0]
        if self.strictness == 2:
            self.vspecs = [VersionSpec(s) for s in parts[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])

    def match(self, fn):
        # Does the distribution filename *fn* satisfy this spec?
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 1:
            return True
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        return bool((version, build) == self.ver_build)

    def to_filename(self):
        # Only a fully explicit spec maps to a unique filename.
        if self.strictness != 3:
            return None
        return self.name + '-%s-%s.tar.bz2' % self.ver_build

    def __eq__(self, other):
        return self.spec == other.spec

    def __hash__(self):
        return hash(self.spec)

    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)

    def __str__(self):
        return self.spec
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """
    def __init__(self, fn, info):
        # fn: distribution filename; info: its metadata dict from the index.
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        self.norm_version = normalized_version(self.version)

    # http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp
    # def __cmp__(self, other):
    #     if self.name != other.name:
    #         raise ValueError('cannot compare packages with different '
    #                          'names: %r %r' % (self.fn, other.fn))
    #     try:
    #         return cmp((self.norm_version, self.build_number),
    #                    (other.norm_version, other.build_number))
    #     except TypeError:
    #         return cmp((self.version, self.build_number),
    #                    (other.version, other.build_number))

    def __lt__(self, other):
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        # NOTE(review): on a (version, build_number) tie the build strings
        # are compared in *reverse* order (other.build appears on the left).
        # This looks deliberate but should be confirmed against callers.
        try:
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            # Mixed normalized/raw versions are unorderable; fall back to
            # raw version strings (without the build-string tiebreak).
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))

    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))

    def __gt__(self, other):
        # Derived from __lt__/__eq__ so all three stay consistent.
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self < other or self == other

    def __ge__(self, other):
        return self > other or self == other

    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
    # NOTE(review): leading whitespace was lost in this paste; indentation
    # below is reconstructed from the syntax.
    """Dependency resolver: given a package index (filename -> metadata),
    translates specs into a pseudo-boolean SAT problem and extracts a
    minimal set of distribution filenames to install."""

    def __init__(self, index):
        self.index = index
        self.groups = defaultdict(list)  # map name to list of filenames
        for fn, info in iteritems(index):
            self.groups[info['name']].append(fn)
        self.msd_cache = {}  # fn -> list of MatchSpec (dependency cache)

    def find_matches(self, ms):
        # Yield (sorted) filenames in ms's name group that satisfy ms.
        for fn in sorted(self.groups[ms.name]):
            if ms.match(fn):
                yield fn

    def ms_depends(self, fn):
        # Return fn's dependencies as MatchSpec objects.
        # the reason we don't use @memoize here is to allow resetting the
        # cache using self.msd_cache = {}, which is used during testing
        try:
            res = self.msd_cache[fn]
        except KeyError:
            depends = self.index[fn]['depends']
            res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
        return res

    @memoize
    def features(self, fn):
        # Features of a package, from its space-separated 'features' entry.
        return set(self.index[fn].get('features', '').split())

    @memoize
    def track_features(self, fn):
        # Features this package turns on globally when installed.
        return set(self.index[fn].get('track_features', '').split())

    @memoize
    def get_pkgs(self, ms, max_only=False):
        # All Package objects matching ms; with max_only, only those tied
        # with the maximum (name, version, build number).
        # NOTE(review): raising RuntimeError here aborts the entire solve
        # even when only one stale candidate has no match — consider a
        # dedicated exception callers can catch and skip.
        pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
        if not pkgs:
            raise RuntimeError("No packages found matching: %s" % ms)
        if max_only:
            maxpkg = max(pkgs)
            ret = []
            for pkg in pkgs:
                try:
                    if (pkg.name, pkg.norm_version, pkg.build_number) ==\
                       (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                        ret.append(pkg)
                except TypeError:
                    # They are not equal
                    pass
            return ret
        return pkgs

    def get_max_dists(self, ms):
        # Filenames of the maximal packages matching ms.
        pkgs = self.get_pkgs(ms, max_only=True)
        if not pkgs:
            raise RuntimeError("No packages found matching: %s" % ms)
        for pkg in pkgs:
            yield pkg.fn

    def all_deps(self, root_fn, max_only=False):
        # Transitive dependency closure of root_fn: map fn -> Package.
        res = {}

        def add_dependents(fn1, max_only=False):
            for ms in self.ms_depends(fn1):
                for pkg2 in self.get_pkgs(ms, max_only=max_only):
                    if pkg2.fn in res:
                        continue
                    res[pkg2.fn] = pkg2
                    # Fully explicit (strictness 3) specs are leaves here.
                    if ms.strictness < 3:
                        add_dependents(pkg2.fn, max_only=max_only)

        add_dependents(root_fn, max_only=max_only)
        return res

    def gen_clauses(self, v, dists, specs, features):
        # Yield CNF clauses over the variable numbering v (fn -> var).
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)

        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield [-v1, -v2]

        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield clause

                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield clause

        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [v[fn] for fn in self.find_matches(ms)
                          if fn in dists and feat in self.features(fn)]
                if len(clause) > 0:
                    yield clause

            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield [-v[fn]]

            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms)
                      if fn in dists]
            assert len(clause) >= 1, ms
            yield clause

    def generate_version_eq(self, v, dists, include0=False):
        # Build the linear objective: each package gets a weight equal to
        # how many distinct (version, build_number) steps it lies below the
        # newest one of its name.  Returns (terms, maximum possible rhs).
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)
        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                        dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # Unorderable version mix counts as a new step too.
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i
        return eq, max_rhs

    def get_dists(self, specs, max_only=False):
        # Map fn -> Package for all specs plus their transitive closures.
        # NOTE(review): any missing dependency anywhere in a closure makes
        # get_pkgs raise and aborts the whole solve, even for old builds
        # that could simply be excluded from the candidate set.
        dists = {}
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                if pkg.fn in dists:
                    continue
                dists.update(self.all_deps(pkg.fn, max_only=max_only))
                dists[pkg.fn] = pkg
        return dists

    def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
        # SAT-based solve; returns a list of filenames (or a list of such
        # lists when returnall=True).  Exits via guess_bad_solve when
        # unsatisfiable and guess is True.
        log.debug("Solving for %s" % str(specs))

        # First try doing it the "old way", i.e., just look at the most recent
        # version of each package from the specs. This doesn't handle the more
        # complicated cases that the pseudo-boolean solver does, but it's also
        # much faster when it does work.
        dists = self.get_dists(specs, max_only=True)

        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1
        dotlog.debug("Solving using max dists only")
        clauses = self.gen_clauses(v, dists, specs, features)
        solutions = min_sat(clauses)
        if len(solutions) == 1:
            ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
            if returnall:
                return [ret]
            return ret

        # Fall back to the full candidate set and the optimizing solver.
        dists = self.get_dists(specs)

        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1
        clauses = list(self.gen_clauses(v, dists, specs, features))
        if not clauses:
            if returnall:
                return [[]]
            return []
        eq, max_rhs = self.generate_version_eq(v, dists)

        # Check the common case first
        dotlog.debug("Building the constraint with rhs: [0, 0]")
        constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))
        # Only relevant for build_BDD
        if constraints and constraints[0] == [false]:
            # XXX: This should *never* happen. build_BDD only returns false
            # when the linear constraint is unsatisfiable, but any linear
            # constraint can equal 0, by setting all the variables to 0.
            solution = []
        else:
            if constraints and constraints[0] == [true]:
                constraints = []
            dotlog.debug("Checking for solutions with rhs: [0, 0]")
            solution = sat(clauses + constraints)

        if not solution:
            # Second common case, check if it's unsatisfiable
            dotlog.debug("Checking for unsatisfiability")
            solution = sat(clauses)
            if not solution:
                if guess:
                    stderrlog.info('\nError: Unsatisfiable package '
                                   'specifications.\nGenerating hint: ')
                    sys.exit(self.guess_bad_solve(specs, features))
                raise RuntimeError("Unsatisfiable package specifications")

            def version_constraints(lo, hi):
                return list(generate_constraints(eq, m, [lo, hi], alg=alg))

            log.debug("Bisecting the version constraint")
            constraints = bisect_constraints(0, max_rhs, clauses, version_constraints)

        dotlog.debug("Finding the minimal solution")
        solutions = min_sat(clauses + constraints, N=m+1)
        assert solutions, (specs, features)
        if len(solutions) > 1:
            print('Warning:', len(solutions), "possible package resolutions:")
            for sol in solutions:
                print('\t', [w[lit] for lit in sol if 0 < lit <= m])
        if returnall:
            return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
        return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]

    def guess_bad_solve(self, specs, features):
        # Produce a human-readable hint about which spec(s) conflict, by
        # searching for the largest satisfiable subset of specs.
        # TODO: Check features as well
        hint = []
        # Try to find the largest satisfiable subset
        found = False
        for i in range(len(specs), 0, -1):
            if found:
                break
            for comb in combinations(specs, i):
                try:
                    self.solve2(comb, features, guess=False)
                except RuntimeError:
                    pass
                else:
                    rem = set(specs) - set(comb)
                    rem.discard('conda')
                    if len(rem) == 1:
                        hint.append("%s" % rem.pop())
                    else:
                        hint.append("%s" % ' and '.join(rem))
                    found = True
        if not hint:
            return ''
        if len(hint) == 1:
            return ("\nHint: %s has a conflict with the remaining packages" %
                    hint[0])
        return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
- %s""" % '\n - '.join(hint))

    def explicit(self, specs):
        """
        Given the specifications, return:
          A. if one explicit specification (strictness=3) is given, and
             all dependencies of this package are explicit as well ->
             return the filenames of those dependencies (as well as the
             explicit specification)
          B. if not one explicit specifications are given ->
             return the filenames of those (not their dependencies)
          C. None in all other cases
        """
        if len(specs) == 1:
            ms = MatchSpec(specs[0])
            fn = ms.to_filename()
            if fn is None:
                return None
            res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
            res.append(fn)
        else:
            res = [MatchSpec(spec).to_filename() for spec in specs
                   if spec != 'conda']
        if None in res:
            return None
        res.sort()
        log.debug('explicit(%r) finished' % specs)
        return res

    @memoize
    def sum_matches(self, fn1, fn2):
        # How many of fn1's dependency specs does fn2 satisfy?
        return sum(ms.match(fn2) for ms in self.ms_depends(fn1))

    def find_substitute(self, installed, features, fn, max_only=False):
        """
        Find a substitute package for `fn` (given `installed` packages)
        which does *NOT* have `features`. If found, the substitute will
        have the same package name and version and its dependencies will
        match the installed packages as closely as possible.
        If no substitute is found, None is returned.
        """
        name, version, unused_build = fn.rsplit('-', 2)
        candidates = {}
        for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
            fn1 = pkg.fn
            if self.features(fn1).intersection(features):
                continue
            key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
            candidates[key] = fn1
        if candidates:
            maxkey = max(candidates)
            return candidates[maxkey]
        else:
            return None

    def installed_features(self, installed):
        """
        Return the set of all features of all `installed` packages,
        """
        res = set()
        for fn in installed:
            try:
                res.update(self.features(fn))
            except KeyError:
                # fn is not in the index; best-effort, so skip it.
                pass
        return res

    def update_with_features(self, fn, features):
        # Merge the best-matching 'with_features_depends' entry of fn into
        # its cached dependency list.
        with_features = self.index[fn].get('with_features_depends')
        if with_features is None:
            return
        key = ''
        for fstr in with_features:
            fs = set(fstr.split())
            # Pick the largest feature set fully contained in `features`.
            if fs <= features and len(fs) > len(set(key.split())):
                key = fstr
        if not key:
            return
        d = {ms.name: ms for ms in self.ms_depends(fn)}
        for spec in with_features[key]:
            ms = MatchSpec(spec)
            d[ms.name] = ms
        self.msd_cache[fn] = d.values()

    def solve(self, specs, installed=None, features=None, max_only=False):
        # Top-level entry point: collect tracked features, apply feature-
        # dependent dependencies, then try the fast explicit path before
        # falling back to the SAT solve.
        if installed is None:
            installed = []
        if features is None:
            features = self.installed_features(installed)
        for spec in specs:
            ms = MatchSpec(spec)
            for pkg in self.get_pkgs(ms, max_only=max_only):
                fn = pkg.fn
                features.update(self.track_features(fn))
        log.debug('specs=%r features=%r' % (specs, features))
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                fn = pkg.fn
                self.update_with_features(fn, features)

        stdoutlog.info("Solving package specifications: ")
        try:
            return self.explicit(specs) or self.solve2(specs, features)
        except RuntimeError:
            stdoutlog.info('\n')
            raise
if __name__ == '__main__':
    # Ad-hoc manual test driver: solve the given specs against the test
    # index shipped with the repository.
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec

    with open('../tests/index.json') as fi:
        r = Resolve(json.load(fi))

    p = OptionParser(usage="usage: %prog [options] SPEC(s)")
    p.add_option("--mkl", action="store_true")
    opts, args = p.parse_args()

    features = set(['mkl']) if opts.mkl else set()
    specs = [arg2spec(arg) for arg in args]
    pprint(r.solve(specs, [], features))
<|code_end|>
| conda/resolve.py
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
version = version.replace('rc', '.dev99999')
if version.endswith('.dev'):
version += '0'
try:
return verlib.NormalizedVersion(version)
except verlib.IrrationalVersionError:
return version
class NoPackagesFound(RuntimeError):
    """Raised when no package in the index satisfies a spec.

    `pkg` carries the offending spec string (or None) so callers can
    report it — or skip just that candidate instead of aborting the
    whole solve.
    """
    def __init__(self, msg, pkg):
        super(NoPackagesFound, self).__init__(msg)
        self.pkg = pkg
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')
def ver_eval(version, constraint):
"""
return the Boolean result of a comparison between two versions, where the
second argument includes the comparison operator. For example,
ver_eval('1.2', '>=1.1') will return True.
"""
a = version
m = const_pat.match(constraint)
if m is None:
raise RuntimeError("Did not recognize version specification: %r" %
constraint)
op, b = m.groups()
na = normalized_version(a)
nb = normalized_version(b)
if op == '==':
try:
return na == nb
except TypeError:
return a == b
elif op == '>=':
try:
return na >= nb
except TypeError:
return a >= b
elif op == '<=':
try:
return na <= nb
except TypeError:
return a <= b
elif op == '>':
try:
return na > nb
except TypeError:
return a > b
elif op == '<':
try:
return na < nb
except TypeError:
return a < b
elif op == '!=':
try:
return na != nb
except TypeError:
return a != b
else:
raise RuntimeError("Did not recognize version comparison operator: %r" %
constraint)
class VersionSpec(object):
def __init__(self, spec):
assert '|' not in spec
if spec.startswith(('=', '<', '>', '!')):
self.regex = False
self.constraints = spec.split(',')
else:
self.regex = True
rx = spec.replace('.', r'\.')
rx = rx.replace('*', r'.*')
rx = r'(%s)$' % rx
self.pat = re.compile(rx)
def match(self, version):
if self.regex:
return bool(self.pat.match(version))
else:
return all(ver_eval(version, c) for c in self.constraints)
class MatchSpec(object):
def __init__(self, spec):
self.spec = spec
parts = spec.split()
self.strictness = len(parts)
assert 1 <= self.strictness <= 3
self.name = parts[0]
if self.strictness == 2:
self.vspecs = [VersionSpec(s) for s in parts[1].split('|')]
elif self.strictness == 3:
self.ver_build = tuple(parts[1:3])
def match(self, fn):
assert fn.endswith('.tar.bz2')
name, version, build = fn[:-8].rsplit('-', 2)
if name != self.name:
return False
if self.strictness == 1:
return True
elif self.strictness == 2:
return any(vs.match(version) for vs in self.vspecs)
elif self.strictness == 3:
return bool((version, build) == self.ver_build)
def to_filename(self):
if self.strictness == 3:
return self.name + '-%s-%s.tar.bz2' % self.ver_build
else:
return None
def __eq__(self, other):
return self.spec == other.spec
def __hash__(self):
return hash(self.spec)
def __repr__(self):
return 'MatchSpec(%r)' % (self.spec)
def __str__(self):
return self.spec
class Package(object):
"""
The only purpose of this class is to provide package objects which
are sortable.
"""
def __init__(self, fn, info):
self.fn = fn
self.name = info['name']
self.version = info['version']
self.build_number = info['build_number']
self.build = info['build']
self.channel = info.get('channel')
self.norm_version = normalized_version(self.version)
# http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp
# def __cmp__(self, other):
# if self.name != other.name:
# raise ValueError('cannot compare packages with different '
# 'names: %r %r' % (self.fn, other.fn))
# try:
# return cmp((self.norm_version, self.build_number),
# (other.norm_version, other.build_number))
# except TypeError:
# return cmp((self.version, self.build_number),
# (other.version, other.build_number))
def __lt__(self, other):
if self.name != other.name:
raise TypeError('cannot compare packages with different '
'names: %r %r' % (self.fn, other.fn))
try:
return ((self.norm_version, self.build_number, other.build) <
(other.norm_version, other.build_number, self.build))
except TypeError:
return ((self.version, self.build_number) <
(other.version, other.build_number))
def __eq__(self, other):
if not isinstance(other, Package):
return False
if self.name != other.name:
return False
try:
return ((self.norm_version, self.build_number, self.build) ==
(other.norm_version, other.build_number, other.build))
except TypeError:
return ((self.version, self.build_number, self.build) ==
(other.version, other.build_number, other.build))
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self < other or self == other
def __ge__(self, other):
return self > other or self == other
def __repr__(self):
return '<Package %s>' % self.fn
class Resolve(object):
def __init__(self, index):
self.index = index
self.groups = defaultdict(list) # map name to list of filenames
for fn, info in iteritems(index):
self.groups[info['name']].append(fn)
self.msd_cache = {}
def find_matches(self, ms):
for fn in sorted(self.groups[ms.name]):
if ms.match(fn):
yield fn
def ms_depends(self, fn):
# the reason we don't use @memoize here is to allow resetting the
# cache using self.msd_cache = {}, which is used during testing
try:
res = self.msd_cache[fn]
except KeyError:
depends = self.index[fn]['depends']
res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
return res
@memoize
def features(self, fn):
return set(self.index[fn].get('features', '').split())
@memoize
def track_features(self, fn):
return set(self.index[fn].get('track_features', '').split())
@memoize
def get_pkgs(self, ms, max_only=False):
    # All Package objects matching `ms`; with max_only, only those tied
    # with the maximum (name, version, build number).  Raises
    # NoPackagesFound (carrying ms.spec) so callers can exclude the
    # offending candidate rather than bail out entirely.
    pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
    if not pkgs:
        raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
    if max_only:
        maxpkg = max(pkgs)
        ret = []
        for pkg in pkgs:
            try:
                if (pkg.name, pkg.norm_version, pkg.build_number) ==\
                   (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                    ret.append(pkg)
            except TypeError:
                # They are not equal
                pass
        return ret
    return pkgs
def get_max_dists(self, ms):
    # Filenames of the maximal packages matching `ms`.
    # NOTE(review): get_pkgs itself raises NoPackagesFound when nothing
    # matches, so the guard below appears to be unreachable — confirm.
    pkgs = self.get_pkgs(ms, max_only=True)
    if not pkgs:
        raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
    for pkg in pkgs:
        yield pkg.fn
def all_deps(self, root_fn, max_only=False):
res = {}
def add_dependents(fn1, max_only=False):
for ms in self.ms_depends(fn1):
for pkg2 in self.get_pkgs(ms, max_only=max_only):
if pkg2.fn in res:
continue
res[pkg2.fn] = pkg2
if ms.strictness < 3:
add_dependents(pkg2.fn, max_only=max_only)
add_dependents(root_fn, max_only=max_only)
return res
def gen_clauses(self, v, dists, specs, features):
groups = defaultdict(list) # map name to list of filenames
for fn in dists:
groups[self.index[fn]['name']].append(fn)
for filenames in itervalues(groups):
# ensure packages with the same name conflict
for fn1 in filenames:
v1 = v[fn1]
for fn2 in filenames:
v2 = v[fn2]
if v1 < v2:
# NOT (fn1 AND fn2)
# e.g. NOT (numpy-1.6 AND numpy-1.7)
yield [-v1, -v2]
for fn1 in dists:
for ms in self.ms_depends(fn1):
# ensure dependencies are installed
# e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
clause = [-v[fn1]]
for fn2 in self.find_matches(ms):
if fn2 in dists:
clause.append(v[fn2])
assert len(clause) > 1, '%s %r' % (fn1, ms)
yield clause
for feat in features:
# ensure that a package (with required name) which has
# the feature is installed
# e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
clause = [-v[fn1]]
for fn2 in groups[ms.name]:
if feat in self.features(fn2):
clause.append(v[fn2])
if len(clause) > 1:
yield clause
for spec in specs:
ms = MatchSpec(spec)
# ensure that a matching package with the feature is installed
for feat in features:
# numpy-1.7[mkl] OR numpy-1.8[mkl]
clause = [v[fn] for fn in self.find_matches(ms)
if fn in dists and feat in self.features(fn)]
if len(clause) > 0:
yield clause
# Don't instlal any package that has a feature that wasn't requested.
for fn in self.find_matches(ms):
if fn in dists and self.features(fn) - features:
yield [-v[fn]]
# finally, ensure a matching package itself is installed
# numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
# numpy-1.7-py27[mkl] OR ...
clause = [v[fn] for fn in self.find_matches(ms)
if fn in dists]
assert len(clause) >= 1, ms
yield clause
def generate_version_eq(self, v, dists, include0=False):
groups = defaultdict(list) # map name to list of filenames
for fn in sorted(dists):
groups[self.index[fn]['name']].append(fn)
eq = []
max_rhs = 0
for filenames in sorted(itervalues(groups)):
pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
i = 0
prev = pkgs[0]
for pkg in pkgs:
try:
if (dists[pkg].name, dists[pkg].norm_version,
dists[pkg].build_number) != (dists[prev].name,
dists[prev].norm_version, dists[prev].build_number):
i += 1
except TypeError:
i += 1
if i or include0:
eq += [(i, v[pkg])]
prev = pkg
max_rhs += i
return eq, max_rhs
def get_dists(self, specs, max_only=False):
    # Map fn -> Package for all specs plus their transitive dependency
    # closures.  Candidates whose closure references a nonexistent
    # package (e.g. stale old builds) are dropped; only when *every*
    # candidate for a spec is dropped do we raise.
    dists = {}
    for spec in specs:
        found = False
        notfound = []
        for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
            if pkg.fn in dists:
                found = True
                continue
            try:
                dists.update(self.all_deps(pkg.fn, max_only=max_only))
            except NoPackagesFound as e:
                # Ignore any package that has nonexisting dependencies.
                notfound.append(e.pkg)
            else:
                dists[pkg.fn] = pkg
                found = True
        if not found:
            raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
    return dists
def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
    # SAT-based solve; returns a list of filenames (or a list of such
    # lists when returnall=True).
    log.debug("Solving for %s" % str(specs))

    # First try doing it the "old way", i.e., just look at the most recent
    # version of each package from the specs. This doesn't handle the more
    # complicated cases that the pseudo-boolean solver does, but it's also
    # much faster when it does work.
    try:
        dists = self.get_dists(specs, max_only=True)
    except NoPackagesFound:
        # Handle packages that are not included because some dependencies
        # couldn't be found: fall through to the full solve below.
        pass
    else:
        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1
        dotlog.debug("Solving using max dists only")
        clauses = self.gen_clauses(v, dists, specs, features)
        solutions = min_sat(clauses)
        if len(solutions) == 1:
            ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
            if returnall:
                return [ret]
            return ret

    # Full candidate set plus the optimizing (version-minimizing) solver.
    dists = self.get_dists(specs)

    v = {}  # map fn to variable number
    w = {}  # map variable number to fn
    i = -1  # in case the loop doesn't run
    for i, fn in enumerate(sorted(dists)):
        v[fn] = i + 1
        w[i + 1] = fn
    m = i + 1
    clauses = list(self.gen_clauses(v, dists, specs, features))
    if not clauses:
        if returnall:
            return [[]]
        return []
    eq, max_rhs = self.generate_version_eq(v, dists)

    # Check the common case first
    dotlog.debug("Building the constraint with rhs: [0, 0]")
    constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))
    # Only relevant for build_BDD
    if constraints and constraints[0] == [false]:
        # XXX: This should *never* happen. build_BDD only returns false
        # when the linear constraint is unsatisfiable, but any linear
        # constraint can equal 0, by setting all the variables to 0.
        solution = []
    else:
        if constraints and constraints[0] == [true]:
            constraints = []
        dotlog.debug("Checking for solutions with rhs: [0, 0]")
        solution = sat(clauses + constraints)

    if not solution:
        # Second common case, check if it's unsatisfiable
        dotlog.debug("Checking for unsatisfiability")
        solution = sat(clauses)
        if not solution:
            if guess:
                stderrlog.info('\nError: Unsatisfiable package '
                               'specifications.\nGenerating hint: ')
                sys.exit(self.guess_bad_solve(specs, features))
            raise RuntimeError("Unsatisfiable package specifications")

        def version_constraints(lo, hi):
            return list(generate_constraints(eq, m, [lo, hi], alg=alg))

        log.debug("Bisecting the version constraint")
        constraints = bisect_constraints(0, max_rhs, clauses, version_constraints)

    dotlog.debug("Finding the minimal solution")
    solutions = min_sat(clauses + constraints, N=m+1)
    assert solutions, (specs, features)
    if len(solutions) > 1:
        print('Warning:', len(solutions), "possible package resolutions:")
        for sol in solutions:
            print('\t', [w[lit] for lit in sol if 0 < lit <= m])
    if returnall:
        return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
    return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
def guess_bad_solve(self, specs, features):
# TODO: Check features as well
hint = []
# Try to find the largest satisfiable subset
found = False
for i in range(len(specs), 0, -1):
if found:
break
for comb in combinations(specs, i):
try:
self.solve2(comb, features, guess=False)
except RuntimeError:
pass
else:
rem = set(specs) - set(comb)
rem.discard('conda')
if len(rem) == 1:
hint.append("%s" % rem.pop())
else:
hint.append("%s" % ' and '.join(rem))
found = True
if not hint:
return ''
if len(hint) == 1:
return ("\nHint: %s has a conflict with the remaining packages" %
hint[0])
return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
- %s""" % '\n - '.join(hint))
def explicit(self, specs):
"""
Given the specifications, return:
A. if one explicit specification (strictness=3) is given, and
all dependencies of this package are explicit as well ->
return the filenames of those dependencies (as well as the
explicit specification)
B. if not one explicit specifications are given ->
return the filenames of those (not thier dependencies)
C. None in all other cases
"""
if len(specs) == 1:
ms = MatchSpec(specs[0])
fn = ms.to_filename()
if fn is None:
return None
res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
res.append(fn)
else:
res = [MatchSpec(spec).to_filename() for spec in specs
if spec != 'conda']
if None in res:
return None
res.sort()
log.debug('explicit(%r) finished' % specs)
return res
@memoize
def sum_matches(self, fn1, fn2):
return sum(ms.match(fn2) for ms in self.ms_depends(fn1))
def find_substitute(self, installed, features, fn, max_only=False):
"""
Find a substitute package for `fn` (given `installed` packages)
which does *NOT* have `features`. If found, the substitute will
have the same package name and version and its dependencies will
match the installed packages as closely as possible.
If no substitute is found, None is returned.
"""
name, version, unused_build = fn.rsplit('-', 2)
candidates = {}
for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
fn1 = pkg.fn
if self.features(fn1).intersection(features):
continue
key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
candidates[key] = fn1
if candidates:
maxkey = max(candidates)
return candidates[maxkey]
else:
return None
def installed_features(self, installed):
"""
Return the set of all features of all `installed` packages,
"""
res = set()
for fn in installed:
try:
res.update(self.features(fn))
except KeyError:
pass
return res
def update_with_features(self, fn, features):
    """
    If `fn` declares 'with_features_depends', override its cached
    dependency list with the variant matching the largest subset of
    `features`.  No-op when the package has no feature variants or
    none of them applies.
    """
    with_features = self.index[fn].get('with_features_depends')
    if with_features is None:
        return
    # Choose the feature-string key whose feature set is contained in
    # `features` and is the largest such set seen so far.
    key = ''
    for fstr in with_features:
        fs = set(fstr.split())
        if fs <= features and len(fs) > len(set(key.split())):
            key = fstr
    if not key:
        return
    d = {ms.name: ms for ms in self.ms_depends(fn)}
    for spec in with_features[key]:
        ms = MatchSpec(spec)
        d[ms.name] = ms
    # Bug fix: store a real list rather than a dict view.  In Python 3,
    # d.values() is a lazy view object, not a stable snapshot list.
    self.msd_cache[fn] = list(d.values())
def solve(self, specs, installed=None, features=None, max_only=False):
    """
    Solve the given specifications against the index, returning a list
    of package filenames, or raise RuntimeError when unsolvable.

    When `features` is not given it is derived from the `installed`
    packages; tracked features of all matching packages are added in.
    """
    if installed is None:
        installed = []
    if features is None:
        features = self.installed_features(installed)
    # First pass: collect every feature tracked by any candidate package.
    for spec in specs:
        ms = MatchSpec(spec)
        for pkg in self.get_pkgs(ms, max_only=max_only):
            fn = pkg.fn
            features.update(self.track_features(fn))
    log.debug('specs=%r  features=%r' % (specs, features))
    # Second pass: apply feature-specific dependency overrides.
    for spec in specs:
        for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
            fn = pkg.fn
            self.update_with_features(fn, features)

    stdoutlog.info("Solving package specifications: ")
    try:
        # Fast path (all-explicit specs) first, full SAT solve otherwise.
        return self.explicit(specs) or self.solve2(specs, features)
    except RuntimeError:
        stdoutlog.info('\n')
        raise
if __name__ == '__main__':
    # Ad-hoc manual test driver: solve specs from the command line
    # against the checked-in test index.
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec

    with open('../tests/index.json') as fi:
        r = Resolve(json.load(fi))

    p = OptionParser(usage="usage: %prog [options] SPEC(s)")
    p.add_option("--mkl", action="store_true")
    opts, args = p.parse_args()

    features = set(['mkl']) if opts.mkl else set()
    specs = [arg2spec(arg) for arg in args]
    pprint(r.solve(specs, [], features))
| conda/resolve.py
--- a/conda/resolve.py
+++ b/conda/resolve.py
@@ -30,6 +30,11 @@ def normalized_version(version):
return version
+class NoPackagesFound(RuntimeError):
+ def __init__(self, msg, pkg):
+ super(NoPackagesFound, self).__init__(msg)
+ self.pkg = pkg
+
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')
def ver_eval(version, constraint):
"""
@@ -243,7 +248,7 @@ def track_features(self, fn):
def get_pkgs(self, ms, max_only=False):
pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
if not pkgs:
- raise RuntimeError("No packages found matching: %s" % ms)
+ raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
if max_only:
maxpkg = max(pkgs)
ret = []
@@ -262,7 +267,7 @@ def get_pkgs(self, ms, max_only=False):
def get_max_dists(self, ms):
pkgs = self.get_pkgs(ms, max_only=True)
if not pkgs:
- raise RuntimeError("No packages found matching: %s" % ms)
+ raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
for pkg in pkgs:
yield pkg.fn
@@ -371,11 +376,22 @@ def generate_version_eq(self, v, dists, include0=False):
def get_dists(self, specs, max_only=False):
dists = {}
for spec in specs:
+ found = False
+ notfound = []
for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
if pkg.fn in dists:
+ found = True
continue
- dists.update(self.all_deps(pkg.fn, max_only=max_only))
- dists[pkg.fn] = pkg
+ try:
+ dists.update(self.all_deps(pkg.fn, max_only=max_only))
+ except NoPackagesFound as e:
+ # Ignore any package that has nonexisting dependencies.
+ notfound.append(e.pkg)
+ else:
+ dists[pkg.fn] = pkg
+ found = True
+ if not found:
+ raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
return dists
@@ -387,25 +403,31 @@ def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
# complicated cases that the pseudo-boolean solver does, but it's also
# much faster when it does work.
- dists = self.get_dists(specs, max_only=True)
-
- v = {} # map fn to variable number
- w = {} # map variable number to fn
- i = -1 # in case the loop doesn't run
- for i, fn in enumerate(sorted(dists)):
- v[fn] = i + 1
- w[i + 1] = fn
- m = i + 1
-
- dotlog.debug("Solving using max dists only")
- clauses = self.gen_clauses(v, dists, specs, features)
- solutions = min_sat(clauses)
-
- if len(solutions) == 1:
- ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
- if returnall:
- return [ret]
- return ret
+ try:
+ dists = self.get_dists(specs, max_only=True)
+ except NoPackagesFound:
+ # Handle packages that are not included because some dependencies
+ # couldn't be found.
+ pass
+ else:
+ v = {} # map fn to variable number
+ w = {} # map variable number to fn
+ i = -1 # in case the loop doesn't run
+ for i, fn in enumerate(sorted(dists)):
+ v[fn] = i + 1
+ w[i + 1] = fn
+ m = i + 1
+
+ dotlog.debug("Solving using max dists only")
+ clauses = self.gen_clauses(v, dists, specs, features)
+ solutions = min_sat(clauses)
+
+
+ if len(solutions) == 1:
+ ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
+ if returnall:
+ return [ret]
+ return ret
dists = self.get_dists(specs)
|
Make the conda install table easier to read
The table of what packages will be installed and removed is hard to read. For one thing, it's hard to tell at a glance which packages are merely upgraded or downgraded rather than removed. Also, the "link" terminology is confusing.
A suggestion by @jklowden:
```
$ conda update conda
Updating Anaconda environment at /usr/local/anaconda
The following packages will be downloaded:
conda-2.2.3-py27_0.tar.bz2
[http://repo.continuum.io/pkgs/free/osx-64/]
The following packages will be upgraded:
Old version Replace with
------------------------- -------------------------
conda-1.4.4 conda-2.2.3
```
> or, if you really want the build (I don't, it's not meaningful to the user)
```
package Old version New version
------------ ------------------ ------------------
conda 1.4.4, py27_0 2.2.3, py27_0
```
I think the build is meaningful as it tells you what Python version is being used. It also tells you if you are using mkl. And also some people might use the build string to put other information which may be useful to users.
<!---
@huboard:{"order":3.3142282405143226e-49,"custom_state":""}
-->
| conda/cli/main_remove.py
<|code_start|>
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from argparse import RawDescriptionHelpFormatter
from conda.cli import common
help = "Remove a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution.
"""
example = """
examples:
conda remove -n myenv scipy
"""
def configure_parser(sub_parsers):
    # Register the 'remove' sub-command and all of its options.
    p = sub_parsers.add_parser(
        'remove',
        formatter_class = RawDescriptionHelpFormatter,
        description = descr,
        help = help,
        epilog = example,
    )
    common.add_parser_yes(p)
    p.add_argument(
        "--all",
        action = "store_true",
        help = "remove all packages, i.e. the entire environment",
    )
    p.add_argument(
        "--features",
        action = "store_true",
        help = "remove features (instead of packages)",
    )
    common.add_parser_no_pin(p)
    common.add_parser_channels(p)
    common.add_parser_prefix(p)
    common.add_parser_quiet(p)
    p.add_argument(
        'package_names',
        metavar = 'package_name',
        action = "store",
        nargs = '*',
        help = "package names to remove from environment",
    )
    # Dispatch to execute() when this sub-command is selected.
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point of 'conda remove': build a removal plan and run it."""
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 ' try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    # The index is only fetched for feature removal; package removal
    # works purely from what is linked in the prefix.
    index = None
    if args.features:
        common.ensure_override_channels_requires_channel(args)
        channel_urls = args.channel or ()
        index = get_index(channel_urls=channel_urls,
                          prepend=not args.override_channels)
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     ' add -n NAME or -p PREFIX option')
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        # Protect packages conda itself needs in the root environment.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing linked, but --all still wipes the directory.
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    if args.all:
        rm_rf(prefix)
<|code_end|>
conda/plan.py
<|code_start|>
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import re
import sys
from logging import getLogger
from collections import defaultdict
from os.path import abspath, isfile, join, exists
from conda import config
from conda import install
from conda.fetch import fetch_pkg
from conda.history import History
from conda.resolve import MatchSpec, Resolve
from conda.utils import md5_file, human_bytes
log = getLogger(__name__)
# op codes: instruction names used both as keys in an "actions" dict and
# as the leading token of each line in a textual plan.
FETCH = 'FETCH'
EXTRACT = 'EXTRACT'
UNLINK = 'UNLINK'
LINK = 'LINK'
RM_EXTRACTED = 'RM_EXTRACTED'
RM_FETCHED = 'RM_FETCHED'
PREFIX = 'PREFIX'
PRINT = 'PRINT'
PROGRESS = 'PROGRESS'
SYMLINK_CONDA = 'SYMLINK_CONDA'

# Op codes whose execution is reported through the progress bar.
progress_cmds = set([EXTRACT, RM_EXTRACTED, LINK, UNLINK])
def print_dists(dists_extras):
    """Print a two-column (package | build) table.

    `dists_extras` is a sequence of (dist, extra) pairs; a truthy
    `extra` string is appended verbatim to its row.
    """
    row = " %-27s|%17s"
    print(row % ('package', 'build'))
    print(row % ('-' * 27, '-' * 17))
    for dist, extra in dists_extras:
        name_ver, build = dist.rsplit('-', 1)
        text = row % (name_ver, build)
        if extra:
            text += extra
        print(text)
def split_linkarg(arg):
    """Split a LINK argument into the tuple (dist, pkgs_dir, linktype).

    The pkgs_dir and linktype parts are optional; they default to the
    first configured packages directory and a hard link, respectively.
    """
    m = re.match(r'\s*(\S+)(?:\s+(.+?)\s+(\d+))?\s*$', arg)
    dist, pkgs_dir, linktype = m.groups()
    if pkgs_dir is None:
        pkgs_dir = config.pkgs_dirs[0]
    if linktype is None:
        linktype = install.LINK_HARD
    return dist, pkgs_dir, int(linktype)
def display_actions(actions, index=None):
    """Print a human-readable summary of the planned actions
    (downloads, un-links, links)."""
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")
        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += ' %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        # Show a download-size total when more than one package is fetched.
        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))
    if actions.get(UNLINK):
        print("\nThe following packages will be UN-linked:\n")
        print_dists([
            (dist, None)
            for dist in actions[UNLINK]])
    if actions.get(LINK):
        print("\nThe following packages will be linked:\n")
        lst = []
        for arg in actions[LINK]:
            dist, pkgs_dir, lt = split_linkarg(arg)
            # Annotate each row with the link type (hard/soft/copy).
            extra = ' %s' % install.link_name_map.get(lt)
            lst.append((dist, extra))
        print_dists(lst)
    print()
# the order matters here, don't change it
action_codes = FETCH, EXTRACT, UNLINK, LINK, SYMLINK_CONDA, RM_EXTRACTED, RM_FETCHED

def nothing_to_do(actions):
    """Return True when no action list in `actions` has any entries."""
    return not any(actions.get(op) for op in action_codes)
def plan_from_actions(actions):
    """Flatten an actions dict into a textual plan: a list of
    'OPCODE argument' strings, with PRINT/PROGRESS lines interleaved
    for user feedback."""
    # An explicit per-plan op order (e.g. from force_linked_actions)
    # overrides the module default.
    if 'op_order' in actions and actions['op_order']:
        op_order = actions['op_order']
    else:
        op_order = action_codes

    assert PREFIX in actions and actions[PREFIX]
    res = ['# plan',
           'PREFIX %s' % actions[PREFIX]]
    for op in op_order:
        if op not in actions:
            continue
        if not actions[op]:
            continue
        if '_' not in op:
            # Only "pretty" one-word opcodes get a status line.
            res.append('PRINT %sing packages ...' % op.capitalize())
        if op in progress_cmds:
            res.append('PROGRESS %d' % len(actions[op]))
        for arg in actions[op]:
            res.append('%s %s' % (op, arg))
    return res
def extracted_where(dist):
    """Return the packages directory in which `dist` is already
    extracted, or None when it is extracted nowhere."""
    return next((pkgs_dir for pkgs_dir in config.pkgs_dirs
                 if install.is_extracted(pkgs_dir, dist)), None)
def ensure_linked_actions(dists, prefix):
    """Build the FETCH/EXTRACT/LINK actions needed so that every dist in
    `dists` ends up linked into `prefix`; already-linked dists are
    skipped."""
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in dists:
        if install.is_linked(prefix, dist):
            continue
        extracted_in = extracted_where(dist)
        if extracted_in:
            # Already extracted somewhere: choose the cheapest link type
            # that works (hard link preferred, then soft link, then copy).
            if install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                lt = (install.LINK_SOFT if (config.allow_softlinks and
                                            sys.platform != 'win32') else
                      install.LINK_COPY)
            actions[LINK].append('%s %s %d' % (dist, extracted_in, lt))
            continue
        # Not extracted anywhere: must extract (and possibly fetch) first.
        actions[LINK].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_fetched(config.pkgs_dirs[0], dist):
            continue
        actions[FETCH].append(dist)
    return actions
def force_linked_actions(dists, index, prefix):
    """Build actions that re-fetch/re-extract/re-link every dist in
    `dists`, regardless of what is already present (used by --force)."""
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                # Re-download only when the cached tarball's MD5 differs
                # from the index.
                if md5_file(pkg_path) != index[fn]['md5']:
                    actions[RM_FETCHED].append(dist)
                    actions[FETCH].append(dist)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            actions[FETCH].append(dist)
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        # Unlink first when the package is currently linked in prefix.
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
    # Compare canonicalized paths so relative paths / trailing slashes
    # don't cause false negatives.
    return abspath(prefix) == abspath(config.root_dir)
def dist2spec3v(dist):
    """Turn a dist name like 'python-2.7.5-0' into the version spec
    'python 2.7*' (package name plus the first three version chars)."""
    name, version, _build = dist.rsplit('-', 2)
    return '{0} {1}*'.format(name, version[:3])
def add_defaults_to_specs(r, linked, specs):
    """Mutate `specs` in place, appending a default Python spec when the
    heuristics below say one is needed.  (The H0..HF debug tags label
    the decision points.)"""
    # TODO: This should use the pinning mechanism. But don't change the API:
    # cas uses it.
    if r.explicit(specs):
        return
    log.debug('H0 specs=%r' % specs)
    names_linked = {install.name_dist(dist): dist for dist in linked}
    names_ms = {MatchSpec(s).name: MatchSpec(s) for s in specs}

    for name, def_ver in [('python', config.default_python),]:
                          #('numpy', config.default_numpy)]:
        ms = names_ms.get(name)
        if ms and ms.strictness > 1:
            # if any of the specifications mention the Python/Numpy version,
            # we don't need to add the default spec
            log.debug('H1 %s' % name)
            continue

        any_depends_on = any(ms2.name == name
                             for spec in specs
                             for fn in r.get_max_dists(MatchSpec(spec))
                             for ms2 in r.ms_depends(fn))
        log.debug('H2 %s %s' % (name, any_depends_on))

        if not any_depends_on and name not in names_ms:
            # if nothing depends on Python/Numpy AND the Python/Numpy is not
            # specified, we don't need to add the default spec
            log.debug('H2A %s' % name)
            continue

        if (any_depends_on and len(specs) >= 1 and
                MatchSpec(specs[0]).strictness == 3):
            # if something depends on Python/Numpy, but the spec is very
            # explicit, we also don't need to add the default spec
            log.debug('H2B %s' % name)
            continue

        if name in names_linked:
            # if Python/Numpy is already linked, we add that instead of the
            # default
            log.debug('H3 %s' % name)
            specs.append(dist2spec3v(names_linked[name]))
            continue

        if (name, def_ver) in [('python', '3.3'), ('python', '3.4')]:
            # Don't include Python 3 in the specs if this is the Python 3
            # version of conda.
            continue

        specs.append('%s %s*' % (name, def_ver))
    log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
    """Return the non-empty lines of <prefix>/conda-meta/pinned, or an
    empty list when the file does not exist."""
    path = join(prefix, 'conda-meta', 'pinned')
    if not exists(path):
        return []
    with open(path) as fh:
        return [line for line in fh.read().strip().split('\n') if line]
def install_actions(prefix, index, specs, force=False, only_names=None, pinned=True, minimal_hint=False):
    """Solve `specs` against `index` and return the actions dict needed
    to realize the solution in `prefix` (including UNLINKs of replaced
    packages)."""
    r = Resolve(index)
    linked = install.linked(prefix)

    # conda keeps itself up to date in the root environment.
    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')

    add_defaults_to_specs(r, linked, specs)
    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        specs += pinned_specs
        # TODO: Improve error messages here

    must_have = {}
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features, minimal_hint=minimal_hint):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist

    if is_root_prefix(prefix):
        if install.on_win:
            # Windows cannot replace these while conda itself runs.
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    else:
        # discard conda from other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into "
                     "root environment")

    smh = sorted(must_have.values())
    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)

    if actions[LINK] and sys.platform != 'win32':
        actions[SYMLINK_CONDA] = [config.root_dir]

    # Unlink any currently-linked dist that the solution replaces with a
    # different build/version of the same package.
    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            actions[UNLINK].append(dist)

    return actions
def remove_actions(prefix, specs, pinned=True):
    """Return UNLINK actions for every linked dist matching one of
    `specs`; raise when a matching dist is pinned (unless pinned=False)."""
    linked = install.linked(prefix)

    mss = [MatchSpec(spec) for spec in specs]
    pinned_specs = get_pinned_specs(prefix)

    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in sorted(linked):
        if any(ms.match('%s.tar.bz2' % dist) for ms in mss):
            if pinned and any(MatchSpec(spec).match('%s.tar.bz2' % dist)
                              for spec in pinned_specs):
                raise RuntimeError("Cannot remove %s because it is pinned. Use --no-pin to override." % dist)
            actions[UNLINK].append(dist)
    return actions
def remove_features_actions(prefix, index, features):
    """Return actions that unlink every linked package tracking or
    carrying one of `features`, re-linking a feature-free substitute
    where one exists."""
    linked = install.linked(prefix)
    r = Resolve(index)

    actions = defaultdict(list)
    actions[PREFIX] = prefix
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            # Not in the index: nothing known about its features.
            continue
        if r.track_features(fn).intersection(features):
            actions[UNLINK].append(dist)
        if r.features(fn).intersection(features):
            actions[UNLINK].append(dist)
            subst = r.find_substitute(_linked, features, fn)
            if subst:
                to_link.append(subst[:-8])

    if to_link:
        actions.update(ensure_linked_actions(to_link, prefix))
    return actions
def revert_actions(prefix, revision=-1):
    """Return the actions needed to bring `prefix` back to the package
    state recorded at history `revision`; empty dict when already there."""
    h = History(prefix)
    h.update()
    try:
        state = h.get_state(revision)
    except IndexError:
        sys.exit("Error: no such revision: %d" % revision)

    curr = h.get_state()
    if state == curr:
        return {}

    actions = ensure_linked_actions(state, prefix)
    # Unlink everything present now that was absent in the target state.
    for dist in curr - state:
        actions[UNLINK].append(dist)

    return actions
# ---------------------------- EXECUTION --------------------------
def fetch(index, dist):
    """Download the package for `dist` using its index record."""
    assert index is not None
    fn = dist + '.tar.bz2'
    fetch_pkg(index[fn])
def link(prefix, arg, index=None):
    """Execute one LINK plan instruction (`arg` as produced by
    ensure_linked_actions / force_linked_actions)."""
    dist, pkgs_dir, lt = split_linkarg(arg)
    install.link(pkgs_dir, prefix, dist, lt, index=index)
def cmds_from_plan(plan):
    """Parse a textual plan into [opcode, argument] pairs, skipping
    blank lines and '#' comments."""
    cmds = []
    for raw in plan:
        log.debug(' %s' % raw)
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            cmds.append(stripped.split(None, 1))
    return cmds
def execute_plan(plan, index=None, verbose=False):
    """Interpret and execute a textual plan, one instruction at a time,
    reporting progress through the 'progress.*' loggers."""
    if verbose:
        from conda.console import setup_verbose_handlers
        setup_verbose_handlers()

    # set default prefix
    prefix = config.root_dir
    i = None  # progress counter; None while no progress bar is active
    cmds = cmds_from_plan(plan)

    for cmd, arg in cmds:
        if i is not None and cmd in progress_cmds:
            i += 1
            getLogger('progress.update').info((install.name_dist(arg), i))

        if cmd == PREFIX:
            prefix = arg
        elif cmd == PRINT:
            getLogger('print').info(arg)
        elif cmd == FETCH:
            fetch(index, arg)
        elif cmd == PROGRESS:
            # Start a progress bar spanning the next `maxval` commands.
            i = 0
            maxval = int(arg)
            getLogger('progress.start').info(maxval)
        elif cmd == EXTRACT:
            install.extract(config.pkgs_dirs[0], arg)
        elif cmd == RM_EXTRACTED:
            install.rm_extracted(config.pkgs_dirs[0], arg)
        elif cmd == RM_FETCHED:
            install.rm_fetched(config.pkgs_dirs[0], arg)
        elif cmd == LINK:
            link(prefix, arg, index=index)
        elif cmd == UNLINK:
            install.unlink(prefix, arg)
        elif cmd == SYMLINK_CONDA:
            install.symlink_conda(prefix, arg)
        else:
            raise Exception("Did not expect command: %r" % cmd)

        if i is not None and cmd in progress_cmds and maxval == i:
            # Progress bar complete.
            i = None
            getLogger('progress.stop').info(None)

    install.messages(prefix)
def execute_actions(actions, index=None, verbose=False):
    """Convert `actions` to a plan and execute it, recording the change
    in the environment's history."""
    plan = plan_from_actions(actions)
    with History(actions[PREFIX]):
        execute_plan(plan, index, verbose)
if __name__ == '__main__':
    # for testing new revert_actions() only
    from pprint import pprint
    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
<|code_end|>
| conda/cli/main_remove.py
# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import print_function, division, absolute_import
from argparse import RawDescriptionHelpFormatter
from conda.cli import common
help = "Remove a list of packages from a specified conda environment."
descr = help + """
Normally, only the specified package is removed, and not the packages
which may depend on the package. Hence this command should be used
with caution.
"""
example = """
examples:
conda remove -n myenv scipy
"""
def configure_parser(sub_parsers):
    # Register the 'remove' sub-command and all of its options.
    p = sub_parsers.add_parser(
        'remove',
        formatter_class = RawDescriptionHelpFormatter,
        description = descr,
        help = help,
        epilog = example,
    )
    common.add_parser_yes(p)
    p.add_argument(
        "--all",
        action = "store_true",
        help = "remove all packages, i.e. the entire environment",
    )
    p.add_argument(
        "--features",
        action = "store_true",
        help = "remove features (instead of packages)",
    )
    common.add_parser_no_pin(p)
    common.add_parser_channels(p)
    common.add_parser_prefix(p)
    common.add_parser_quiet(p)
    p.add_argument(
        'package_names',
        metavar = 'package_name',
        action = "store",
        nargs = '*',
        help = "package names to remove from environment",
    )
    # Dispatch to execute() when this sub-command is selected.
    p.set_defaults(func=execute)
def execute(args, parser):
    """Entry point of 'conda remove': build a removal plan and run it.

    Unlike the earlier version, the index is always fetched here (it is
    needed by display_actions for version/channel information).
    """
    import sys

    import conda.plan as plan
    from conda.api import get_index
    from conda.cli import pscheck
    from conda.install import rm_rf, linked
    from conda import config
    # NOTE(review): `config` is imported but not referenced in this
    # function body — confirm whether it is still needed.

    if not (args.all or args.package_names):
        sys.exit('Error: no package names supplied,\n'
                 ' try "conda remove -h" for more details')

    prefix = common.get_prefix(args)
    common.check_write('remove', prefix)

    common.ensure_override_channels_requires_channel(args)
    channel_urls = args.channel or ()
    index = get_index(channel_urls=channel_urls,
                      prepend=not args.override_channels)

    if args.features:
        features = set(args.package_names)
        actions = plan.remove_features_actions(prefix, index, features)

    elif args.all:
        if plan.is_root_prefix(prefix):
            sys.exit('Error: cannot remove root environment,\n'
                     ' add -n NAME or -p PREFIX option')
        actions = {plan.PREFIX: prefix,
                   plan.UNLINK: sorted(linked(prefix))}

    else:
        specs = common.specs_from_args(args.package_names)
        # Protect packages conda itself needs in the root environment.
        if (plan.is_root_prefix(prefix) and
                common.names_in_specs(common.root_no_rm, specs)):
            sys.exit('Error: cannot remove %s from root environment' %
                     ', '.join(common.root_no_rm))
        actions = plan.remove_actions(prefix, specs, pinned=args.pinned)

    if plan.nothing_to_do(actions):
        if args.all:
            # Nothing linked, but --all still wipes the directory.
            rm_rf(prefix)
            return
        sys.exit('Error: no packages found to remove from '
                 'environment: %s' % prefix)

    print()
    print("Package plan for package removal in environment %s:" % prefix)
    plan.display_actions(actions, index)

    if not pscheck.main(args):
        common.confirm_yn(args)

    plan.execute_actions(actions, index, verbose=not args.quiet)
    if args.all:
        rm_rf(prefix)
conda/plan.py
"""
Handle the planning of installs and their execution.
NOTE:
conda.install uses canonical package names in its interface functions,
whereas conda.resolve uses package filenames, as those are used as index
keys. We try to keep fixes to this "impedance mismatch" local to this
module.
"""
from __future__ import print_function, division, absolute_import
import re
import sys
from logging import getLogger
from collections import defaultdict
from os.path import abspath, isfile, join, exists
from conda import config
from conda import install
from conda.fetch import fetch_pkg
from conda.history import History
from conda.resolve import MatchSpec, Resolve, Package
from conda.utils import md5_file, human_bytes
log = getLogger(__name__)
# op codes: instruction names used both as keys in an "actions" dict and
# as the leading token of each line in a textual plan.
FETCH = 'FETCH'
EXTRACT = 'EXTRACT'
UNLINK = 'UNLINK'
LINK = 'LINK'
RM_EXTRACTED = 'RM_EXTRACTED'
RM_FETCHED = 'RM_FETCHED'
PREFIX = 'PREFIX'
PRINT = 'PRINT'
PROGRESS = 'PROGRESS'
SYMLINK_CONDA = 'SYMLINK_CONDA'

# Op codes whose execution is reported through the progress bar.
progress_cmds = set([EXTRACT, RM_EXTRACTED, LINK, UNLINK])
def print_dists(dists_extras):
    """Print a two-column (package | build) table; a truthy `extra`
    string is appended verbatim to its row."""
    fmt = " %-27s|%17s"
    print(fmt % ('package', 'build'))
    print(fmt % ('-' * 27, '-' * 17))
    for dist, extra in dists_extras:
        line = fmt % tuple(dist.rsplit('-', 1))
        if extra:
            line += extra
        print(line)
def split_linkarg(arg):
    "Return tuple(dist, pkgs_dir, linktype)"
    # pkgs_dir and linktype are optional; default to the first packages
    # directory and a hard link.
    pat = re.compile(r'\s*(\S+)(?:\s+(.+?)\s+(\d+))?\s*$')
    m = pat.match(arg)
    dist, pkgs_dir, linktype = m.groups()
    if pkgs_dir is None:
        pkgs_dir = config.pkgs_dirs[0]
    if linktype is None:
        linktype = install.LINK_HARD
    return dist, pkgs_dir, int(linktype)
def display_actions(actions, index):
    """Print a human-readable summary of the planned actions, grouping
    packages into NEW / REMOVED / UPDATED / DOWNGRADED tables with
    aligned 'old --> new' columns."""
    if actions.get(FETCH):
        print("\nThe following packages will be downloaded:\n")

        disp_lst = []
        for dist in actions[FETCH]:
            info = index[dist + '.tar.bz2']
            extra = '%15s' % human_bytes(info['size'])
            if config.show_channel_urls:
                extra += ' %s' % config.canonical_channel_name(
                    info.get('channel'))
            disp_lst.append((dist, extra))
        print_dists(disp_lst)

        if index and len(actions[FETCH]) > 1:
            print(' ' * 4 + '-' * 60)
            print(" " * 43 + "Total: %14s" %
                  human_bytes(sum(index[dist + '.tar.bz2']['size']
                                  for dist in actions[FETCH])))

    # package -> [oldver-oldbuild, newver-newbuild]
    packages = defaultdict(lambda: list(('', '')))
    features = defaultdict(lambda: list(('', '')))

    # This assumes each package will appear in LINK no more than once.
    Packages = {}
    linktypes = {}
    for arg in actions.get(LINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][1] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        linktypes[pkg] = lt
        features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
    for arg in actions.get(UNLINK, []):
        dist, pkgs_dir, lt = split_linkarg(arg)
        pkg, ver, build = dist.rsplit('-', 2)
        packages[pkg][0] = ver + '-' + build
        Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
        features[pkg][0] = index[dist + '.tar.bz2'].get('features', '')

    # Column widths for the aligned tables below.
    #         Put a minimum length here---.    .--For the :
    #                                     v    v
    maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
    maxoldver = len(max(packages.values() or [['']],
                        key=lambda i: len(i[0]))[0])
    maxnewver = len(max(packages.values() or [['', '']],
                        key=lambda i: len(i[1]))[1])
    maxoldfeatures = len(max(features.values() or [['']],
                             key=lambda i: len(i[0]))[0])
    maxnewfeatures = len(max(features.values() or [['', '']],
                             key=lambda i: len(i[1]))[1])
    maxoldchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
        packages[pkg][0]].channel) for pkg in packages if packages[pkg][0]] or
        [''], key=len))
    maxnewchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
        packages[pkg][1]].channel) for pkg in packages if packages[pkg][1]] or
        [''], key=len))
    new = {pkg for pkg in packages if not packages[pkg][0]}
    removed = {pkg for pkg in packages if not packages[pkg][1]}
    updated = set()
    downgraded = set()
    oldfmt = {}
    newfmt = {}
    for pkg in packages:
        # That's right. I'm using old-style string formatting to generate a
        # string with new-style string formatting.
        oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
        if config.show_channel_urls:
            oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
        if packages[pkg][0]:
            newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
        else:
            newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
        if config.show_channel_urls:
            newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
        # TODO: Should we also care about the old package's link type?
        if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
            newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]

        if features[pkg][0]:
            oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
        if features[pkg][1]:
            newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures

        if pkg in new or pkg in removed:
            continue
        P0 = Packages[pkg + '-' + packages[pkg][0]]
        P1 = Packages[pkg + '-' + packages[pkg][1]]
        try:
            # <= here means that unchanged packages will be put in updated
            newer = (P0.name, P0.norm_version, P0.build_number) <= (P1.name, P1.norm_version, P1.build_number)
        except TypeError:
            # norm_version may be a plain string; fall back to raw version.
            newer = (P0.name, P0.version, P0.build_number) <= (P1.name, P1.version, P1.build_number)
        if newer:
            updated.add(pkg)
        else:
            downgraded.add(pkg)

    arrow = ' --> '
    lead = ' '*4

    def format(s, pkg):
        # Render one table row: resolve channels, then apply the
        # new-style format string built above.
        channel = ['', '']
        for i in range(2):
            if packages[pkg][i]:
                channel[i] = config.canonical_channel_name(
                    Packages[pkg + '-' + packages[pkg][i]].channel)
        return lead + s.format(pkg=pkg+':', vers=packages[pkg],
                               channel=channel, features=features[pkg])

    if new:
        print("\nThe following NEW packages will be INSTALLED:\n")
        for pkg in sorted(new):
            print(format(newfmt[pkg], pkg))

    if removed:
        print("\nThe following packages will be REMOVED:\n")
        for pkg in sorted(removed):
            print(format(oldfmt[pkg], pkg))

    if updated:
        print("\nThe following packages will be UPDATED:\n")
        for pkg in sorted(updated):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    if downgraded:
        print("\nThe following packages will be DOWNGRADED:\n")
        for pkg in sorted(downgraded):
            print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))

    print()
# the order matters here, don't change it
action_codes = FETCH, EXTRACT, UNLINK, LINK, SYMLINK_CONDA, RM_EXTRACTED, RM_FETCHED

def nothing_to_do(actions):
    # True when no action list in `actions` has any entries.
    for op in action_codes:
        if actions.get(op):
            return False
    return True
def plan_from_actions(actions):
    """Flatten an actions dict into a textual plan: a list of
    'OPCODE argument' strings with PRINT/PROGRESS lines interleaved."""
    # A per-plan op order (e.g. from force_linked_actions) overrides the
    # module default.
    if 'op_order' in actions and actions['op_order']:
        op_order = actions['op_order']
    else:
        op_order = action_codes

    assert PREFIX in actions and actions[PREFIX]
    res = ['# plan',
           'PREFIX %s' % actions[PREFIX]]
    for op in op_order:
        if op not in actions:
            continue
        if not actions[op]:
            continue
        if '_' not in op:
            # Only "pretty" one-word opcodes get a status line.
            res.append('PRINT %sing packages ...' % op.capitalize())
        if op in progress_cmds:
            res.append('PROGRESS %d' % len(actions[op]))
        for arg in actions[op]:
            res.append('%s %s' % (op, arg))
    return res
def extracted_where(dist):
    # Return the first packages directory in which `dist` is already
    # extracted, or None.
    for pkgs_dir in config.pkgs_dirs:
        if install.is_extracted(pkgs_dir, dist):
            return pkgs_dir
    return None
def ensure_linked_actions(dists, prefix):
    """Build the FETCH/EXTRACT/LINK actions needed so that every dist in
    `dists` ends up linked into `prefix`; already-linked dists are
    skipped."""
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in dists:
        if install.is_linked(prefix, dist):
            continue
        extracted_in = extracted_where(dist)
        if extracted_in:
            # Already extracted somewhere: pick the cheapest viable link
            # type (hard link preferred, then soft link, then copy).
            if install.try_hard_link(extracted_in, prefix, dist):
                lt = install.LINK_HARD
            else:
                lt = (install.LINK_SOFT if (config.allow_softlinks and
                                            sys.platform != 'win32') else
                      install.LINK_COPY)
            actions[LINK].append('%s %s %d' % (dist, extracted_in, lt))
            continue
        # Not extracted anywhere: must extract (and possibly fetch) first.
        actions[LINK].append(dist)
        actions[EXTRACT].append(dist)
        if install.is_fetched(config.pkgs_dirs[0], dist):
            continue
        actions[FETCH].append(dist)
    return actions
def force_linked_actions(dists, index, prefix):
    """Build actions that re-fetch/re-extract/re-link every dist in
    `dists`, regardless of what is already present (used by --force)."""
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    actions['op_order'] = (RM_FETCHED, FETCH, RM_EXTRACTED, EXTRACT,
                           UNLINK, LINK)
    for dist in dists:
        fn = dist + '.tar.bz2'
        pkg_path = join(config.pkgs_dirs[0], fn)
        if isfile(pkg_path):
            try:
                # Re-download only when the cached tarball's MD5 differs
                # from the index.
                if md5_file(pkg_path) != index[fn]['md5']:
                    actions[RM_FETCHED].append(dist)
                    actions[FETCH].append(dist)
            except KeyError:
                sys.stderr.write('Warning: cannot lookup MD5 of: %s' % fn)
        else:
            actions[FETCH].append(dist)
        actions[RM_EXTRACTED].append(dist)
        actions[EXTRACT].append(dist)
        # Unlink first when the package is currently linked in prefix.
        if isfile(join(prefix, 'conda-meta', dist + '.json')):
            actions[UNLINK].append(dist)
        actions[LINK].append(dist)
    return actions
# -------------------------------------------------------------------
def is_root_prefix(prefix):
return abspath(prefix) == abspath(config.root_dir)
def dist2spec3v(dist):
    # 'python-2.7.5-0' -> 'python 2.7*' (name + first three version chars).
    name, version, unused_build = dist.rsplit('-', 2)
    return '%s %s*' % (name, version[:3])
def add_defaults_to_specs(r, linked, specs):
    # Mutate `specs` in place, appending a default python (and potentially
    # numpy) version spec when the solve would otherwise leave it
    # unconstrained.  The H0..HF debug tags trace which heuristic fired.
    # TODO: This should use the pinning mechanism. But don't change the API:
    # cas uses it.
    if r.explicit(specs):
        return
    log.debug('H0 specs=%r' % specs)
    names_linked = {install.name_dist(dist): dist for dist in linked}
    names_ms = {MatchSpec(s).name: MatchSpec(s) for s in specs}
    for name, def_ver in [('python', config.default_python),]:
                          #('numpy', config.default_numpy)]:
        ms = names_ms.get(name)
        if ms and ms.strictness > 1:
            # if any of the specifications mention the Python/Numpy version,
            # we don't need to add the default spec
            log.debug('H1 %s' % name)
            continue
        any_depends_on = any(ms2.name == name
                             for spec in specs
                             for fn in r.get_max_dists(MatchSpec(spec))
                             for ms2 in r.ms_depends(fn))
        log.debug('H2 %s %s' % (name, any_depends_on))
        if not any_depends_on and name not in names_ms:
            # if nothing depends on Python/Numpy AND the Python/Numpy is not
            # specified, we don't need to add the default spec
            log.debug('H2A %s' % name)
            continue
        if (any_depends_on and len(specs) >= 1 and
                MatchSpec(specs[0]).strictness == 3):
            # if something depends on Python/Numpy, but the spec is very
            # explicit, we also don't need to add the default spec
            log.debug('H2B %s' % name)
            continue
        if name in names_linked:
            # if Python/Numpy is already linked, we add that instead of the
            # default
            log.debug('H3 %s' % name)
            specs.append(dist2spec3v(names_linked[name]))
            continue
        if (name, def_ver) in [('python', '3.3'), ('python', '3.4')]:
            # Don't include Python 3 in the specs if this is the Python 3
            # version of conda.
            continue
        specs.append('%s %s*' % (name, def_ver))
    log.debug('HF specs=%r' % specs)
def get_pinned_specs(prefix):
    """Read the pinned-specs file of `prefix`; return a list of spec strings."""
    pinfile = join(prefix, 'conda-meta', 'pinned')
    if not exists(pinfile):
        return []
    with open(pinfile) as fh:
        lines = fh.read().strip().split('\n')
    return [line for line in lines if len(line)]
def install_actions(prefix, index, specs, force=False, only_names=None, pinned=True, minimal_hint=False):
    # Compute the full action plan for installing `specs` into `prefix`:
    # resolve dependencies, then plan fetch/extract/link steps plus the
    # unlink of any superseded builds already present.
    r = Resolve(index)
    linked = install.linked(prefix)
    if config.self_update and is_root_prefix(prefix):
        specs.append('conda')
    add_defaults_to_specs(r, linked, specs)
    if pinned:
        pinned_specs = get_pinned_specs(prefix)
        specs += pinned_specs
        # TODO: Improve error messages here
    must_have = {}
    # map package name -> dist chosen by the solver
    for fn in r.solve(specs, [d + '.tar.bz2' for d in linked],
                      config.track_features, minimal_hint=minimal_hint):
        dist = fn[:-8]
        name = install.name_dist(dist)
        if only_names and name not in only_names:
            continue
        must_have[name] = dist
    if is_root_prefix(prefix):
        if install.on_win:
            # packages Windows cannot update in the root env while running
            for name in install.win_ignore_root:
                if name in must_have:
                    del must_have[name]
        for name in config.foreign:
            if name in must_have:
                del must_have[name]
    else:
        # discard conda from other environments
        if 'conda' in must_have:
            sys.exit("Error: 'conda' can only be installed into "
                     "root environment")
    smh = sorted(must_have.values())
    if force:
        actions = force_linked_actions(smh, index, prefix)
    else:
        actions = ensure_linked_actions(smh, prefix)
    if actions[LINK] and sys.platform != 'win32':
        actions[SYMLINK_CONDA] = [config.root_dir]
    for dist in sorted(linked):
        name = install.name_dist(dist)
        if name in must_have and dist != must_have[name]:
            # a different build/version of this package will be linked
            actions[UNLINK].append(dist)
    return actions
def remove_actions(prefix, specs, pinned=True):
    # Plan UNLINK actions for every linked dist matching any of `specs`.
    # Raises when a matching package is pinned, unless pinned=False.
    linked = install.linked(prefix)
    mss = [MatchSpec(spec) for spec in specs]
    pinned_specs = get_pinned_specs(prefix)
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    for dist in sorted(linked):
        if any(ms.match('%s.tar.bz2' % dist) for ms in mss):
            if pinned and any(MatchSpec(spec).match('%s.tar.bz2' % dist) for spec in
                    pinned_specs):
                raise RuntimeError("Cannot remove %s because it is pinned. Use --no-pin to override." % dist)
            actions[UNLINK].append(dist)
    return actions
def remove_features_actions(prefix, index, features):
    # Plan the removal of every linked package that tracks or carries one of
    # `features`, linking a feature-free substitute build where one exists.
    linked = install.linked(prefix)
    r = Resolve(index)
    actions = defaultdict(list)
    actions[PREFIX] = prefix
    _linked = [d + '.tar.bz2' for d in linked]
    to_link = []
    for dist in sorted(linked):
        fn = dist + '.tar.bz2'
        if fn not in index:
            # unknown to the index; leave it alone
            continue
        if r.track_features(fn).intersection(features):
            actions[UNLINK].append(dist)
        if r.features(fn).intersection(features):
            actions[UNLINK].append(dist)
            subst = r.find_substitute(_linked, features, fn)
            if subst:
                # [:-8] strips the '.tar.bz2' suffix
                to_link.append(subst[:-8])
    if to_link:
        actions.update(ensure_linked_actions(to_link, prefix))
    return actions
def revert_actions(prefix, revision=-1):
    # Plan the link/unlink actions that take `prefix` back to the package
    # state recorded at history `revision` (default: the latest one).
    h = History(prefix)
    h.update()
    try:
        state = h.get_state(revision)
    except IndexError:
        sys.exit("Error: no such revision: %d" % revision)
    curr = h.get_state()
    if state == curr:
        # already at the requested state: nothing to do
        return {}
    actions = ensure_linked_actions(state, prefix)
    for dist in curr - state:
        actions[UNLINK].append(dist)
    return actions
# ---------------------------- EXECUTION --------------------------
def fetch(index, dist):
    # Download the tarball for `dist` using its index metadata.
    assert index is not None
    fn = dist + '.tar.bz2'
    fetch_pkg(index[fn])
def link(prefix, arg, index=None):
    # `arg` is a plan LINK argument: 'dist [pkgs_dir linktype]'.
    dist, pkgs_dir, lt = split_linkarg(arg)
    install.link(pkgs_dir, prefix, dist, lt, index=index)
def cmds_from_plan(plan):
    """Parse plan lines into [command, argument] pairs, skipping blank lines
    and '#' comments."""
    commands = []
    for raw in plan:
        log.debug(' %s' % raw)
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            commands.append(stripped.split(None, 1))
    return commands
def execute_plan(plan, index=None, verbose=False):
    # Execute a textual plan: dispatch each (command, argument) pair to the
    # matching install/fetch operation, maintaining a progress counter `i`
    # between PROGRESS and the final counted command.
    if verbose:
        from conda.console import setup_verbose_handlers
        setup_verbose_handlers()
    # set default prefix
    prefix = config.root_dir
    i = None  # progress counter; None means "no progress bar active"
    cmds = cmds_from_plan(plan)
    for cmd, arg in cmds:
        if i is not None and cmd in progress_cmds:
            i += 1
            getLogger('progress.update').info((install.name_dist(arg), i))
        if cmd == PREFIX:
            prefix = arg
        elif cmd == PRINT:
            getLogger('print').info(arg)
        elif cmd == FETCH:
            fetch(index, arg)
        elif cmd == PROGRESS:
            # start a progress bar for the next `maxval` counted commands
            i = 0
            maxval = int(arg)
            getLogger('progress.start').info(maxval)
        elif cmd == EXTRACT:
            install.extract(config.pkgs_dirs[0], arg)
        elif cmd == RM_EXTRACTED:
            install.rm_extracted(config.pkgs_dirs[0], arg)
        elif cmd == RM_FETCHED:
            install.rm_fetched(config.pkgs_dirs[0], arg)
        elif cmd == LINK:
            link(prefix, arg, index=index)
        elif cmd == UNLINK:
            install.unlink(prefix, arg)
        elif cmd == SYMLINK_CONDA:
            install.symlink_conda(prefix, arg)
        else:
            raise Exception("Did not expect command: %r" % cmd)
        if i is not None and cmd in progress_cmds and maxval == i:
            # progress bar complete; deactivate it
            i = None
            getLogger('progress.stop').info(None)
    install.messages(prefix)
def execute_actions(actions, index=None, verbose=False):
    # Convert an actions dict to a plan and run it, recording the change in
    # the prefix's history (the History context manager diffs on exit).
    plan = plan_from_actions(actions)
    with History(actions[PREFIX]):
        execute_plan(plan, index, verbose)
if __name__ == '__main__':
    # for testing new revert_actions() only
    # usage: python plan.py REVISION -- prints the action plan for reverting
    # the current sys.prefix environment to the given history revision
    from pprint import pprint
    pprint(dict(revert_actions(sys.prefix, int(sys.argv[1]))))
| conda/cli/main_remove.py
--- a/conda/cli/main_remove.py
+++ b/conda/cli/main_remove.py
@@ -63,6 +63,7 @@ def execute(args, parser):
from conda.api import get_index
from conda.cli import pscheck
from conda.install import rm_rf, linked
+ from conda import config
if not (args.all or args.package_names):
sys.exit('Error: no package names supplied,\n'
@@ -71,12 +72,11 @@ def execute(args, parser):
prefix = common.get_prefix(args)
common.check_write('remove', prefix)
- index = None
+ common.ensure_override_channels_requires_channel(args)
+ channel_urls = args.channel or ()
+ index = get_index(channel_urls=channel_urls,
+ prepend=not args.override_channels)
if args.features:
- common.ensure_override_channels_requires_channel(args)
- channel_urls = args.channel or ()
- index = get_index(channel_urls=channel_urls,
- prepend=not args.override_channels)
features = set(args.package_names)
actions = plan.remove_features_actions(prefix, index, features)
conda/plan.py
--- a/conda/plan.py
+++ b/conda/plan.py
@@ -20,7 +20,7 @@
from conda import install
from conda.fetch import fetch_pkg
from conda.history import History
-from conda.resolve import MatchSpec, Resolve
+from conda.resolve import MatchSpec, Resolve, Package
from conda.utils import md5_file, human_bytes
log = getLogger(__name__)
@@ -60,7 +60,7 @@ def split_linkarg(arg):
linktype = install.LINK_HARD
return dist, pkgs_dir, int(linktype)
-def display_actions(actions, index=None):
+def display_actions(actions, index):
if actions.get(FETCH):
print("\nThe following packages will be downloaded:\n")
@@ -79,19 +79,113 @@ def display_actions(actions, index=None):
print(" " * 43 + "Total: %14s" %
human_bytes(sum(index[dist + '.tar.bz2']['size']
for dist in actions[FETCH])))
- if actions.get(UNLINK):
- print("\nThe following packages will be UN-linked:\n")
- print_dists([
- (dist, None)
- for dist in actions[UNLINK]])
- if actions.get(LINK):
- print("\nThe following packages will be linked:\n")
- lst = []
- for arg in actions[LINK]:
- dist, pkgs_dir, lt = split_linkarg(arg)
- extra = ' %s' % install.link_name_map.get(lt)
- lst.append((dist, extra))
- print_dists(lst)
+
+ # package -> [oldver-oldbuild, newver-newbuild]
+ packages = defaultdict(lambda: list(('', '')))
+ features = defaultdict(lambda: list(('', '')))
+
+ # This assumes each package will appear in LINK no more than once.
+ Packages = {}
+ linktypes = {}
+ for arg in actions.get(LINK, []):
+ dist, pkgs_dir, lt = split_linkarg(arg)
+ pkg, ver, build = dist.rsplit('-', 2)
+ packages[pkg][1] = ver + '-' + build
+ Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
+ linktypes[pkg] = lt
+ features[pkg][1] = index[dist + '.tar.bz2'].get('features', '')
+ for arg in actions.get(UNLINK, []):
+ dist, pkgs_dir, lt = split_linkarg(arg)
+ pkg, ver, build = dist.rsplit('-', 2)
+ packages[pkg][0] = ver + '-' + build
+ Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2'])
+ features[pkg][0] = index[dist + '.tar.bz2'].get('features', '')
+
+ # Put a minimum length here---. .--For the :
+ # v v
+ maxpkg = max(len(max(packages or [''], key=len)), 0) + 1
+ maxoldver = len(max(packages.values() or [['']], key=lambda i: len(i[0]))[0])
+ maxnewver = len(max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1])
+ maxoldfeatures = len(max(features.values() or [['']], key=lambda i: len(i[0]))[0])
+ maxnewfeatures = len(max(features.values() or [['', '']], key=lambda i: len(i[1]))[1])
+ maxoldchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
+ packages[pkg][0]].channel) for pkg in packages if packages[pkg][0]] or
+ [''], key=len))
+ maxnewchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' +
+ packages[pkg][1]].channel) for pkg in packages if packages[pkg][1]] or
+ [''], key=len))
+ new = {pkg for pkg in packages if not packages[pkg][0]}
+ removed = {pkg for pkg in packages if not packages[pkg][1]}
+ updated = set()
+ downgraded = set()
+ oldfmt = {}
+ newfmt = {}
+ for pkg in packages:
+ # That's right. I'm using old-style string formatting to generate a
+ # string with new-style string formatting.
+ oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver)
+ if config.show_channel_urls:
+ oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel
+ if packages[pkg][0]:
+ newfmt[pkg] = '{vers[1]:<%s}' % maxnewver
+ else:
+ newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver)
+ if config.show_channel_urls:
+ newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel
+ # TODO: Should we also care about the old package's link type?
+ if pkg in linktypes and linktypes[pkg] != install.LINK_HARD:
+ newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]]
+
+ if features[pkg][0]:
+ oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures
+ if features[pkg][1]:
+ newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures
+
+ if pkg in new or pkg in removed:
+ continue
+ P0 = Packages[pkg + '-' + packages[pkg][0]]
+ P1 = Packages[pkg + '-' + packages[pkg][1]]
+ try:
+ # <= here means that unchanged packages will be put in updated
+ newer = (P0.name, P0.norm_version, P0.build_number) <= (P1.name, P1.norm_version, P1.build_number)
+ except TypeError:
+ newer = (P0.name, P0.version, P0.build_number) <= (P1.name, P1.version, P1.build_number)
+ if newer:
+ updated.add(pkg)
+ else:
+ downgraded.add(pkg)
+
+ arrow = ' --> '
+ lead = ' '*4
+
+ def format(s, pkg):
+ channel = ['', '']
+ for i in range(2):
+ if packages[pkg][i]:
+ channel[i] = config.canonical_channel_name(Packages[pkg + '-' + packages[pkg][i]].channel)
+ return lead + s.format(pkg=pkg+':', vers=packages[pkg],
+ channel=channel, features=features[pkg])
+
+ if new:
+ print("\nThe following NEW packages will be INSTALLED:\n")
+ for pkg in sorted(new):
+ print(format(newfmt[pkg], pkg))
+
+ if removed:
+ print("\nThe following packages will be REMOVED:\n")
+ for pkg in sorted(removed):
+ print(format(oldfmt[pkg], pkg))
+
+ if updated:
+ print("\nThe following packages will be UPDATED:\n")
+ for pkg in sorted(updated):
+ print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
+
+ if downgraded:
+ print("\nThe following packages will be DOWNGRADED:\n")
+ for pkg in sorted(downgraded):
+ print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg))
+
print()
# the order matters here, don't change it |
NoPackagesFound does not work correctly for missing recursive dependencies
| conda/resolve.py
<|code_start|>
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
    """Best-effort parse of `version` into a sortable NormalizedVersion.

    'rc' is rewritten to a '.dev' tag so release candidates sort before
    final releases; if the result still cannot be parsed, the (rewritten)
    string itself is returned.
    """
    canonical = version.replace('rc', '.dev99999')
    if canonical.endswith('.dev'):
        canonical += '0'
    try:
        return verlib.NormalizedVersion(canonical)
    except verlib.IrrationalVersionError:
        return canonical
class NoPackagesFound(RuntimeError):
    """Raised when no package (or dependency) in the index satisfies a spec.

    `pkg` records which spec or filename could not be satisfied so callers
    can exclude just that package instead of aborting the whole solve.
    """
    def __init__(self, msg, pkg):
        RuntimeError.__init__(self, msg)
        self.pkg = pkg
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')
def ver_eval(version, constraint):
    """
    return the Boolean result of a comparison between two versions, where the
    second argument includes the comparison operator. For example,
    ver_eval('1.2', '>=1.1') will return True.
    """
    import operator
    # One dispatch table replaces the previous six-way if/elif chain of
    # duplicated try/except blocks.
    ops = {'==': operator.eq, '!=': operator.ne,
           '>=': operator.ge, '<=': operator.le,
           '>': operator.gt, '<': operator.lt}
    m = const_pat.match(constraint)
    if m is None:
        raise RuntimeError("Did not recognize version specification: %r" %
                           constraint)
    op, b = m.groups()
    if op not in ops:
        raise RuntimeError("Did not recognize version comparison operator: %r" %
                           constraint)
    compare = ops[op]
    na = normalized_version(version)
    nb = normalized_version(b)
    try:
        # Prefer comparing normalized versions so that e.g. '1.10' > '1.9'.
        return compare(na, nb)
    except TypeError:
        # Mixed NormalizedVersion/str operands are unorderable; fall back to
        # plain string comparison, exactly as the original chain did.
        return compare(version, b)
class VersionSpec(object):
    """One alternative of a version spec: either a glob pattern (e.g. '1.7*')
    or a comma-separated list of comparison constraints (e.g. '>=1.1,<2')."""
    def __init__(self, spec):
        assert '|' not in spec
        if spec.startswith(('=', '<', '>', '!')):
            # comparison form: every comma-separated constraint must hold
            self.regex = False
            self.constraints = spec.split(',')
        else:
            # glob form: translate '.' and '*' into an anchored regex
            self.regex = True
            pattern = r'(%s)$' % spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(pattern)
    def match(self, version):
        """True when `version` satisfies this spec alternative."""
        if self.regex:
            return bool(self.pat.match(version))
        return all(ver_eval(version, c) for c in self.constraints)
class MatchSpec(object):
    """A package spec string: 'name', 'name version-pattern', or
    'name version build'.  Strictness = number of space-separated parts."""
    def __init__(self, spec):
        self.spec = spec
        parts = spec.split()
        self.strictness = len(parts)
        assert 1 <= self.strictness <= 3
        self.name = parts[0]
        if self.strictness == 2:
            # the version part may list alternatives separated by '|'
            self.vspecs = [VersionSpec(s) for s in parts[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(parts[1:3])
    def match(self, fn):
        """True when the tarball filename `fn` satisfies this spec."""
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        if self.strictness == 3:
            return (version, build) == self.ver_build
        return True  # strictness == 1: any version/build of this name
    def to_filename(self):
        """Exact tarball filename for a strictness-3 spec, else None."""
        if self.strictness != 3:
            return None
        return self.name + '-%s-%s.tar.bz2' % self.ver_build
    def __eq__(self, other):
        return self.spec == other.spec
    def __hash__(self):
        return hash(self.spec)
    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)
    def __str__(self):
        return self.spec
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """
    def __init__(self, fn, info):
        # `info` is the index metadata dict for tarball filename `fn`
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        # may be a NormalizedVersion or, if unparseable, a plain string;
        # the comparison methods below catch the resulting TypeError
        self.norm_version = normalized_version(self.version)
    # http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp
    # def __cmp__(self, other):
    #     if self.name != other.name:
    #         raise ValueError('cannot compare packages with different '
    #                          'names: %r %r' % (self.fn, other.fn))
    #     try:
    #         return cmp((self.norm_version, self.build_number),
    #                    (other.norm_version, other.build_number))
    #     except TypeError:
    #         return cmp((self.version, self.build_number),
    #                    (other.version, other.build_number))
    def __lt__(self, other):
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        try:
            # NOTE(review): self.build and other.build are crossed between
            # the two tuples, inverting the build-string tie-break relative
            # to __eq__ -- looks suspicious; confirm whether intentional.
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            # unparsed version string: fall back to raw string comparison
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))
    def __eq__(self, other):
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))
    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))
    def __le__(self, other):
        return self < other or self == other
    def __ge__(self, other):
        return self > other or self == other
    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
    """Dependency resolver over a package index (filename -> metadata dict).

    Packages are identified by tarball filenames 'name-version-build.tar.bz2'.
    solve()/solve2() select a consistent set of filenames for the given
    specs, trying a fast newest-version-only pass before falling back to a
    pseudo-boolean SAT encoding.
    """
    def __init__(self, index):
        self.index = index
        self.groups = defaultdict(list)  # map name to list of filenames
        for fn, info in iteritems(index):
            self.groups[info['name']].append(fn)
        # dependency-spec cache for ms_depends(); tests reset it directly
        self.msd_cache = {}
    def find_matches(self, ms):
        # yield, in sorted order, every index filename matching MatchSpec `ms`
        for fn in sorted(self.groups[ms.name]):
            if ms.match(fn):
                yield fn
    def ms_depends(self, fn):
        # Return fn's dependencies as a list of MatchSpec objects.
        # the reason we don't use @memoize here is to allow resetting the
        # cache using self.msd_cache = {}, which is used during testing
        try:
            res = self.msd_cache[fn]
        except KeyError:
            if not 'depends' in self.index[fn]:
                raise NoPackagesFound('Bad metadata for %s' % fn, fn)
            depends = self.index[fn]['depends']
            res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
        return res
    @memoize
    def features(self, fn):
        # features carried by the package itself
        return set(self.index[fn].get('features', '').split())
    @memoize
    def track_features(self, fn):
        # features the package switches on for the environment
        return set(self.index[fn].get('track_features', '').split())
    @memoize
    def get_pkgs(self, ms, max_only=False):
        # All Package objects matching `ms`; with max_only, only those equal
        # to the maximal (newest) one.  Raises NoPackagesFound on no match.
        pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
        if not pkgs:
            raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
        if max_only:
            maxpkg = max(pkgs)
            ret = []
            # a TypeError while comparing (unparseable version string on one
            # side) is treated as "not equal"
            for pkg in pkgs:
                try:
                    if (pkg.name, pkg.norm_version, pkg.build_number) ==\
                       (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                        ret.append(pkg)
                except TypeError:
                    # They are not equal
                    pass
            return ret
        return pkgs
    def get_max_dists(self, ms):
        # yield the filenames of the newest packages matching `ms`
        pkgs = self.get_pkgs(ms, max_only=True)
        if not pkgs:
            raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
        for pkg in pkgs:
            yield pkg.fn
    def all_deps(self, root_fn, max_only=False):
        # map filename -> Package for the transitive dependency closure of
        # root_fn (root_fn itself is not included)
        res = {}
        def add_dependents(fn1, max_only=False):
            for ms in self.ms_depends(fn1):
                for pkg2 in self.get_pkgs(ms, max_only=max_only):
                    if pkg2.fn in res:
                        continue
                    res[pkg2.fn] = pkg2
                    # fully explicit (strictness 3) deps need no recursion
                    if ms.strictness < 3:
                        add_dependents(pkg2.fn, max_only=max_only)
        add_dependents(root_fn, max_only=max_only)
        return res
    def gen_clauses(self, v, dists, specs, features):
        # Generate CNF clauses (lists of signed variable numbers, using the
        # fn -> variable map `v`) encoding the install problem over the
        # candidate filenames in `dists`.
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)
        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield [-v1, -v2]
        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield clause
                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield clause
        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [v[fn] for fn in self.find_matches(ms)
                          if fn in dists and feat in self.features(fn)]
                if len(clause) > 0:
                    yield clause
            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield [-v[fn]]
            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms)
                      if fn in dists]
            assert len(clause) >= 1, ms
            yield clause
    def generate_version_eq(self, v, dists, include0=False):
        # Build the pseudo-boolean objective: each strictly older version of
        # a package gets a larger coefficient, so minimizing the weighted sum
        # prefers the newest versions.  Returns (eq, max_rhs).
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)
        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # newest first; `i` counts how many versions down each pkg is
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                        dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # unorderable version strings count as a new version
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i
        return eq, max_rhs
    def get_dists(self, specs, max_only=False):
        # Candidate pool: for each spec, every matching package plus its
        # transitive dependency closure.  Packages whose own dependencies
        # cannot be found are skipped rather than aborting the whole solve.
        dists = {}
        for spec in specs:
            found = False
            notfound = []
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                if pkg.fn in dists:
                    found = True
                    continue
                try:
                    dists.update(self.all_deps(pkg.fn, max_only=max_only))
                except NoPackagesFound as e:
                    # Ignore any package that has nonexisting dependencies.
                    if e.pkg not in notfound:
                        notfound.append(e.pkg)
                else:
                    dists[pkg.fn] = pkg
                    found = True
            if not found:
                raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
        return dists
    def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
        # Full solver: encode the problem as SAT plus a pseudo-boolean
        # version-preference objective, then minimize.
        log.debug("Solving for %s" % str(specs))
        # First try doing it the "old way", i.e., just look at the most recent
        # version of each package from the specs. This doesn't handle the more
        # complicated cases that the pseudo-boolean solver does, but it's also
        # much faster when it does work.
        try:
            dists = self.get_dists(specs, max_only=True)
        except NoPackagesFound:
            # Handle packages that are not included because some dependencies
            # couldn't be found.
            pass
        else:
            v = {}  # map fn to variable number
            w = {}  # map variable number to fn
            i = -1  # in case the loop doesn't run
            for i, fn in enumerate(sorted(dists)):
                v[fn] = i + 1
                w[i + 1] = fn
            m = i + 1
            dotlog.debug("Solving using max dists only")
            clauses = self.gen_clauses(v, dists, specs, features)
            solutions = min_sat(clauses)
            if len(solutions) == 1:
                # unique minimal solution on the fast path: done
                ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
                if returnall:
                    return [ret]
                return ret
        dists = self.get_dists(specs)
        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1
        clauses = list(self.gen_clauses(v, dists, specs, features))
        if not clauses:
            if returnall:
                return [[]]
            return []
        eq, max_rhs = self.generate_version_eq(v, dists)
        # Check the common case first
        dotlog.debug("Building the constraint with rhs: [0, 0]")
        constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))
        # Only relevant for build_BDD
        if constraints and constraints[0] == [false]:
            # XXX: This should *never* happen. build_BDD only returns false
            # when the linear constraint is unsatisfiable, but any linear
            # constraint can equal 0, by setting all the variables to 0.
            solution = []
        else:
            if constraints and constraints[0] == [true]:
                constraints = []
            dotlog.debug("Checking for solutions with rhs: [0, 0]")
            solution = sat(clauses + constraints)
        if not solution:
            # Second common case, check if it's unsatisfiable
            dotlog.debug("Checking for unsatisfiability")
            solution = sat(clauses)
            if not solution:
                if guess:
                    stderrlog.info('\nError: Unsatisfiable package '
                                   'specifications.\nGenerating hint: ')
                    sys.exit(self.guess_bad_solve(specs, features))
                raise RuntimeError("Unsatisfiable package specifications")
            def version_constraints(lo, hi):
                return list(generate_constraints(eq, m, [lo, hi], alg=alg))
            log.debug("Bisecting the version constraint")
            constraints = bisect_constraints(0, max_rhs, clauses, version_constraints)
        dotlog.debug("Finding the minimal solution")
        solutions = min_sat(clauses + constraints, N=m+1)
        assert solutions, (specs, features)
        if len(solutions) > 1:
            print('Warning:', len(solutions), "possible package resolutions:")
            for sol in solutions:
                print('\t', [w[lit] for lit in sol if 0 < lit <= m])
        if returnall:
            return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
        return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]
    def guess_bad_solve(self, specs, features):
        # Produce a human-readable hint about which spec combination causes
        # the conflict by re-solving successively smaller subsets of specs.
        # TODO: Check features as well
        hint = []
        # Try to find the largest satisfiable subset
        found = False
        for i in range(len(specs), 0, -1):
            if found:
                break
            for comb in combinations(specs, i):
                try:
                    self.solve2(comb, features, guess=False)
                except RuntimeError:
                    pass
                else:
                    # `comb` solves; whatever was left out is the culprit
                    rem = set(specs) - set(comb)
                    rem.discard('conda')
                    if len(rem) == 1:
                        hint.append("%s" % rem.pop())
                    else:
                        hint.append("%s" % ' and '.join(rem))
                    found = True
        if not hint:
            return ''
        if len(hint) == 1:
            return ("\nHint: %s has a conflict with the remaining packages" %
                    hint[0])
        return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
  - %s""" % '\n  - '.join(hint))
    def explicit(self, specs):
        """
        Given the specifications, return:
          A. if one explicit specification (strictness=3) is given, and
             all dependencies of this package are explicit as well ->
             return the filenames of those dependencies (as well as the
             explicit specification)
          B. if not one explicit specifications are given ->
             return the filenames of those (not their dependencies)
          C. None in all other cases
        """
        if len(specs) == 1:
            ms = MatchSpec(specs[0])
            fn = ms.to_filename()
            if fn is None:
                return None
            res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
            res.append(fn)
        else:
            res = [MatchSpec(spec).to_filename() for spec in specs
                   if spec != 'conda']
        if None in res:
            # at least one spec was not fully explicit
            return None
        res.sort()
        log.debug('explicit(%r) finished' % specs)
        return res
    @memoize
    def sum_matches(self, fn1, fn2):
        # how many of fn1's dependency specs does fn2 satisfy?
        return sum(ms.match(fn2) for ms in self.ms_depends(fn1))
    def find_substitute(self, installed, features, fn, max_only=False):
        """
        Find a substitute package for `fn` (given `installed` packages)
        which does *NOT* have `features`. If found, the substitute will
        have the same package name and version and its dependencies will
        match the installed packages as closely as possible.
        If no substitute is found, None is returned.
        """
        name, version, unused_build = fn.rsplit('-', 2)
        candidates = {}
        for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
            fn1 = pkg.fn
            if self.features(fn1).intersection(features):
                # candidate still carries an unwanted feature
                continue
            # score: how closely the candidate's deps match what is installed
            key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
            candidates[key] = fn1
        if candidates:
            maxkey = max(candidates)
            return candidates[maxkey]
        else:
            return None
    def installed_features(self, installed):
        """
        Return the set of all features of all `installed` packages,
        """
        res = set()
        for fn in installed:
            try:
                res.update(self.features(fn))
            except KeyError:
                # filename not in the index; ignore it
                pass
        return res
    def update_with_features(self, fn, features):
        # Overlay fn's 'with_features_depends' entry (for the largest feature
        # combination fully contained in `features`) onto its dep cache.
        with_features = self.index[fn].get('with_features_depends')
        if with_features is None:
            return
        key = ''
        for fstr in with_features:
            fs = set(fstr.split())
            if fs <= features and len(fs) > len(set(key.split())):
                key = fstr
        if not key:
            return
        d = {ms.name: ms for ms in self.ms_depends(fn)}
        for spec in with_features[key]:
            ms = MatchSpec(spec)
            d[ms.name] = ms
        self.msd_cache[fn] = d.values()
    def solve(self, specs, installed=None, features=None, max_only=False):
        # Top-level entry point: return a list of filenames satisfying
        # `specs`, trying the fast explicit() path before solve2().
        if installed is None:
            installed = []
        if features is None:
            features = self.installed_features(installed)
        for spec in specs:
            # requested packages may switch on additional tracked features
            ms = MatchSpec(spec)
            for pkg in self.get_pkgs(ms, max_only=max_only):
                fn = pkg.fn
                features.update(self.track_features(fn))
        log.debug('specs=%r features=%r' % (specs, features))
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                fn = pkg.fn
                self.update_with_features(fn, features)
        stdoutlog.info("Solving package specifications: ")
        try:
            return self.explicit(specs) or self.solve2(specs, features)
        except RuntimeError:
            stdoutlog.info('\n')
            raise
if __name__ == '__main__':
    # ad-hoc manual test driver: resolve the specs given on the command line
    # against the test index, optionally with the 'mkl' feature enabled
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec
    with open('../tests/index.json') as fi:
        r = Resolve(json.load(fi))
    p = OptionParser(usage="usage: %prog [options] SPEC(s)")
    p.add_option("--mkl", action="store_true")
    opts, args = p.parse_args()
    features = set(['mkl']) if opts.mkl else set()
    specs = [arg2spec(arg) for arg in args]
    pprint(r.solve(specs, [], features))
<|code_end|>
| conda/resolve.py
from __future__ import print_function, division, absolute_import
import re
import sys
import logging
from itertools import combinations
from collections import defaultdict
from conda import verlib
from conda.utils import memoize
from conda.compat import itervalues, iteritems
from conda.logic import (false, true, sat, min_sat, generate_constraints,
bisect_constraints)
from conda.console import setup_handlers
log = logging.getLogger(__name__)
dotlog = logging.getLogger('dotupdate')
stdoutlog = logging.getLogger('stdoutlog')
stderrlog = logging.getLogger('stderrlog')
setup_handlers()
def normalized_version(version):
    """Best-effort parse of `version` into a sortable NormalizedVersion.

    'rc' is rewritten to a '.dev' tag so release candidates sort before
    final releases; if the result still cannot be parsed, the (rewritten)
    string itself is returned.
    """
    canonical = version.replace('rc', '.dev99999')
    if canonical.endswith('.dev'):
        canonical += '0'
    try:
        return verlib.NormalizedVersion(canonical)
    except verlib.IrrationalVersionError:
        return canonical
class NoPackagesFound(RuntimeError):
    """Raised when no package (or dependency) in the index satisfies a spec.

    `pkg` records which spec or filename could not be satisfied so callers
    can exclude just that package instead of aborting the whole solve.
    """
    def __init__(self, msg, pkg):
        RuntimeError.__init__(self, msg)
        self.pkg = pkg
const_pat = re.compile(r'([=<>!]{1,2})(\S+)$')
def ver_eval(version, constraint):
    """
    return the Boolean result of a comparison between two versions, where the
    second argument includes the comparison operator. For example,
    ver_eval('1.2', '>=1.1') will return True.
    """
    import operator
    # One dispatch table replaces the previous six-way if/elif chain of
    # duplicated try/except blocks.
    ops = {'==': operator.eq, '!=': operator.ne,
           '>=': operator.ge, '<=': operator.le,
           '>': operator.gt, '<': operator.lt}
    m = const_pat.match(constraint)
    if m is None:
        raise RuntimeError("Did not recognize version specification: %r" %
                           constraint)
    op, b = m.groups()
    if op not in ops:
        raise RuntimeError("Did not recognize version comparison operator: %r" %
                           constraint)
    compare = ops[op]
    na = normalized_version(version)
    nb = normalized_version(b)
    try:
        # Prefer comparing normalized versions so that e.g. '1.10' > '1.9'.
        return compare(na, nb)
    except TypeError:
        # Mixed NormalizedVersion/str operands are unorderable; fall back to
        # plain string comparison, exactly as the original chain did.
        return compare(version, b)
class VersionSpec(object):
    """One alternative of a version spec: either a glob pattern (e.g. '1.7*')
    or a comma-separated list of comparison constraints (e.g. '>=1.1,<2')."""
    def __init__(self, spec):
        assert '|' not in spec
        if spec.startswith(('=', '<', '>', '!')):
            # comparison form: every comma-separated constraint must hold
            self.regex = False
            self.constraints = spec.split(',')
        else:
            # glob form: translate '.' and '*' into an anchored regex
            self.regex = True
            pattern = r'(%s)$' % spec.replace('.', r'\.').replace('*', r'.*')
            self.pat = re.compile(pattern)
    def match(self, version):
        """True when `version` satisfies this spec alternative."""
        if self.regex:
            return bool(self.pat.match(version))
        return all(ver_eval(version, c) for c in self.constraints)
class MatchSpec(object):
    """A package requirement string of 1-3 whitespace-separated fields:
    'name', 'name version', or 'name version build'.  The number of fields
    present is the spec's *strictness*.
    """

    def __init__(self, spec):
        self.spec = spec
        fields = spec.split()
        self.strictness = len(fields)
        assert 1 <= self.strictness <= 3
        self.name = fields[0]
        if self.strictness == 2:
            # '|'-separated version alternatives; any one may match.
            self.vspecs = [VersionSpec(s) for s in fields[1].split('|')]
        elif self.strictness == 3:
            self.ver_build = tuple(fields[1:3])

    def match(self, fn):
        """Return whether the package filename *fn* satisfies this spec."""
        assert fn.endswith('.tar.bz2')
        name, version, build = fn[:-8].rsplit('-', 2)
        if name != self.name:
            return False
        if self.strictness == 2:
            return any(vs.match(version) for vs in self.vspecs)
        if self.strictness == 3:
            return bool((version, build) == self.ver_build)
        # strictness 1: the name alone is enough
        return True

    def to_filename(self):
        """Return the exact filename for a fully explicit spec, else None."""
        if self.strictness != 3:
            return None
        return '%s-%s-%s.tar.bz2' % ((self.name,) + self.ver_build)

    def __eq__(self, other):
        return self.spec == other.spec

    def __hash__(self):
        return hash(self.spec)

    def __repr__(self):
        return 'MatchSpec(%r)' % (self.spec)

    def __str__(self):
        return self.spec
class Package(object):
    """
    The only purpose of this class is to provide package objects which
    are sortable.
    """

    def __init__(self, fn, info):
        # fn is the package filename; info is its repodata record.
        self.fn = fn
        self.name = info['name']
        self.version = info['version']
        self.build_number = info['build_number']
        self.build = info['build']
        self.channel = info.get('channel')
        # Pre-computed sort key; may remain a plain string when the version
        # is irrational (see normalized_version), in which case comparisons
        # against real NormalizedVersions can raise TypeError.
        self.norm_version = normalized_version(self.version)

    # http://python3porting.com/problems.html#unorderable-types-cmp-and-cmp
    # def __cmp__(self, other):
    #     if self.name != other.name:
    #         raise ValueError('cannot compare packages with different '
    #                          'names: %r %r' % (self.fn, other.fn))
    #     try:
    #         return cmp((self.norm_version, self.build_number),
    #                    (other.norm_version, other.build_number))
    #     except TypeError:
    #         return cmp((self.version, self.build_number),
    #                    (other.version, other.build_number))

    def __lt__(self, other):
        # Orders primarily by (normalized version, build number), falling
        # back to the raw strings when the normalized forms are not
        # mutually comparable.
        # NOTE(review): the build strings are cross-compared -- other.build
        # appears in self's tuple and self.build in other's -- so ties on
        # version and build number are broken in *reverse* lexicographic
        # order of the build string.  Possibly intentional; confirm before
        # changing.
        if self.name != other.name:
            raise TypeError('cannot compare packages with different '
                            'names: %r %r' % (self.fn, other.fn))
        try:
            return ((self.norm_version, self.build_number, other.build) <
                    (other.norm_version, other.build_number, self.build))
        except TypeError:
            return ((self.version, self.build_number) <
                    (other.version, other.build_number))

    def __eq__(self, other):
        # Equal only to another Package with identical name, version,
        # build number and build string.
        if not isinstance(other, Package):
            return False
        if self.name != other.name:
            return False
        try:
            return ((self.norm_version, self.build_number, self.build) ==
                    (other.norm_version, other.build_number, other.build))
        except TypeError:
            return ((self.version, self.build_number, self.build) ==
                    (other.version, other.build_number, other.build))

    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self < other or self == other

    def __ge__(self, other):
        return self > other or self == other

    def __repr__(self):
        return '<Package %s>' % self.fn
class Resolve(object):
    """Dependency resolver over a repodata *index* (a dict mapping package
    filename -> metadata record).

    The install problem is encoded as Boolean satisfiability (gen_clauses)
    plus a pseudo-Boolean objective that prefers newer versions
    (generate_version_eq); solve2 drives the SAT machinery from conda.logic.
    """

    def __init__(self, index):
        self.index = index
        self.groups = defaultdict(list)  # map name to list of filenames
        for fn, info in iteritems(index):
            self.groups[info['name']].append(fn)
        self.msd_cache = {}

    def find_matches(self, ms):
        # Yield, in sorted order, the index filenames matching MatchSpec *ms*.
        for fn in sorted(self.groups[ms.name]):
            if ms.match(fn):
                yield fn

    def ms_depends(self, fn):
        """Return the list of MatchSpecs package *fn* depends on.

        Raises NoPackagesFound when the index record has no 'depends'
        entry (bad metadata).
        """
        # the reason we don't use @memoize here is to allow resetting the
        # cache using self.msd_cache = {}, which is used during testing
        try:
            res = self.msd_cache[fn]
        except KeyError:
            if not 'depends' in self.index[fn]:
                raise NoPackagesFound('Bad metadata for %s' % fn, fn)
            depends = self.index[fn]['depends']
            res = self.msd_cache[fn] = [MatchSpec(d) for d in depends]
        return res

    @memoize
    def features(self, fn):
        # Features package *fn* was built with (e.g. 'mkl').
        return set(self.index[fn].get('features', '').split())

    @memoize
    def track_features(self, fn):
        # Features that installing *fn* turns on globally.
        return set(self.index[fn].get('track_features', '').split())

    @memoize
    def get_pkgs(self, ms, max_only=False):
        """Return Package objects matching *ms*; with max_only=True, only
        the packages tied for the highest (version, build number).

        Raises NoPackagesFound when nothing in the index matches.
        """
        pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
        if not pkgs:
            raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
        if max_only:
            maxpkg = max(pkgs)
            ret = []
            for pkg in pkgs:
                try:
                    if (pkg.name, pkg.norm_version, pkg.build_number) ==\
                       (maxpkg.name, maxpkg.norm_version, maxpkg.build_number):
                        ret.append(pkg)
                except TypeError:
                    # They are not equal
                    pass
            return ret
        return pkgs

    def get_max_dists(self, ms):
        # Yield filenames of the maximal packages matching *ms*.
        pkgs = self.get_pkgs(ms, max_only=True)
        if not pkgs:
            raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec)
        for pkg in pkgs:
            yield pkg.fn

    def all_deps(self, root_fn, max_only=False):
        """Return {filename: Package} for the transitive dependencies of
        *root_fn*.

        Dependency candidates whose own dependencies cannot be found are
        skipped; NoPackagesFound is raised only when a dependency spec ends
        up with no usable candidate at all.
        """
        res = {}

        def add_dependents(fn1, max_only=False):
            for ms in self.ms_depends(fn1):
                found = False
                notfound = []
                for pkg2 in self.get_pkgs(ms, max_only=max_only):
                    if pkg2.fn in res:
                        found = True
                        continue
                    try:
                        # strictness 3 pins an exact build; no need to
                        # recurse into its (already-fixed) dependencies here
                        if ms.strictness < 3:
                            add_dependents(pkg2.fn, max_only=max_only)
                    except NoPackagesFound as e:
                        if e.pkg not in notfound:
                            notfound.append(e.pkg)
                    else:
                        found = True
                        res[pkg2.fn] = pkg2
                if not found:
                    raise NoPackagesFound("Could not find some dependencies "
                                          "for %s: %s" % (ms, ', '.join(notfound)), str(ms))

        add_dependents(root_fn, max_only=max_only)
        return res

    def gen_clauses(self, v, dists, specs, features):
        """Yield CNF clauses (lists of signed variable numbers from the
        fn -> var map *v*) encoding the install problem over the candidate
        set *dists*: at most one package per name, dependencies satisfied,
        feature consistency, and every requested spec satisfied.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in dists:
            groups[self.index[fn]['name']].append(fn)

        for filenames in itervalues(groups):
            # ensure packages with the same name conflict
            for fn1 in filenames:
                v1 = v[fn1]
                for fn2 in filenames:
                    v2 = v[fn2]
                    if v1 < v2:
                        # NOT (fn1 AND fn2)
                        # e.g. NOT (numpy-1.6 AND numpy-1.7)
                        yield [-v1, -v2]

        for fn1 in dists:
            for ms in self.ms_depends(fn1):
                # ensure dependencies are installed
                # e.g. numpy-1.7 IMPLIES (python-2.7.3 OR python-2.7.4 OR ...)
                clause = [-v[fn1]]
                for fn2 in self.find_matches(ms):
                    if fn2 in dists:
                        clause.append(v[fn2])
                assert len(clause) > 1, '%s %r' % (fn1, ms)
                yield clause

                for feat in features:
                    # ensure that a package (with required name) which has
                    # the feature is installed
                    # e.g. numpy-1.7 IMPLIES (numpy-1.8[mkl] OR numpy-1.7[mkl])
                    clause = [-v[fn1]]
                    for fn2 in groups[ms.name]:
                        if feat in self.features(fn2):
                            clause.append(v[fn2])
                    if len(clause) > 1:
                        yield clause

        for spec in specs:
            ms = MatchSpec(spec)
            # ensure that a matching package with the feature is installed
            for feat in features:
                # numpy-1.7[mkl] OR numpy-1.8[mkl]
                clause = [v[fn] for fn in self.find_matches(ms)
                          if fn in dists and feat in self.features(fn)]
                if len(clause) > 0:
                    yield clause

            # Don't install any package that has a feature that wasn't requested.
            for fn in self.find_matches(ms):
                if fn in dists and self.features(fn) - features:
                    yield [-v[fn]]

            # finally, ensure a matching package itself is installed
            # numpy-1.7-py27 OR numpy-1.7-py26 OR numpy-1.7-py33 OR
            # numpy-1.7-py27[mkl] OR ...
            clause = [v[fn] for fn in self.find_matches(ms)
                      if fn in dists]
            assert len(clause) >= 1, ms
            yield clause

    def generate_version_eq(self, v, dists, include0=False):
        """Build the optimizer objective: (coefficient, variable) pairs where
        the coefficient counts how many distinct (version, build number)
        steps a package lags behind the newest of its name.  Also returns
        the maximum possible right-hand side of the resulting constraint.
        """
        groups = defaultdict(list)  # map name to list of filenames
        for fn in sorted(dists):
            groups[self.index[fn]['name']].append(fn)

        eq = []
        max_rhs = 0
        for filenames in sorted(itervalues(groups)):
            # newest first; i is incremented at each distinct
            # (version, build number) boundary
            pkgs = sorted(filenames, key=lambda i: dists[i], reverse=True)
            i = 0
            prev = pkgs[0]
            for pkg in pkgs:
                try:
                    if (dists[pkg].name, dists[pkg].norm_version,
                        dists[pkg].build_number) != (dists[prev].name,
                            dists[prev].norm_version, dists[prev].build_number):
                        i += 1
                except TypeError:
                    # unorderable versions always count as a new step
                    i += 1
                if i or include0:
                    eq += [(i, v[pkg])]
                prev = pkg
            max_rhs += i
        return eq, max_rhs

    def get_dists(self, specs, max_only=False):
        """Return {filename: Package} for all candidates of the given spec
        strings plus their transitive dependencies.

        Candidates with unresolvable dependencies are dropped; a
        NoPackagesFound is raised only when a spec has no usable candidate
        at all.
        """
        dists = {}
        for spec in specs:
            found = False
            notfound = []
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                if pkg.fn in dists:
                    found = True
                    continue
                try:
                    dists.update(self.all_deps(pkg.fn, max_only=max_only))
                except NoPackagesFound as e:
                    # Ignore any package that has nonexisting dependencies.
                    if e.pkg not in notfound:
                        notfound.append(e.pkg)
                else:
                    dists[pkg.fn] = pkg
                    found = True
            if not found:
                raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), spec)

        return dists

    def solve2(self, specs, features, guess=True, alg='sorter', returnall=False):
        """Solve *specs* with the pseudo-Boolean machinery.

        Tries a fast max-versions-only pass first; on failure or ambiguity
        falls back to the full candidate set, minimizing the version-lag
        objective and then the number of installed packages.  Returns a list
        of filenames (or a list of such lists when returnall=True).
        """
        log.debug("Solving for %s" % str(specs))

        # First try doing it the "old way", i.e., just look at the most recent
        # version of each package from the specs. This doesn't handle the more
        # complicated cases that the pseudo-boolean solver does, but it's also
        # much faster when it does work.
        try:
            dists = self.get_dists(specs, max_only=True)
        except NoPackagesFound:
            # Handle packages that are not included because some dependencies
            # couldn't be found.
            pass
        else:
            v = {}  # map fn to variable number
            w = {}  # map variable number to fn
            i = -1  # in case the loop doesn't run
            for i, fn in enumerate(sorted(dists)):
                v[fn] = i + 1
                w[i + 1] = fn
            m = i + 1
            dotlog.debug("Solving using max dists only")
            clauses = self.gen_clauses(v, dists, specs, features)
            solutions = min_sat(clauses)

            # only accept the fast path when it is unambiguous
            if len(solutions) == 1:
                ret = [w[lit] for lit in solutions.pop(0) if 0 < lit]
                if returnall:
                    return [ret]
                return ret

        dists = self.get_dists(specs)

        v = {}  # map fn to variable number
        w = {}  # map variable number to fn
        i = -1  # in case the loop doesn't run
        for i, fn in enumerate(sorted(dists)):
            v[fn] = i + 1
            w[i + 1] = fn
        m = i + 1

        clauses = list(self.gen_clauses(v, dists, specs, features))
        if not clauses:
            if returnall:
                return [[]]
            return []
        eq, max_rhs = self.generate_version_eq(v, dists)

        # Check the common case first
        dotlog.debug("Building the constraint with rhs: [0, 0]")
        constraints = list(generate_constraints(eq, m, [0, 0], alg=alg))

        # Only relevant for build_BDD
        if constraints and constraints[0] == [false]:
            # XXX: This should *never* happen. build_BDD only returns false
            # when the linear constraint is unsatisfiable, but any linear
            # constraint can equal 0, by setting all the variables to 0.
            solution = []
        else:
            if constraints and constraints[0] == [true]:
                constraints = []

            dotlog.debug("Checking for solutions with rhs: [0, 0]")
            solution = sat(clauses + constraints)

        if not solution:
            # Second common case, check if it's unsatisfiable
            dotlog.debug("Checking for unsatisfiability")
            solution = sat(clauses)

            if not solution:
                if guess:
                    stderrlog.info('\nError: Unsatisfiable package '
                                   'specifications.\nGenerating hint: ')
                    sys.exit(self.guess_bad_solve(specs, features))
                raise RuntimeError("Unsatisfiable package specifications")

            def version_constraints(lo, hi):
                return list(generate_constraints(eq, m, [lo, hi], alg=alg))

            log.debug("Bisecting the version constraint")
            constraints = bisect_constraints(0, max_rhs, clauses,
                                             version_constraints)

        dotlog.debug("Finding the minimal solution")
        solutions = min_sat(clauses + constraints, N=m+1)
        assert solutions, (specs, features)

        if len(solutions) > 1:
            print('Warning:', len(solutions), "possible package resolutions:")
            for sol in solutions:
                print('\t', [w[lit] for lit in sol if 0 < lit <= m])

        if returnall:
            return [[w[lit] for lit in sol if 0 < lit <= m] for sol in solutions]
        return [w[lit] for lit in solutions.pop(0) if 0 < lit <= m]

    def guess_bad_solve(self, specs, features):
        """Return a human-readable hint about which spec combinations make
        the problem unsatisfiable, found by searching for the largest
        satisfiable subset of *specs*.
        """
        # TODO: Check features as well
        hint = []
        # Try to find the largest satisfiable subset
        found = False
        for i in range(len(specs), 0, -1):
            if found:
                break
            for comb in combinations(specs, i):
                try:
                    self.solve2(comb, features, guess=False)
                except RuntimeError:
                    pass
                else:
                    rem = set(specs) - set(comb)
                    rem.discard('conda')
                    if len(rem) == 1:
                        hint.append("%s" % rem.pop())
                    else:
                        hint.append("%s" % ' and '.join(rem))
                    found = True
        if not hint:
            return ''
        if len(hint) == 1:
            return ("\nHint: %s has a conflict with the remaining packages" %
                    hint[0])
        return ("""
Hint: the following combinations of packages create a conflict with the
remaining packages:
 - %s""" % '\n - '.join(hint))

    def explicit(self, specs):
        """
        Given the specifications, return:
          A. if one explicit specification (strictness=3) is given, and
             all dependencies of this package are explicit as well ->
             return the filenames of those dependencies (as well as the
             explicit specification)
          B. if not one explicit specifications are given ->
             return the filenames of those (not their dependencies)
          C. None in all other cases
        """
        if len(specs) == 1:
            ms = MatchSpec(specs[0])
            fn = ms.to_filename()
            if fn is None:
                return None
            res = [ms2.to_filename() for ms2 in self.ms_depends(fn)]
            res.append(fn)
        else:
            res = [MatchSpec(spec).to_filename() for spec in specs
                   if spec != 'conda']

        if None in res:
            return None
        res.sort()
        log.debug('explicit(%r) finished' % specs)
        return res

    @memoize
    def sum_matches(self, fn1, fn2):
        # Number of fn1's dependency specs satisfied by fn2 -- used as a
        # similarity score by find_substitute.
        return sum(ms.match(fn2) for ms in self.ms_depends(fn1))

    def find_substitute(self, installed, features, fn, max_only=False):
        """
        Find a substitute package for `fn` (given `installed` packages)
        which does *NOT* have `features`. If found, the substitute will
        have the same package name and version and its dependencies will
        match the installed packages as closely as possible.
        If no substitute is found, None is returned.
        """
        name, version, unused_build = fn.rsplit('-', 2)
        candidates = {}
        for pkg in self.get_pkgs(MatchSpec(name + ' ' + version), max_only=max_only):
            fn1 = pkg.fn
            if self.features(fn1).intersection(features):
                continue
            # keyed by similarity score; on ties the last candidate wins
            key = sum(self.sum_matches(fn1, fn2) for fn2 in installed)
            candidates[key] = fn1

        if candidates:
            maxkey = max(candidates)
            return candidates[maxkey]
        else:
            return None

    def installed_features(self, installed):
        """
        Return the set of all features of all `installed` packages
        (filenames not present in the index are ignored).
        """
        res = set()
        for fn in installed:
            try:
                res.update(self.features(fn))
            except KeyError:
                pass
        return res

    def update_with_features(self, fn, features):
        # If fn has 'with_features_depends' entries, overlay the dependency
        # specs of the best-matching entry (the largest feature subset of
        # *features*) onto fn's cached dependencies.
        with_features = self.index[fn].get('with_features_depends')
        if with_features is None:
            return
        key = ''
        for fstr in with_features:
            fs = set(fstr.split())
            if fs <= features and len(fs) > len(set(key.split())):
                key = fstr
        if not key:
            return
        d = {ms.name: ms for ms in self.ms_depends(fn)}
        for spec in with_features[key]:
            ms = MatchSpec(spec)
            d[ms.name] = ms
        self.msd_cache[fn] = d.values()

    def solve(self, specs, installed=None, features=None, max_only=False):
        """Top-level entry point: collect tracked features, apply
        with_features overlays, then try the explicit fast path before
        falling back to the full solver (solve2).
        """
        if installed is None:
            installed = []
        if features is None:
            features = self.installed_features(installed)
        for spec in specs:
            ms = MatchSpec(spec)
            for pkg in self.get_pkgs(ms, max_only=max_only):
                fn = pkg.fn
                features.update(self.track_features(fn))
        log.debug('specs=%r features=%r' % (specs, features))
        for spec in specs:
            for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only):
                fn = pkg.fn
                self.update_with_features(fn, features)

        stdoutlog.info("Solving package specifications: ")
        try:
            return self.explicit(specs) or self.solve2(specs, features)
        except RuntimeError:
            stdoutlog.info('\n')
            raise
if __name__ == '__main__':
    # Ad-hoc manual driver: resolve specs given on the command line against
    # the test index and pretty-print the result.
    import json
    from pprint import pprint
    from optparse import OptionParser
    from conda.cli.common import arg2spec

    with open('../tests/index.json') as index_file:
        resolver = Resolve(json.load(index_file))

    parser = OptionParser(usage="usage: %prog [options] SPEC(s)")
    parser.add_option("--mkl", action="store_true")
    opts, args = parser.parse_args()

    features = {'mkl'} if opts.mkl else set()
    specs = [arg2spec(arg) for arg in args]
    pprint(resolver.solve(specs, [], features))
| conda/resolve.py
--- a/conda/resolve.py
+++ b/conda/resolve.py
@@ -278,12 +278,25 @@ def all_deps(self, root_fn, max_only=False):
def add_dependents(fn1, max_only=False):
for ms in self.ms_depends(fn1):
+ found = False
+ notfound = []
for pkg2 in self.get_pkgs(ms, max_only=max_only):
if pkg2.fn in res:
+ found = True
continue
- res[pkg2.fn] = pkg2
- if ms.strictness < 3:
- add_dependents(pkg2.fn, max_only=max_only)
+ try:
+ if ms.strictness < 3:
+ add_dependents(pkg2.fn, max_only=max_only)
+ except NoPackagesFound as e:
+ if e.pkg not in notfound:
+ notfound.append(e.pkg)
+ else:
+ found = True
+ res[pkg2.fn] = pkg2
+
+ if not found:
+ raise NoPackagesFound("Could not find some dependencies "
+ "for %s: %s" % (ms, ', '.join(notfound)), str(ms))
add_dependents(root_fn, max_only=max_only)
return res
@@ -394,7 +407,7 @@ def get_dists(self, specs, max_only=False):
dists[pkg.fn] = pkg
found = True
if not found:
- raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None)
+ raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), spec)
return dists
|
"TLS does not appear to be verified\nAs far as I can tell conda is just using urllib2 which doesn't (...TRUNCATED) | "conda/connection.py\n<|code_start|>\n# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.i(...TRUNCATED) | "conda/connection.py\n# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io\n# All Rights (...TRUNCATED) | "conda/connection.py\n--- a/conda/connection.py\n+++ b/conda/connection.py\n@@ -7,106 +7,368 @@\n fr(...TRUNCATED) |
"Add ability to keep retrying with a lock error\nThe yum installer (IIRC) has a nice feature that it(...TRUNCATED) | "conda/lock.py\n<|code_start|>\n# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io\n# A(...TRUNCATED) | "conda/lock.py\n# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io\n# All Rights Reserv(...TRUNCATED) | "conda/lock.py\n--- a/conda/lock.py\n+++ b/conda/lock.py\n@@ -19,7 +19,7 @@\n import os\n from os.pa(...TRUNCATED) |
"conda does not prompt for proxy username and password \n```\n[ COMPLETE ] |##############(...TRUNCATED) | "conda/config.py\n<|code_start|>\n# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io\n#(...TRUNCATED) | "conda/config.py\n# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io\n# All Rights Rese(...TRUNCATED) | "conda/config.py\n--- a/conda/config.py\n+++ b/conda/config.py\n@@ -244,7 +244,10 @@ def get_allowed(...TRUNCATED) |
"`conda create --file deps.txt pkg1 pkg2 ... pkgn` doesn't work\n```\n$ echo \"scipy\" > deps.txt\n$(...TRUNCATED) | "conda/cli/install.py\n<|code_start|>\n# (c) Continuum Analytics, Inc. / http://continuum.io\n# All (...TRUNCATED) | "conda/cli/install.py\n# (c) Continuum Analytics, Inc. / http://continuum.io\n# All Rights Reserved\(...TRUNCATED) | "conda/cli/install.py\n--- a/conda/cli/install.py\n+++ b/conda/cli/install.py\n@@ -143,10 +143,10 @@(...TRUNCATED) |
"conda command-line tool provides a convenience command to run the Python executable from a specifie(...TRUNCATED) | "conda/cli/main_run.py\n<|code_start|>\n# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum(...TRUNCATED) | "conda/cli/main_run.py\n# (c) 2012-2013 Continuum Analytics, Inc. / http://continuum.io\n# All Right(...TRUNCATED) | "conda/cli/main_run.py\n--- a/conda/cli/main_run.py\n+++ b/conda/cli/main_run.py\n@@ -7,6 +7,7 @@\n (...TRUNCATED) |
"Use rmtree workaround for write-protected files on Windows\nSee https://stackoverflow.com/questions(...TRUNCATED) | "conda/install.py\n<|code_start|>\n# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io\n(...TRUNCATED) | "conda/install.py\n# (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io\n# All Rights Res(...TRUNCATED) | "conda/install.py\n--- a/conda/install.py\n+++ b/conda/install.py\n@@ -155,11 +155,20 @@ def rm_rf(p(...TRUNCATED) |
"Only try updating outdated packages with update --all\nconda update --all tends to fail a lot becau(...TRUNCATED) | "conda/cli/install.py\n<|code_start|>\n# (c) Continuum Analytics, Inc. / http://continuum.io\n# All (...TRUNCATED) | "conda/cli/install.py\n# (c) Continuum Analytics, Inc. / http://continuum.io\n# All Rights Reserved\(...TRUNCATED) | "conda/cli/install.py\n--- a/conda/cli/install.py\n+++ b/conda/cli/install.py\n@@ -191,13 +191,13 @@(...TRUNCATED) |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 5