function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def test_round_deals_system_card(self):
    """A freshly dealt round must end up with a system card assigned."""
    next_round = Round(
        game=self.game,
        number=self.current.number + 1,
        turn=self.current.turn,
    )
    next_round.deal()
    self.assertTrue(next_round.card is not None)
jminuscula/dixit-online
[ 70, 19, 70, 23, 1465385513 ]
def test_deal_fails_when_not_enough_cards_available(self):
    """Dealing must raise GameDeckExhausted once the deck cannot cover all hands."""
    max_players = Card.objects.count() // (settings.GAME_HAND_SIZE + 1)
    # Register one player more than the deck can possibly serve.
    for idx in range(max_players + 1):
        username = 'test_n_{}'.format(idx)
        email = '{}@localhost'.format(username)
        account = User.objects.create(username=username, email=email,
                                      password='test')
        Player.objects.create(game=self.game, user=account,
                              name='player_{}'.format(idx))
    new_round = Round(game=self.game, number=self.current.number + 1,
                      turn=self.current.turn)
    with self.assertRaises(GameDeckExhausted):
        new_round.deal()
jminuscula/dixit-online
[ 70, 19, 70, 23, 1465385513 ]
def test_providing_round_can_not_be_closed(self):
    """Closing a round that is still in the PROVIDING state must fail."""
    story_card = self.current.turn._pick_card()
    Play.play_for_round(self.current, self.current.turn, story_card, 'test')
    Play.play_for_round(self.current, self.player2, self.player2._pick_card())
    self.assertEqual(self.current.status, RoundStatus.PROVIDING)
    self.assertRaises(GameRoundIncomplete, self.current.close)
jminuscula/dixit-online
[ 70, 19, 70, 23, 1465385513 ]
def test_complete_round_can_be_closed(self):
    """A round where everyone has played and voted closes without error."""
    story_card = self.current.turn._pick_card()
    Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
    others = self.game.players.all().exclude(id=self.game.storyteller.id)
    for member in others:
        Play.play_for_round(self.current, member, member._pick_card())
    for play in self.current.plays.all().exclude(player=self.game.storyteller):
        play.vote_card(story_card)
    self.assertEqual(self.current.status, RoundStatus.COMPLETE)
    self.current.close()
jminuscula/dixit-online
[ 70, 19, 70, 23, 1465385513 ]
def test_storyteller_doesnt_score_when_all_players_guess(self):
    """If every player votes the story card, the storyteller earns no points."""
    story_card = self.current.turn._pick_card()
    Play.play_for_round(self.current, self.current.turn, story_card, 'test')
    guesses = []
    for member in (self.player2, self.player3):
        guesses.append(
            Play.play_for_round(self.current, member, member._pick_card())
        )
    for guess in guesses:
        guess.vote_card(story_card)
    self.current.close()
    self.current.turn.refresh_from_db()
    self.assertEqual(self.current.turn.score, 0)
jminuscula/dixit-online
[ 70, 19, 70, 23, 1465385513 ]
def __init__( self, accessDictionary = None, accessFile = "" ): """ @summary: LogFileAccessManager constructor.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def saveAccessDictionary( self ): """ @summary: Saves the current accessDictionary into the accessfile. """
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def loadAccessFile(self): """ @summary: Loads the accessFile into the accessDictionary.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def getLineAssociatedWith( self, identifier ): """ @param identifier: Identifier string of the following format: fileType_client/sourcename_machineName
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def getLastReadPositionAssociatedWith(self, identifier): """ @param identifier: Identifier string of the following format: fileType_client/sourcename_machineName
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def getFirstLineFromFile(self, fileName): """ @summary: Reads the first line of a file and returns it.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def getFirstLineAndLastReadPositionAssociatedwith(self, identifier): """ @param identifier: Identifier string of the following format: fileType_client/sourcename_machineName
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def setFirstLineAssociatedwith(self, firstLine, identifier ): """ @summary: Simple setter that hides data structure implementation so that methods still work if implementation is ever to change.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def setLastReadPositionAssociatedwith(self, lastReadPosition, identifier ): """ @summary: Simple setter that hides data structure implementation so that methods still work if implementation is ever to change.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def setFirstLineAndLastReadPositionAssociatedwith(self, firstLine, lastReadPosition, identifier ): """ @summary: Simple setter that hides data structure implementation so that methods still work if implementation is ever to change.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def isTheLastFileThatWasReadByThisIdentifier(self, fileName, identifier ): """ @summary : Returns whether or not(True or False ) the specified file was the last one read by the identifier.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def main(): """ @summary: Small test case to see if everything works out well.
khosrow/metpx
[ 1, 1, 1, 1, 1446661693 ]
def __new__(cls, modified, added, removed, deleted, unknown, ignored, clean):
    """Build the status tuple, insisting every entry in every bucket is a str."""
    buckets = (modified, added, removed, deleted, unknown, ignored, clean)
    for bucket in buckets:
        assert all(isinstance(name, str) for name in bucket)
    return tuple.__new__(cls, buckets)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def modified(self):
    """Files that have been modified."""
    changed_files = self[0]
    return changed_files
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def added(self):
    """Files that have been added."""
    new_files = self[1]
    return new_files
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def removed(self):
    """Files that have been removed."""
    gone_files = self[2]
    return gone_files
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def deleted(self):
    """Files that are in the dirstate, but have been deleted from the
    working copy (aka "missing").
    """
    missing_files = self[3]
    return missing_files
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def unknown(self):
    """Files not in the dirstate that are not ignored."""
    untracked = self[4]
    return untracked
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def ignored(self):
    """Files not in the dirstate that are ignored (by _dirignore())."""
    ignored_files = self[5]
    return ignored_files
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def clean(self):
    """Files that have not been modified."""
    unmodified = self[6]
    return unmodified
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def nochangesfound(ui, repo, excluded=None):
    """Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    """
    secret = [n for n in (excluded or []) if repo[n].phase() >= phases.secret]
    if secret:
        ui.status(
            _("no changes found (ignored %d secret changesets)\n")
            % len(secret)
        )
    else:
        ui.status(_("no changes found\n"))
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def _uploadtraceback(ui, message, trace):
    """Record an error trace, plus a retrievable key for it, via ui.log."""
    fields = {
        "host": socket.gethostname(),
        "pid": os.getpid(),
        "time": time.time(),
    }
    key = "flat/errortrace-%(host)s-%(pid)s-%(time)s" % fields
    payload = message + "\n\n" + trace
    # TODO: Move this into a background task that renders from
    # blackbox instead.
    ui.log("errortrace", "Trace:\n%s\n", trace, key=key, payload=payload)
    ui.log("errortracekey", "Trace key:%s\n", key, errortracekey=key)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def checknewlabel(repo, lbl, kind):
    """Abort when a new label name collides with reserved names or syntax."""
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ("tip", ".", "null"):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (":", "\0", "\n", "\r"):
        if forbidden in lbl:
            raise error.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # Purely numeric names would be ambiguous with revision numbers.
        raise error.Abort(_("cannot use an integer as a name"))
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def checkportable(ui, f):
    """Check if filename f is portable and warn or abort depending on config"""
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = winutil.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, util.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("%s\n") % msg, notice=_("warning"))
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __init__(self, ui, abort, dirstate):
    self._ui = ui
    self._abort = abort
    self._dirstate = dirstate
    if dirstate._istreestate or dirstate._istreedirstate:
        # treestate-backed dirstates track case folding themselves; we only
        # need a scratch set for files tested during this run that haven't
        # been added to treestate yet.
        self._loweredfiles = set()
    else:
        joined = "\0".join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split("\0"))
    # The purpose of _newfiles is so that we don't complain about
    # case collisions if someone were to call this object with the
    # same filename twice.
    self._newfiles = set()
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    """yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs"""

    def errhandler(err):
        # Only re-raise walk errors for the root path itself; errors on
        # subdirectories are skipped silently.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, "samestat", None)
    if followsym and samestat is not None:

        def adddir(dirlst, dirname):
            # Remember visited directories by stat identity so symlink
            # cycles terminate; returns True when dirname was not seen yet.
            match = False
            dirstat = util.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match

    else:
        # Without samestat we can't detect cycles, so disable symlinks.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if ".hg" in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, ".hg", "patches")
            if os.path.isdir(os.path.join(qroot, ".hg")):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove(".hg")
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # A working-directory context reports rev() as None; substitute wdirrev
    # so callers can compare and sort uniformly.
    return wdirrev if rev is None else rev
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def revsingle(repo, revspec, default=".", localalias=None):
    """Resolve a single revset with user-defined revset aliases.

    This should only be used for resolving user-provided command-line flags or
    arguments.

    For internal code paths not interacting with user-provided arguments, use
    repo.revs (ignores user-defined revset aliases) or repo.anyrevs (respects
    user-defined revset aliases) instead.
    """
    if not revspec and revspec != 0:
        return repo[default]

    # Used by amend/common calling rebase.rebase with non-string opts.
    if isint(revspec):
        return repo[revspec]

    resolved = revrange(repo, [revspec], localalias=localalias)
    if not resolved:
        raise error.Abort(_("empty revision set"))
    return repo[resolved.last()]
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def revpair(repo, revs):
    """Resolve revision specs to a (node, node-or-None) pair.

    The second element is None when a single revision (not a range) was
    specified.
    """
    if not revs:
        # Nothing specified: default to the working directory's first parent.
        return repo.dirstate.p1(), None
    l = revrange(repo, revs)
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        first = l.first()
        second = l.last()
    if first is None:
        raise error.Abort(_("empty revision range"))
    if (
        first == second
        and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)
    ):
        # Multiple specs collapsed to one rev while at least one spec was
        # itself empty: report instead of pretending a single rev was given.
        raise error.Abort(_("empty revision on one side of range"))
    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None
    return repo.lookup(first), repo.lookup(second)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def expandpats(pats):
    """Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh."""
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # Explicitly-kinded patterns are passed through untouched.
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            expanded.append(kindpat)
    return expanded
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def bad(f, msg):
    # Report a match failure via the repo ui, with the path shown relative
    # to the matcher.
    warning = "%s: %s\n" % (m.rel(f), msg)
    ctx.repo().ui.warn(warning)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def match( ctx, pats=(), opts=None, globbed=False, default="relpath", badfn=None, emptyalways=True,
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def matchall(repo):
    """Return a matcher that will efficiently match everything."""
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # A plain path: canonicalize and return it directly.
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    files = [f for f in ctx if m(f)]
    if len(files) != 1:
        # followlines only makes sense for exactly one file.
        raise error.ParseError(msg)
    return files[0]
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __init__(self, repo, revcontainer):
    # Bind the changelog's node->rev lookup and the container's membership
    # test once, so later checks avoid repeated attribute lookups.
    self._torev = repo.changelog.rev
    self._revcontains = revcontainer.__contains__
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.

    Return the calculated 'moves' mapping that is from a single old node to a
    single new node.
    """
    if not replacements and not moves:
        return {}

    # translate mapping's other forms
    if not util.safehasattr(replacements, "items"):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo
    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set("max(%ln)", newnodes)).node()
        elif len(newnodes) == 0:
            # Handle them in a second loop
            continue
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    # Move bookmarks pointing to stripped commits backwards.
    # If hit a replaced node, use the replacement.
    def movebackwards(node):
        # Walk first parents until we leave the replaced set, following
        # already-computed moves or replacements along the way.
        p1 = unfi.changelog.parents(node)[0]
        if p1 == nullid:
            return p1
        elif p1 in moves:
            return moves[p1]
        elif p1 in replacements:
            return movebackwards(p1)
        else:
            return p1

    for oldnode, newnodes in replacements.items():
        if oldnode in moves:
            continue
        assert len(newnodes) == 0
        moves[oldnode] = movebackwards(oldnode)

    with repo.transaction("cleanup") as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks  # avoid import cycle

            repo.ui.debug(
                "moving bookmarks %r from %s to %s\n"
                % (oldbmarks, hex(oldnode), hex(newnode))
            )
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs(
                "parents(roots(%ln & (::%n))) - parents(%n)",
                allnewnodes,
                newnode,
                oldnode,
            )
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # adjust visibility, or strip nodes
        strip = True
        if visibility.tracking(repo):
            visibility.remove(repo, replacements.keys())
            strip = False
        if strip:
            from . import repair  # avoid import cycle

            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
    return moves
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def badfn(f, msg):
    # Warn only for files the user named explicitly, but record every
    # rejection so the caller can report failure.
    if f in m.files():
        m.bad(f, msg)
    rejected.append(f)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def marktouched(repo, files, similarity=0.0):
    """Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 0 on success, 1 if any explicitly requested file was rejected
    by the matcher.
    """
    # Define the accumulator *before* the lambda that closes over it: the
    # original relied on late binding, which would break if matchfiles ever
    # invoked badfn during construction.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _("adding %s\n") % abs
            else:
                status = _("removing %s\n") % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # Fail if any explicitly requested file was rejected.
    if any(f in m.files() for f in rejected):
        return 1
    return 0
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def _findrenames(repo, matcher, added, removed, similarity):
    """Find renames from removed files to added ones."""
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if repo.ui.verbose or not matcher.exact(old) or not matcher.exact(new):
            repo.ui.status(
                _("recording removal of %s as rename to %s " "(%d%% similar)\n")
                % (matcher.rel(old), matcher.rel(new), score * 100)
            )
        renames[new] = old
    return renames
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst.

    For different reasons it might not end with dst being marked as copied
    from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:
        # copying back a copy? restore normal tracking for dst.
        if repo.dirstate[dst] not in "mn" and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == "a" and origsrc == src:
            # The source has only just been added: no copy metadata can be
            # stored until it is committed.
            if not ui.quiet:
                ui.warn(
                    _(
                        "%s has not been committed yet, so no copy "
                        "data will be stored for %s.\n"
                    )
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
                )
            if repo.dirstate[dst] in "?r" and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def writerequires(opener, requirements):
    """Persist the requirements, sorted, one entry per line."""
    lines = ["%s\n" % req for req in sorted(requirements)]
    opener.writeutf8("requires", "".join(lines))
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __init__(self, path, stat):
    self.path = path
    self.cachestat = None
    # NOTE(review): when `stat` is falsy, a None path is handed to
    # filecachesubentry.stat — presumably that yields an "always changed"
    # sentinel; confirm against filecachesubentry.stat's behavior.
    if stat:
        path = self.path
    else:
        path = None
    self.cachestat = filecachesubentry.stat(path)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def changed(self):
    """Re-stat the file; report and remember whether it differs."""
    current = filecachesubentry.stat(self.path)
    if self.cachestat == current:
        return False
    self.cachestat = current
    return True
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def stat(path):
    # Thin indirection over util.cachestat so the stat implementation can
    # be stubbed out.
    result = util.cachestat(path)
    return result
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __init__(self, paths, stat=True):
    # One sub-entry per tracked path, all sharing the same stat policy.
    self._entries = [filecachesubentry(path, stat) for path in paths]
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def refresh(self):
    """Re-stat every tracked file so later change checks start fresh."""
    for tracked in self._entries:
        tracked.refresh()
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __init__(self, *paths):
    # Normalize every entry to a (path, joiner) tuple; bare paths get the
    # default join.
    normalized = []
    for entry in paths:
        if isinstance(entry, tuple):
            normalized.append(entry)
        else:
            normalized.append((entry, self.join))
    self.paths = normalized
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __call__(self, func):
    # Decorator protocol: remember the wrapped function and its name, then
    # return self so this descriptor replaces the method on the class.
    self.func = func
    self.name = func.__name__
    return self
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __set__(self, obj, value):
    """Support direct assignment to the cached attribute, keeping the
    per-object _filecache bookkeeping consistent with __dict__."""
    if self.name not in obj._filecache:
        # we add an entry for the missing value because X in __dict__
        # implies X in _filecache
        paths = [joiner(obj, path) for (path, joiner) in self.paths]
        ce = filecacheentry(paths, False)
        obj._filecache[self.name] = ce
    else:
        ce = obj._filecache[self.name]

    ce.obj = value  # update cached copy
    obj.__dict__[self.name] = value  # update copy returned by obj.x
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            # NOTE(review): the command string comes from repo config and is
            # executed with shell=True as-is.
            cmd = spec[6:]
            proc = subprocess.Popen(
                cmd,
                shell=True,
                bufsize=-1,
                close_fds=util.closefds,
                stdout=subprocess.PIPE,
                cwd=repo.root,
            )
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if b" " in l:
                k, v = l.strip().split(b" ", 1)
            else:
                k, v = l.strip(), b""
            k = k.decode("utf8")
            try:
                data[repo[k].rev()] = v.decode("utf8")
            except (error.LookupError, error.RepoLookupError):
                pass  # we ignore data for nodes that don't exist locally
    finally:
        # Always reap the child process and close the stream, even if
        # parsing raised.
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(
            _("extdata command '%s' failed: %s")
            % (cmd, util.explainexit(proc.returncode)[0])
        )

    return data
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised"""
    # experimental config: format.generaldelta
    section = "format"
    key = "generaldelta"
    return ui.configbool(section, key)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __init__(self, vfs, path, keys=None):
    # NOTE(review): `keys` is accepted but not stored here — presumably
    # consumed elsewhere or vestigial; confirm before removing.
    self.vfs = vfs
    self.path = path
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def write(self, data, firstline=None):
    """Write key=>value mapping to a file

    data is a dict. Keys must be alphanumerical and start with a letter.
    Values must not contain newline characters.

    If 'firstline' is not None, it is written to file before
    everything else, as it is, not in a key=value form"""
    lines = []
    if firstline is not None:
        lines.append("%s\n" % firstline)

    for key, value in data.items():
        # Validate each pair before serializing it.
        if key == self.firstlinekey:
            e = "key name '%s' is reserved" % self.firstlinekey
            raise error.ProgrammingError(e)
        if not key[0].isalpha():
            e = "keys must start with a letter in a key-value file"
            raise error.ProgrammingError(e)
        if not key.isalnum():
            e = "invalid key name in a simple key-value file"
            raise error.ProgrammingError(e)
        if "\n" in value:
            e = "invalid value in a simple key-value file"
            raise error.ProgrammingError(e)
        lines.append("%s=%s\n" % (key, value))

    payload = "".join(lines).encode("utf-8")
    with self.vfs(self.path, mode="wb", atomictemp=True) as fp:
        fp.write(payload)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # Default implementation is the identity; extensions monkeypatch this.
    return sink
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def trackrevnumfortests(repo, specs):
    """Attempt to collect information to replace revision number with revset
    expressions in tests.

    This works with the TESTFILE and TESTLINE environment variable set by
    run-tests.py.

    Information will be written to $TESTDIR/.testrevnum.
    """
    if not util.istest():
        return
    trackrevnum = encoding.environ.get("TRACKREVNUM")
    testline = encoding.environ.get("TESTLINE")
    testfile = encoding.environ.get("TESTFILE")
    testdir = encoding.environ.get("TESTDIR")
    if not trackrevnum or not testline or not testfile or not testdir:
        return
    for spec in specs:
        # 'spec' should be in sys.argv
        if not any(spec in a for a in pycompat.sysargv):
            continue
        # Consider 'spec' as a revision number.
        rev = int(spec)
        if rev < -1:
            continue
        ctx = repo[rev]
        if not ctx:
            return
        # Check candidate revset expressions.
        candidates = []
        if rev == -1:
            candidates.append("null")
        desc = ctx.description()
        if desc:
            candidates.append("desc(%s)" % desc.split()[0])
            candidates.append("max(desc(%s))" % desc.split()[0])
        candidates.append("%s" % ctx.hex())
        for candidate in candidates:
            try:
                nodes = list(repo.nodes(candidate))
            except Exception:
                continue
            if nodes == [ctx.node()]:
                # Record the first candidate that resolves to exactly this
                # revision, so run-tests can rewrite the number later.
                with open(testdir + "/.testrevnum", "ab") as f:
                    f.write(
                        "fix(%r, %s, %r, %r)\n"
                        % (testfile, testline, spec, candidate)
                    )
                break
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def revf64decode(rev):
    """Convert rev encoded by revf64encode back to the original rev

    >>> revs = [i + j for i in [0, 1 << 56] for j in range(2)] + [None]
    >>> encoded = [revf64encode(i) for i in revs]
    >>> decoded = [revf64decode(i) for i in encoded]
    >>> revs == decoded
    True
    """
    if rev is None:
        return None
    lower = 0x1000000000000
    upper = 0x100000000000000
    if lower <= rev < upper:
        rev += 0xFF000000000000
    return rev
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def prefixlines(raw):
    """Surround lines with a comment char and a new line."""
    # (fixed docstring typo: "lineswith" -> "lines with")
    commented = ["# %s" % line for line in raw.splitlines()]
    return "\n".join(commented) + "\n"
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def helpmessage(ui, continuecmd, abortcmd):
    """Emit a commented two-line hint telling the user how to continue or
    abort the interrupted operation."""
    body = _("To continue: %s\n" "To abort: %s") % (
        continuecmd,
        abortcmd,
    )
    ui.warn(prefixlines(body))
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def histeditmsg(repo, ui):
    # Interrupted histedit: show the standard continue/abort hint.
    helpmessage(ui, "hg histedit --continue", "hg histedit --abort")
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def updatecleanmsg(dest=None):
    """Return the `hg update --clean` abort hint for *dest* (default ".")."""
    warning = _("warning: this will discard uncommitted changes")
    target = dest or "."
    return "hg update --clean %s (%s)" % (target, warning)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def updatemsg(repo, ui):
    # Interrupted update: prefer replaying the exact arguments recorded by
    # the pre-update hook; otherwise fall back to the destination hash saved
    # in 'updatestate' (first 12 hex chars).
    previousargs = repo.localvfs.tryreadutf8(UPDATEARGS)
    if previousargs:
        continuecmd = "hg " + previousargs
    else:
        continuecmd = "hg update " + repo.localvfs.readutf8("updatestate")[:12]
    abortcmd = updatecleanmsg(repo._activebookmark)
    helpmessage(ui, continuecmd, abortcmd)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def mergemsg(repo, ui):
    # tweakdefaults requires `update` to have a rev hence the `.`
    helpmessage(ui, "hg commit", updatecleanmsg())
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def fileexistspredicate(filename):
    """Build a predicate reporting whether *filename* exists in repo.localvfs."""
    def predicate(repo):
        return repo.localvfs.exists(filename)
    return predicate
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def extsetup(ui):
    # Only decorate `hg status` when morestatus is enabled and the output is
    # meant for humans (ui.plain() is set for scripted consumers).
    if ui.configbool("morestatus", "show") and not ui.plain():
        wrapcommand(commands.table, "status", statuscmd)
        # Write down `hg update` args to show the continue command in
        # interrupted update state.
        ui.setconfig("hooks", "pre-update.morestatus", saveupdateargs)
        ui.setconfig("hooks", "post-update.morestatus", cleanupdateargs)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def cleanupdateargs(repo, **kwargs):
    """post-update hook: forget any recorded `hg update` arguments."""
    vfs = repo.localvfs
    with repo.wlock():
        vfs.tryunlink(UPDATEARGS)
facebookexperimental/eden
[ 4737, 192, 4737, 106, 1462467227 ]
def __init__(self, backends):
    """
    Constructor, creating all the gtk widgets

    @param backends: a reference to the dialog in which this is loaded
    """
    super().__init__(orientation=Gtk.Orientation.VERTICAL)
    self.dialog = backends
    # Spinner state and signal handles, managed by later callbacks.
    self.should_spinner_be_shown = False
    self.task_deleted_handle = None
    self.task_added_handle = None
    self.req = backends.get_requester()
    self._create_widgets()
    self._connect_signals()
getting-things-gnome/gtg
[ 519, 158, 519, 226, 1394882179 ]
def _create_widgets(self): """ This function fills this box with widgets """ # Division of the available space in three segments: # top, middle and bottom (parameters_ui) top = Gtk.Box() top.set_spacing(6) middle = Gtk.Box() middle.set_spacing(6) self._fill_top_box(top) self._fill_middle_box(middle) self.pack_start(top, False, True, 0) self.pack_start(middle, False, True, 0) align = Gtk.Alignment.new(0, 0, 1, 0) align.set_padding(10, 0, 0, 0) self.parameters_ui = ParametersUI(self.req) align.add(self.parameters_ui) self.pack_start(align, False, True, 0)
getting-things-gnome/gtg
[ 519, 158, 519, 226, 1394882179 ]
def _fill_middle_box(self, box):
    """
    Helper function to fill a box with a label and a button

    @param box: the Gtk.Box to fill
    """
    self.sync_status_label = Gtk.Label()
    # Pull the status text towards the button on its right.
    self.sync_status_label.set_alignment(xalign=0.8, yalign=0.5)
    self.sync_button = Gtk.Button()
    self.sync_button.connect("clicked", self.on_sync_button_clicked)
    box.pack_start(self.sync_status_label, True, True, 0)
    box.pack_start(self.sync_button, True, True, 0)
getting-things-gnome/gtg
[ 519, 158, 519, 226, 1394882179 ]
def refresh_title(self, sender=None, data=None):
    """
    Callback for the signal that notifies backends name changes. It changes
    the title of this view

    @param sender: not used, here only for signal callback compatibility
    @param data: not used, here only for signal callback compatibility
    """
    title = self.backend.get_human_name()
    markup = "<big><big><big><b>%s</b></big></big></big>" % title
    self.human_name_label.set_markup(markup)
getting-things-gnome/gtg
[ 519, 158, 519, 226, 1394882179 ]
def refresh_sync_status_label(self):
    """
    Refreshes the Gtk.Label that shows the current state of this backend
    """
    if self.backend.is_default():
        label = _("This is the default synchronization service")
    elif self.backend.is_enabled():
        label = _("Syncing is enabled.")
    else:
        label = _('Syncing is <span color="red">disabled</span>.')
    self.sync_status_label.set_markup(label)
getting-things-gnome/gtg
[ 519, 158, 519, 226, 1394882179 ]
def on_sync_button_clicked(self, sender):
    """
    Signal callback when a backend is enabled/disabled via the UI button

    @param sender: not used, here only for signal callback compatibility
    """
    # Persist any pending parameter edits before toggling the backend.
    self.parameters_ui.commit_changes()
    backend_id = self.backend.get_id()
    self.req.set_backend_enabled(backend_id, not self.backend.is_enabled())
getting-things-gnome/gtg
[ 519, 158, 519, 226, 1394882179 ]
def on_sync_ended(self, sender, backend_id):
    """
    If the backend has stopped syncing tasks, update the state of the
    Gtk.Spinner

    @param sender: not used, here only for signal callback compatibility
    @param backend_id: the id of the backend that emitted this signal
    """
    # Ignore notifications about other backends.
    if backend_id != self.backend.get_id():
        return
    self.spinner_set_active(False)
getting-things-gnome/gtg
[ 519, 158, 519, 226, 1394882179 ]
def __init__(self, covar):
    # Name of the environmental covariate this instance aggregates.
    self.name = covar
    # Per-key lists of RONA values; presumably keyed by marker — confirm
    # against calculate_rona's usage.
    self.pop_ronas = defaultdict(list)
    # Association correlation coefficients (R^2), one per key.
    self.corr_coef = {}
    # Per-population averages and standard errors, filled in later.
    self.avg_ronas = []
    self.stderr_ronas = []
StuntsPT/pyRona
[ 9, 2, 9, 2, 1478901588 ]
def count_markers(self):
    """
    Counts the number of markers in the instance.
    """
    num_markers = len(self.pop_ronas)
    return num_markers
StuntsPT/pyRona
[ 9, 2, 9, 2, 1478901588 ]
def results_summary(ronas, use_weights):
    """
    Print a tab-separated summary table of the RONA results.

    One row per population (average RONA per covariate), preceded by a
    header with the covariate names and the number of associated SNPs,
    and followed by the min/max/average R^2 across markers.

    ronas: sequence of RonaClass instances (one per covariate); all are
           assumed to share the same population list
    use_weights: currently unused -- the weighted-average branch below is
                 commented out, so a plain NaN-aware mean is always printed
    """
    # NOTE(review): pop_names is not set in RonaClass.__init__; presumably
    # assigned elsewhere (cf. RonaClass.POP_NAMES in main) -- confirm.
    pop_names = ronas[0].pop_names
    for i, j in enumerate(pop_names):
        if i == 0:
            # Header rows are printed once, before the first population.
            print("Covar\t%s" % "\t".join([x.name for x in ronas]))
            print("#SNPs\t%s" % "\t".join([str(x.count_markers())
                                           for x in ronas]))
        # One row per population: its average RONA for each covariate.
        print("%s\t%s" % (j, "\t".join([str(x.avg_ronas[i])
                                        for x in ronas])))
    # Per-covariate extremes of the marker correlation coefficients
    # (nanmin/nanmax skip markers whose value is NaN).
    print("Min R^2\t%s" % "\t".join([str(np.nanmin(list(x.corr_coef.values())))
                                     for x in ronas]))
    print("Max R^2\t%s" % "\t".join([str(np.nanmax(list(x.corr_coef.values())))
                                     for x in ronas]))
    # Weighted-average variant kept for reference but disabled; if revived,
    # it would make use_weights meaningful again.
    # if use_weights is True:
    #     means = [str(np.average(list(x.corr_coef.values()),
    #                             weights=list(x.corr_coef.values()))) for x in
    #              ronas]
    # else:
    means = [str(np.nanmean(list(x.corr_coef.values()))) for x in ronas]
    print("Average R^2\t%s" % "\t".join(means))
StuntsPT/pyRona
[ 9, 2, 9, 2, 1478901588 ]
def main():
    """
    Main function. Takes all the inputs as arguments and runs the remaining
    functions of the program.

    Pipeline: parse CLI args -> load present/future covariates and the
    marker/covariate associations from the chosen upstream format
    ("baypass" or "lfmm") -> compute one RONA per association -> filter
    and summarise -> draw plots (and optionally a map).
    """
    if len(argv) < 2:
        # No CLI arguments: force the parser to print its help text.
        arg_list = ["-h"]
    else:
        arg_list = argv[1:]
    arg = argument_parser(arg_list)

    if arg.upstream == "baypass":
        present_covariates = fp.parse_baypass_envfile(arg.present_covars_file)
        future_covariates = fp.parse_baypass_envfile(arg.future_covars_file)
        RonaClass.POP_NAMES = fp.popnames_parser(arg.popnames_file)
        assocs = fp.baypass_summary_betai_parser(
            arg.baypass_summary_betai_file, arg.bayes_factor, arg.immutables)
        al_freqs = fp.baypass_pij_parser(arg.baypass_pij_file, assocs)

    elif arg.upstream == "lfmm":
        present_covariates = fp.parse_lfmm_envfile(arg.present_covars_file)
        future_covariates = fp.parse_lfmm_envfile(arg.future_covars_file)
        assocs = fp.lfmm_results_parser(arg.lfmm_assoc_file, arg.p_thres,
                                        arg.immutables)
        RonaClass.POP_NAMES, al_freqs = fp.lfmm_to_pop_allele_freqs(
            arg.allele_freqs_file, arg.present_covars_file, assocs,
            popnames=True)

    # One RonaClass instance per covariate, accumulated across markers.
    ronas = {}
    for assoc in assocs:
        marker, covar = assoc
        # Instanciate class
        if covar not in ronas:
            rona = RonaClass(covar)
        else:
            rona = ronas[covar]
        # assumes covar is a 1-based numeric index into the covariate
        # lists -- TODO confirm this holds for both upstream formats
        calculate_rona(marker, rona, present_covariates[int(covar) - 1],
                       future_covariates[int(covar) - 1],
                       al_freqs[marker], arg.plots, arg.outliers, arg.rtype)

        ronas[covar] = rona

    ronas = ronas_filterer(ronas, arg.use_weights, arg.num_covars)

    results_summary(ronas, arg.use_weights)

    gp.draw_rona_plot(ronas, arg.outfile)

    if arg.map_filename is not None:
        # The map plotting module is only imported if a map plot is requested.
        # This is to be able to keep 'cartopy' as an optional dependency.
        try:
            import plotters.map_plotter as mapper
        except ImportError:
            import pyRona.plotters.map_plotter as mapper
        # NOTE(review): covariate rows 1 and 0 look like latitude/longitude
        # for the map -- confirm against the envfile layout.
        mapper.map_plotter(ronas, present_covariates[1], present_covariates[0],
                           arg.map_filename)
StuntsPT/pyRona
[ 9, 2, 9, 2, 1478901588 ]
def course_info(request, course_slug):
    """
    Dispatch the course front page to the student or staff variant,
    depending on the requesting user's role in the offering.
    """
    if is_course_student_by_slug(request, course_slug):
        return _course_info_student(request, course_slug)
    if is_course_staff_by_slug(request, course_slug):
        return _course_info_staff(request, course_slug)
    return ForbiddenResponse(request)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def reorder_activity(request, course_slug):
    """
    Ajax way to reorder activity.
    This ajax view function is called in the course_info page.

    Expects POST parameters 'id_up' and 'id_down' naming two activities
    of this offering whose positions should be swapped.
    """
    course = get_object_or_404(CourseOffering, slug=course_slug)
    if request.method == 'POST':
        # Normalise positions first so a swap of two values is meaningful.
        neaten_activity_positions(course)
        # find the activities in question
        id_up = request.POST.get('id_up')
        id_down = request.POST.get('id_down')
        if id_up == None or id_down == None:
            return ForbiddenResponse(request)
        # swap the position of the two activities
        activity_up = get_object_or_404(Activity, id=id_up, offering__slug=course_slug)
        activity_down = get_object_or_404(Activity, id=id_down, offering__slug=course_slug)

        activity_up.position, activity_down.position = activity_down.position, activity_up.position
        activity_up.save()
        activity_down.save()
        # NOTE(review): excerpt ends here -- the success response (and any
        # non-POST handling) is outside this view of the file.
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def _course_info_staff(request, course_slug):
    """
    Course front page

    Staff variant: lists all activities (with totals and types), forum
    status, and supports non-AJAX activity reordering via GET params.
    """
    course = get_object_or_404(CourseOffering, slug=course_slug)
    # Restrict to instructors, TAs and approvers of this offering.
    member = Member.objects.get(offering=course, person__userid=request.user.username,
                                role__in=['INST','TA','APPR'])
    activities = all_activities_filter(offering=course)
    any_group = True in [a.group for a in activities]

    try:
        forum = Forum.objects.get(offering=course)
        forum_enabled = forum.enabled
    except Forum.DoesNotExist:
        forum_enabled = False

    # Non Ajax way to reorder activity, please also see reorder_activity view function for ajax way to reorder
    order = None
    act = None
    if 'order' in request.GET:
        order = request.GET['order']
    if 'act' in request.GET:
        act = request.GET['act']
    if order and act:
        reorder_course_activities(activities, act, order)
        return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug}))

    # Todo: is the activity type necessary?
    activities_info = []
    total_percent = 0
    for activity in activities:
        if activity.percent:
            total_percent += activity.percent

        if isinstance(activity, NumericActivity):
            activities_info.append({'activity':activity, 'type':ACTIVITY_TYPE['NG']})
        elif isinstance(activity, LetterActivity):
            activities_info.append({'activity':activity, 'type':ACTIVITY_TYPE['LG']})

    if len(activities) == 0:
        num_pages = Page.objects.filter(offering=course)
        # NOTE(review): num_pages is a QuerySet, so "num_pages == 0" is
        # always False -- likely should be num_pages.count() == 0 or
        # "not num_pages.exists()". Flagged only; code left unchanged.
        if num_pages == 0:
            messages.info(request, "Students won't see this course in their menu on the front page. As soon as some activities or pages have been added, they will see a link to the course info page.")
    # NOTE(review): excerpt ends here -- the render/response is outside
    # this view of the file.
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def course_config(request, course_slug):
    """
    Display and process the course configuration form: course URL,
    TA contact email, forum settings, and group size limits.
    """
    from forum.models import Forum
    course = get_object_or_404(CourseOffering, slug=course_slug)
    try:
        forum = Forum.objects.get(offering=course)
    except Forum.DoesNotExist:
        # No forum record yet: build an unsaved, disabled placeholder so
        # the form can still be populated and saved uniformly.
        forum = Forum(offering=course)
        forum.enabled = False

    if request.method=="POST":
        form = CourseConfigForm(request.POST)
        if form.is_valid():
            course.set_url(form.cleaned_data['url'])
            course.set_taemail(form.cleaned_data['taemail'])
            # SVN options currently disabled:
            #if course.uses_svn():
            #    course.set_indiv_svn(form.cleaned_data['indiv_svn'])
            #    course.set_instr_rw_svn(form.cleaned_data['instr_rw_svn'])
            course.set_group_min(form.cleaned_data['group_min'])
            course.set_group_max(form.cleaned_data['group_max'])
            course.save()
            forum.enabled = form.cleaned_data['forum']
            forum.identity = form.cleaned_data['forum_identity']
            forum.save()
            messages.success(request, 'Course config updated')
            #LOG EVENT#
            l = LogEntry(userid=request.user.username,
                  description=("updated config for %s") % (course),
                  related_object=course)
            l.save()
            return HttpResponseRedirect(reverse('offering:course_info', kwargs={'course_slug': course_slug}))
    else:
        # GET: pre-populate the form from the current config values.
        form = CourseConfigForm({'url': course.url(), 'taemail': course.taemail(),
                                 'forum': forum.enabled, 'forum_identity': forum.identity,
                'indiv_svn': course.indiv_svn(), 'instr_rw_svn': course.instr_rw_svn(),
                'group_min': course.group_min(),'group_max': course.group_max()})
    # NOTE(review): excerpt ends here -- the render for the invalid-POST
    # and GET paths is outside this view of the file.
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def _course_info_student(request, course_slug):
    """
    Course front page, student variant: visible activities with the
    requesting student's own grade display for each.
    """
    course = get_object_or_404(CourseOffering, slug=course_slug)
    activities = all_activities_filter(offering=course)
    # Students only see activities with status RLS/URLS -- presumably
    # released / unreleased-but-listed; confirm against the Activity model.
    activities = [a for a in activities if a.status in ['RLS', 'URLS']]
    any_group = True in [a.group for a in activities]
    # Whether a student-readable "Index" page exists for this offering.
    has_index = bool(Page.objects.filter(offering=course, label="Index", can_read__in=ACL_ROLES['STUD']))
    try:
        forum = Forum.objects.get(offering=course)
        forum_enabled = forum.enabled
    except Forum.DoesNotExist:
        forum_enabled = False

    activity_data = []
    student = Member.objects.get(offering=course, person__userid=request.user.username, role='STUD')
    for activity in activities:
        data = {}
        data['act'] = activity
        data['grade_display'] = activity.display_grade_student(student.person)
        activity_data.append(data)
    # NOTE(review): excerpt ends here -- the render/response is outside
    # this view of the file.
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def activity_info_oldurl(request, course_slug, activity_slug, tail):
    """
    Redirect an old-style activity URL to its current location.

    Redirects only when the activity really exists (404 otherwise),
    preserving any trailing path component from the old URL.
    """
    offering = get_object_or_404(CourseOffering, slug=course_slug)
    activity = get_object_or_404(Activity, slug=activity_slug, offering=offering)
    new_url = reverse('offering:activity_info',
                      kwargs={'course_slug': offering.slug, 'activity_slug': activity.slug})
    return HttpResponseRedirect(new_url + tail)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def activity_info(request, course_slug, activity_slug):
    """
    Dispatch the activity info page to the student or staff variant,
    depending on the requesting user's role in the offering.
    """
    if is_course_student_by_slug(request, course_slug):
        return _activity_info_student(request, course_slug, activity_slug)
    if is_course_staff_by_slug(request, course_slug):
        return _activity_info_staff(request, course_slug, activity_slug)
    return ForbiddenResponse(request)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def _activity_info_student(request, course_slug, activity_slug):
    # Student view of a single activity's info page.
    # NOTE(review): body continues beyond this excerpt.
    course = get_object_or_404(CourseOffering, slug=course_slug)
    activities = all_activities_filter(slug=activity_slug, offering=course)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def activity_info_with_groups(request, course_slug, activity_slug):
    # Activity info page including group information.
    # NOTE(review): body continues beyond this excerpt.
    course = get_object_or_404(CourseOffering, slug = course_slug)
    activities = all_activities_filter(slug=activity_slug, offering=course)
    # The slug must identify exactly one activity in this offering.
    if len(activities) != 1:
        return NotFoundResponse(request)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def activity_stat(request, course_slug, activity_slug):
    # Statistics page for a single activity.
    # NOTE(review): body continues beyond this excerpt.
    course = get_object_or_404(CourseOffering, slug=course_slug)
    activities = all_activities_filter(slug=activity_slug, offering=course)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def activity_choice(request, course_slug):
    """
    Render the page where staff choose which kind of activity to add
    to the offering.
    """
    offering = get_object_or_404(CourseOffering, slug=course_slug)
    return render(request, 'grades/activity_choice.html', {'course': offering})
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def edit_cutoffs(request, course_slug, activity_slug):
    """
    Edit the letter-grade cutoffs of a calculated-letter activity.
    """
    course = get_object_or_404(CourseOffering, slug=course_slug)
    activity = get_object_or_404(CalLetterActivity, slug=activity_slug, offering=course, deleted=False)
    if request.method == 'POST':
        form = CutoffForm(request.POST)
        if form.is_valid():
            # All validation rules pass
            activity.set_cutoffs(form.cleaned_data['cutoffs'])
            activity.save()
            # NOTE(review): excerpt ends here -- the post-save redirect and
            # the GET/invalid-form handling are outside this view of the file.
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def _cutoffsdict(cutoff):
    """
    Convert an ordered sequence of letter-grade cutoffs into a dict.

    cutoff: indexable sequence ordered from A+ down to D; entries beyond
            the first ten are ignored (matching the original indexing).
    Returns a dict keyed 'ap', 'a', 'am', 'bp', 'b', 'bm', 'cp', 'c',
    'cm', 'd' mapping each grade to its cutoff.
    Raises IndexError if fewer than ten entries are supplied.
    """
    grade_keys = ('ap', 'a', 'am', 'bp', 'b', 'bm', 'cp', 'c', 'cm', 'd')
    # Explicit indexing (rather than zip) keeps the original contract:
    # a too-short input raises IndexError instead of silently truncating.
    return {key: cutoff[i] for i, key in enumerate(grade_keys)}
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def compare_official(request, course_slug, activity_slug):
    """
    Screen to compare member.official_grade to this letter activity
    """
    # NOTE(review): body continues beyond this excerpt.
    course = get_object_or_404(CourseOffering, slug=course_slug)
    activity = get_object_or_404(LetterActivity, slug=activity_slug, offering=course, deleted=False)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def grade_change(request, course_slug, activity_slug, userid):
    """
    Produce grade change form

    Looks up the (non-dropped) member identified by userid and the
    student's current letter grade for this activity, if any.
    """
    course = get_object_or_404(CourseOffering, slug=course_slug)
    activity = get_object_or_404(LetterActivity, slug=activity_slug, offering=course, deleted=False)
    member = get_object_or_404(Member, ~Q(role='DROP'), find_member(userid), offering__slug=course_slug)
    user = Person.objects.get(userid=request.user.username)
    # NOGR marks a "no grade" placeholder; exclude it when looking for an
    # existing grade to show.
    grades = LetterGrade.objects.filter(activity_id=activity.id, member=member).exclude(flag='NOGR')
    if grades:
        grade = grades[0].letter_grade
    else:
        grade = None
    # NOTE(review): excerpt ends here -- form construction and the
    # response are outside this view of the file.
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def add_numeric_activity(request, course_slug):
    # Form page for adding a numeric activity to the offering.
    # NOTE(review): body continues beyond this excerpt.
    course = get_object_or_404(CourseOffering, slug=course_slug)

    # Choices for an "extend groups from" style selector: an em-dash
    # placeholder plus every existing group activity.
    activities_list = [(None, '\u2014'),]
    activities = all_activities_filter(course)
    for a in activities:
        if a.group == True:
            activities_list.append((a.slug, a.name))
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def add_cal_numeric_activity(request, course_slug):
    # Form page for adding a calculated numeric activity.
    # NOTE(review): body continues beyond this excerpt.
    course = get_object_or_404(CourseOffering, slug=course_slug)
    numeric_activities = NumericActivity.objects.filter(offering=course, deleted=False)
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def add_cal_letter_activity(request, course_slug):
    """
    Form page for adding a calculated letter activity, which derives a
    letter grade from an existing numeric activity (optionally tied to
    an exam activity).
    """
    course = get_object_or_404(CourseOffering, slug=course_slug)
    letter_activities = LetterActivity.objects.filter(offering=course)
    numact_choices = [(na.pk, na.name) for na in NumericActivity.objects.filter(offering=course, deleted=False)]
    # pk 0 with an em-dash stands for "no exam activity".
    examact_choices = [(0, '\u2014')] + [(na.pk, na.name) for na in Activity.objects.filter(offering=course, deleted=False)]

    if request.method == 'POST': # If the form has been submitted...
        form = CalLetterActivityForm(request.POST) # A form bound to the POST data
        form.fields['numeric_activity'].choices = numact_choices
        form.fields['exam_activity'].choices = examact_choices
        form.activate_addform_validation(course_slug)
        if form.is_valid(): # All validation rules pass
            try:
                # Place the new activity after the current last position.
                aggr_dict = Activity.objects.filter(offering=course).aggregate(Max('position'))
                if not aggr_dict['position__max']:
                    position = 1
                else:
                    position = aggr_dict['position__max'] + 1

                if form.cleaned_data['exam_activity'] == '0':
                    exam_activity_id = None
                else:
                    exam_activity = Activity.objects.get(pk=form.cleaned_data['exam_activity'])
                    exam_activity_id = exam_activity.id

                config = {
                    'showstats': form.cleaned_data['showstats'],
                    'showhisto': form.cleaned_data['showhisto'],
                    'url': form.cleaned_data['url'],
                }

                CalLetterActivity.objects.create(name=form.cleaned_data['name'],
                                                short_name=form.cleaned_data['short_name'],
                                                status=form.cleaned_data['status'],
                                                numeric_activity=NumericActivity.objects.get(pk=form.cleaned_data['numeric_activity']),
                                                exam_activity_id=exam_activity_id,
                                                offering=course,
                                                position=position,
                                                group=False,
                                                config=config)
            except NotImplementedError:
                return NotFoundResponse(request)
            # NOTE(review): excerpt ends here -- the post-create redirect and
            # the GET/invalid-form handling are outside this view of the file.
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]
def formula_tester(request, course_slug):
    # Page for interactively testing a grade-calculation formula against
    # the offering's numeric activities.
    # NOTE(review): body continues beyond this excerpt.
    course = get_object_or_404(CourseOffering, slug=course_slug)
    numeric_activities = NumericActivity.objects.filter(offering=course, deleted=False)
    result = ""
sfu-fas/coursys
[ 61, 17, 61, 39, 1407368110 ]