function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def n_rvs(self):
    """Number of random variables: one per entry of the alpha vector."""
    return self._alpha.size
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, n, alpha=None, beta=None, mean=None, var=None):
    """Beta-binomial distribution over n trials.

    Parameterized either directly by (alpha, beta) or by (mean, var);
    whichever pair is not given is derived by moment matching.
    Raises ValueError when neither complete pair is supplied.
    """
    self.n = n
    if alpha is not None and beta is not None:
        self.alpha = alpha
        self.beta = beta
        # Standard beta-binomial moments expressed in (n, alpha, beta).
        self.mean = n * alpha / (alpha + beta)
        self.var = n * alpha * beta * (alpha + beta + n) / ((alpha + beta) ** 2 * (alpha + beta + 1))
    elif mean is not None and var is not None:
        self.mean = mean
        self.var = var
        # Inversion of the moment formulas above to recover (alpha, beta).
        self.alpha = - mean * (var + mean **2 - n * mean) / (mean ** 2 + n * (var - mean))
        self.beta = (n - mean) * (var + mean ** 2 - n * mean) / ((n - mean) * mean - n * var)
    else:
        raise ValueError("BetaBinomialDistribution requires either (alpha and beta) or (mean and var).")
    # Beta-binomial is a compound distribution, drawing binomial
    # RVs off of a beta-distrubuted bias.
    self._p_dist = st.beta(a=self.alpha, b=self.beta)
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """This distribution yields a single random variable."""
    return 1
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, alpha=None, beta=None, mean=None, var=None):
    """Gamma distribution with shape alpha and rate beta.

    Parameterized either by (alpha, beta) or by (mean, var); the missing
    pair is derived via mean = alpha/beta and var = alpha/beta**2.
    Raises ValueError when neither complete pair is supplied.
    """
    if alpha is not None and beta is not None:
        self.alpha = alpha
        self.beta = beta
        self.mean = alpha / beta
        self.var = alpha / beta ** 2
    elif mean is not None and var is not None:
        self.mean = mean
        self.var = var
        # Moment matching: alpha = mean^2/var, beta = mean/var.
        self.alpha = mean ** 2 / var
        self.beta = mean / var
    else:
        raise ValueError("GammaDistribution requires either (alpha and beta) or (mean and var).")
    # This is the distribution we want up to a scale factor of beta
    self._dist = st.gamma(self.alpha)
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """A single scalar random variable is produced."""
    return 1
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, dim=6):
    """Record the dimension; warns that this class is deprecated."""
    warnings.warn(
        "This class has been deprecated, and may "
        "be renamed in future versions.",
        DeprecationWarning
    )
    self._dim = dim
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """Dimensionality recorded at construction time."""
    return self._dim
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, num_bits):
    """Store the number of bits characterizing this distribution."""
    self._num_bits = num_bits
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """Exactly one random variable is drawn."""
    return 1
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, dim=2):
    """Deprecated: store the dimension and precompute the Pauli basis."""
    warnings.warn(
        "This class has been deprecated; please see "
        "qinfer.tomography.GinibreDistribution(rank=None).",
        DeprecationWarning
    )
    self.dim = dim
    # The four single-qubit Paulis (I, Z, Y, X order as originally listed).
    self.paulis1Q = np.array([
        [[1, 0], [0, 1]],
        [[1, 0], [0, -1]],
        [[0, -1j], [1j, 0]],
        [[0, 1], [1, 0]],
    ])
    self.paulis = self.make_Paulis(self.paulis1Q, 4)
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """dim**2 - 1 real parameters describe a dim-dimensional state."""
    return self.dim ** 2 - 1
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def make_Paulis(self, paulis, d):
    """Recursively build the d**2-element Pauli basis via Kronecker products.

    Doubles the matrix dimension each call until it reaches 2 * self.dim,
    at which point the accumulated basis array is returned.
    """
    if d == self.dim * 2:
        return paulis
    temp = np.zeros([d ** 2, d, d], dtype='complex128')
    for idx in range(temp.shape[0]):
        # idx // d selects the coarse factor while idx % 4 cycles through the
        # single-qubit Paulis.  Integer floor division replaces the original
        # np.trunc(idx/d), which yields a float index that modern NumPy
        # (and plain Python 3 sequence indexing) rejects.
        temp[idx, :] = np.kron(paulis[idx // d], self.paulis1Q[idx % 4])
    return self.make_Paulis(temp, d * 2)
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, dim=2):
    """Deprecated: keep only the state dimension."""
    warnings.warn(
        "This class has been deprecated; please see "
        "qinfer.tomography.GinibreDistribution(rank=1).",
        DeprecationWarning
    )
    # TODO: add basis as an option
    self.dim = dim
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """Three random variables are produced."""
    return 3
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, dim=2, k=2):
    """Deprecated: record the dimension and rank parameter k."""
    warnings.warn(
        "This class has been deprecated; please see "
        "qinfer.tomography.GinibreDistribution.",
        DeprecationWarning
    )
    self.dim = dim
    self.k = k
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """Fixed at three random variables."""
    return 3
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, distribution, model, maxiters=100):
    """Store the wrapped distribution, the model, and an iteration cap."""
    self._dist = distribution
    self._model = model
    self._maxiters = maxiters
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """Delegate to the wrapped distribution."""
    return self._dist.n_rvs
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def grad_log_pdf(self, x):
    """Forward the gradient-of-log-pdf query to the wrapped distribution."""
    return self._dist.grad_log_pdf(x)
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, pdf, compactification_scale=1, n_interp_points=1500):
    """Set up interpolation of *pdf* over a compactified real axis."""
    self._pdf = pdf
    # Interpolation abscissae on the compactified line.
    self._xs = u.compactspace(compactification_scale, n_interp_points)
    self._generate_interp()
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """One-dimensional distribution."""
    return 1
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def __init__(self, underlying_distribution, desired_total=1):
    """Keep a reference to the wrapped distribution and the target sum."""
    super(ConstrainedSumDistribution, self).__init__()
    self._ud = underlying_distribution
    self.desired_total = desired_total
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def underlying_distribution(self):
    """The wrapped distribution this object constrains."""
    return self._ud
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def n_rvs(self):
    """Same arity as the underlying distribution."""
    return self.underlying_distribution.n_rvs
csferrie/python-qinfer
[ 91, 32, 91, 22, 1344992565 ]
def Args(parser):
    """Register HTTP-specific and protocol-agnostic creation flags."""
    health_checks_utils.AddHttpRelatedCreationArgs(parser)
    health_checks_utils.AddProtocolAgnosticCreationArgs(parser, 'HTTP')
KaranToor/MA450
[ 1, 1, 1, 4, 1484697944 ]
def service(self):
    """The healthChecks API service of the compute client."""
    return self.compute.healthChecks
KaranToor/MA450
[ 1, 1, 1, 4, 1484697944 ]
def method(self):
    """API method used by this command."""
    return 'Insert'
KaranToor/MA450
[ 1, 1, 1, 4, 1484697944 ]
def resource_type(self):
    """Resource collection this command operates on."""
    return 'healthChecks'
KaranToor/MA450
[ 1, 1, 1, 4, 1484697944 ]
def Args(parser):
    """Register the base creation flags plus the HTTP response-match arg."""
    Create.Args(parser)
    health_checks_utils.AddHttpRelatedResponseArg(parser)
KaranToor/MA450
[ 1, 1, 1, 4, 1484697944 ]
def setUpClass(cls):
    """Install a small tenant quota set and pre-create one volume and one
    snapshot so that the tests below start at the quota limits."""
    super(VolumeQuotasNegativeTestJSON, cls).setUpClass()
    demo_user = cls.isolated_creds.get_primary_creds()
    cls.demo_tenant_id = demo_user.tenant_id
    cls.shared_quota_set = {'gigabytes': 3, 'volumes': 1, 'snapshots': 1}
    # NOTE(gfidente): no need to restore original quota set
    # after the tests as they only work with tenant isolation.
    resp, quota_set = cls.quotas_client.update_quota_set(
        cls.demo_tenant_id,
        **cls.shared_quota_set)
    # NOTE(gfidente): no need to delete in tearDown as
    # they are created using utility wrapper methods.
    cls.volume = cls.create_volume()
    cls.snapshot = cls.create_snapshot(cls.volume['id'])
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_quota_volumes(self):
    # The quota allows one volume and setUpClass already created it,
    # so another create must raise OverLimit.
    self.assertRaises(exceptions.OverLimit,
                      self.volumes_client.create_volume,
                      size=1)
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_quota_volume_snapshots(self):
    # The quota allows one snapshot and setUpClass already created it,
    # so another snapshot create must raise OverLimit.
    self.assertRaises(exceptions.OverLimit,
                      self.snapshots_client.create_snapshot,
                      self.volume['id'])
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def test_quota_volume_gigabytes(self):
    """Exceeding the gigabytes quota must fail for volumes and snapshots."""
    # NOTE(gfidente): quota set needs to be changed for this test
    # or we may be limited by the volumes or snaps quota number, not by
    # actual gigs usage; next line ensures shared set is restored.
    self.addCleanup(self.quotas_client.update_quota_set,
                    self.demo_tenant_id,
                    **self.shared_quota_set)
    new_quota_set = {'gigabytes': 2, 'volumes': 2, 'snapshots': 1}
    resp, quota_set = self.quotas_client.update_quota_set(
        self.demo_tenant_id,
        **new_quota_set)
    self.assertRaises(exceptions.OverLimit,
                      self.volumes_client.create_volume,
                      size=1)
    new_quota_set = {'gigabytes': 2, 'volumes': 1, 'snapshots': 2}
    # Bug fix: this update previously passed **self.shared_quota_set,
    # silently discarding the new_quota_set built on the line above, so the
    # snapshot check could trip on the snapshot-count quota instead of the
    # intended gigabytes usage.
    resp, quota_set = self.quotas_client.update_quota_set(
        self.demo_tenant_id,
        **new_quota_set)
    self.assertRaises(exceptions.OverLimit,
                      self.snapshots_client.create_snapshot,
                      self.volume['id'])
Mirantis/tempest
[ 2, 7, 2, 1, 1327963146 ]
def parse_mailmap(mailmap='.mailmap'):
    """Parse a git mailmap file into {alias_email: canonical_email}.

    Returns an empty mapping when the file does not exist.
    """
    mapping = {}
    if os.path.exists(mailmap):
        # Use a context manager so the handle is always closed
        # (the original leaked the open file object).
        with open(mailmap, 'r') as fp:
            for l in fp:
                l = l.strip()
                # Skip comments; a mapping line is "canonical alias".
                if not l.startswith('#') and ' ' in l:
                    canonical_email, alias = l.split(' ')
                    mapping[alias] = canonical_email
    return mapping
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def get_reqs_from_files(requirements_files):
    """Return the requirement lines of the first existing file, or [].

    Files are tried in order; only the first one found is read.
    """
    for requirements_file in requirements_files:
        if os.path.exists(requirements_file):
            # Close the handle deterministically (the original leaked it)
            # and drop the unused accumulator the original declared.
            with open(requirements_file, 'r') as reqs_file:
                return reqs_file.read().split('\n')
    return []
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def parse_dependency_links(requirements_files=['requirements.txt', 'tools/pip-requires']): dependency_links = [] # dependency_links inject alternate locations to find packages listed # in requirements for line in get_reqs_from_files(requirements_files): # skip comments and blank lines if re.match(r'(\s*#)|(\s*$)', line): continue # lines with -e or -f need the whole line, minus the flag if re.match(r'\s*-[ef]\s+', line): dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) # lines that are only urls can go in unmolested elif re.match(r'\s*https?:', line): dependency_links.append(line) return dependency_links
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def _run_shell_command(cmd):
    """Run *cmd* via /bin/sh and return its stripped stdout, or None if empty."""
    proc = subprocess.Popen(["/bin/sh", "-c", cmd],
                            stdout=subprocess.PIPE)
    # communicate() waits for the process and always returns a
    # (stdout, stderr) pair, so the original `len(out) == 0` branch was dead.
    out = proc.communicate()[0].strip()
    return out if out else None
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def _get_git_current_tag():
    """Tag(s) pointing at HEAD, or None when the command prints nothing."""
    return _run_shell_command("git tag --contains HEAD")
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def _get_git_post_version():
    """Derive a post-release version string from git history.

    Returns the exact tag when HEAD is tagged; otherwise appends a commit
    count to the latest tag's base version, or to "0.0" when the repo has
    no tags at all.
    """
    current_tag = _get_git_current_tag()
    if current_tag is not None:
        return current_tag
    else:
        tag_info = _get_git_tag_info()
        if tag_info is None:
            base_version = "0.0"
            cmd = "git --no-pager log --oneline"
            out = _run_shell_command(cmd)
            # Commit count stands in for the revision number.
            revno = len(out.split("\n"))
        else:
            # Describe output looks like <tag>-<revno>-g<sha>; rejoin any
            # dashes that belong to the tag itself.
            tag_infos = tag_info.split("-")
            base_version = "-".join(tag_infos[:-2])
            revno = tag_infos[-2]
        return "%s.%s" % (base_version, revno)
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def generate_authors():
    """Create AUTHORS file using git commits."""
    jenkins_email = 'jenkins@review.openstack.org'
    old_authors = 'AUTHORS.in'
    new_authors = 'AUTHORS'
    # Only possible inside a git checkout; zipballs keep the shipped file.
    if os.path.isdir('.git'):
        # don't include jenkins email address in AUTHORS file
        git_log_cmd = ("git log --format='%aN <%aE>' | sort -u | "
                       "grep -v " + jenkins_email)
        changelog = _run_shell_command(git_log_cmd)
        mailmap = parse_mailmap()
        with open(new_authors, 'w') as new_authors_fh:
            new_authors_fh.write(canonicalize_emails(changelog, mailmap))
            # Preserve hand-maintained entries by appending AUTHORS.in.
            if os.path.exists(old_authors):
                with open(old_authors, "r") as old_authors_fh:
                    new_authors_fh.write('\n' + old_authors_fh.read())
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def read_versioninfo(project):
    """Read the versioninfo file for *project*.

    Falls back to "0.0.0" when the file is absent (e.g. a github zipball,
    where no real version can be determined anyway).
    """
    versioninfo_path = os.path.join(project, 'versioninfo')
    if not os.path.exists(versioninfo_path):
        return "0.0.0"
    with open(versioninfo_path, 'r') as vinfo:
        return vinfo.read().strip()
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def get_cmdclass(): """Return dict of commands to run from setup.py.""" cmdclass = dict() def _find_modules(arg, dirname, files): for filename in files: if filename.endswith('.py') and filename != '__init__.py': arg["%s.%s" % (dirname.replace('/', '.'), filename[:-3])] = True class LocalSDist(sdist.sdist): """Builds the ChangeLog and Authors files from VC first.""" def run(self): write_git_changelog() generate_authors() # sdist.sdist is an old style class, can't use super() sdist.sdist.run(self) cmdclass['sdist'] = LocalSDist # If Sphinx is installed on the box running setup.py, # enable setup.py to build the documentation, otherwise, # just ignore it try: from sphinx.setup_command import BuildDoc class LocalBuildDoc(BuildDoc): def generate_autoindex(self): print "**Autodocumenting from %s" % os.path.abspath(os.curdir) modules = {} option_dict = self.distribution.get_option_dict('build_sphinx') source_dir = os.path.join(option_dict['source_dir'][1], 'api') if not os.path.exists(source_dir): os.makedirs(source_dir) for pkg in self.distribution.packages: if '.' not in pkg: os.path.walk(pkg, _find_modules, modules) module_list = modules.keys() module_list.sort() autoindex_filename = os.path.join(source_dir, 'autoindex.rst') with open(autoindex_filename, 'w') as autoindex: autoindex.write(""".. toctree:: :maxdepth: 1
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def run(self):
    """Build html and man documentation, regenerating the API autoindex
    first unless SPHINX_DEBUG is set."""
    if not os.getenv('SPHINX_DEBUG'):
        self.generate_autoindex()
    for builder in ['html', 'man']:
        self.builder = builder
        self.finalize_options()
        self.project = self.distribution.get_name()
        self.version = self.distribution.get_version()
        self.release = self.distribution.get_version()
        BuildDoc.run(self)
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def get_git_branchname():
    """Return the current git branch name.

    Detached HEAD ("(no branch)") is reported as "no-branch".  Returns
    None when no branch line is marked current -- the original raised
    UnboundLocalError in that case because _branch_name was never bound.
    """
    _branch_name = None
    for branch in _run_shell_command("git branch --color=never").split("\n"):
        if branch.startswith('*'):
            _branch_name = branch.split()[1].strip()
            if _branch_name == "(no":
                _branch_name = "no-branch"
    return _branch_name
chmouel/python-swiftclient
[ 5, 2, 5, 1, 1329139665 ]
def __init__(self, name=""):
    """Base RISC-V instruction: declares every constraint-solver variable
    label plus bookkeeping fields, and creates the min-conflicts
    constraint problem."""
    self.name = name
    # Labels used as variable keys in the constraint problem/solution.
    self.instr_group = "Instruction Group"
    self.instr_format = "Instruction Format"
    self.instr_category = "Instruction Category"
    self.instr_name = "Instruction Name"
    self.instr_imm_t = "Instruction Immediate Type"
    self.instr_src2 = "Instruction Source 2"
    self.instr_src1 = "Instruction Source 1"
    self.instr_rd = "Instruction Destination"
    self.imm = "Instruction Immediate"
    self.imm_length = "Instruction Immediate Length"
    self.imm_str = ""
    self.csr = "CSR"
    self.comment = ""
    self.has_label = 1
    self.label = ""
    self.idx = -1
    self.atomic = 0  # As of now, we don't support atomic instructions.
    self.is_compressed = 0  # As of now, compressed instructions are not supported
    self.is_illegal_instr = 0
    self.is_local_numeric_label = 0
    self.is_pseudo_instr = "Is it a pseudo instruction or not"
    self.branch_assigned = 0
    self.process_load_store = 1
    self.solution = "A random solution which meets given constraints"
    self.problem = constraint.Problem(constraint.MinConflictsSolver())
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def convert2asm(self):
    """Render the solved instruction as a lowercase assembly string."""
    # asm (the string being built) and name both start from the mnemonic.
    asm = name = self.solution[self.instr_name]
    format = self.solution[self.instr_format]
    category = self.solution[self.instr_category]
    src2 = self.solution[self.instr_src2]
    src1 = self.solution[self.instr_src1]
    destination = self.solution[self.instr_rd]
    csr = self.solution[self.csr]
    if category != "SYSTEM":
        if format == "J_FORMAT" or format == "U_FORMAT":
            asm += " {}, {}".format(destination, self.get_imm())
        elif format == "I_FORMAT":
            if name == "NOP":
                asm = "nop"
            elif name == "FENCE":
                asm = "fence"
            elif name == "FENCEI":
                asm = "fence.i"
            elif category == "LOAD":
                # Loads use the offset(base) addressing syntax.
                asm += " {}, {}({})".format(destination, self.get_imm(), src1)
            elif category == "CSR":
                asm += " {}, {}, {}".format(destination, hex(csr), self.get_imm())
            else:
                asm += " {}, {}, {}".format(destination, src1, self.get_imm())
        elif format == "S_FORMAT" or format == "B_FORMAT":
            if category == "STORE":
                asm += " {}, {}({})".format(src2, self.get_imm(), src1)
            else:
                asm += " {}, {}, {}".format(src1, src2, self.get_imm())
        elif format == "R_FORMAT":
            if category == "CSR":
                asm += " {}, {}, {}".format(destination, hex(csr), src1)
            else:
                asm += " {}, {}, {}".format(destination, src1, src2)
    else:
        if name == "BREAK":
            # ebreak must not be compressed; toggle rvc around it.
            asm = ".option norvc;ebreak;.option rvc;"
    if self.comment != "":
        asm += " # {}".format(self.comment)
    return asm.lower()
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def convert2bin(self, sol):
    """Convert a solved instruction into its binary encoding.

    NOTE(review): only the J_FORMAT branch exists and it merely binds an
    empty string -- this looks like an unfinished stub; the function
    implicitly returns None for every input.
    """
    name = sol[self.instr_name]
    format = sol[self.instr_format]
    imm = sol[self.imm]
    rd = sol[self.instr_rd]
    if format == "J_FORMAT":
        binary = ""
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def get_imm(self):
    """Textual form of the immediate, as prepared elsewhere."""
    return self.imm_str
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def default_c(is_pseudo_instr):
    """Constraint: satisfied only by non-pseudo instructions."""
    return True if not is_pseudo_instr else None
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def fence_c(name, source1, destination, imm):
    """Constraint: FENCE/FENCEI must use zero operands; others are free.

    NOTE(review): the else-branch binding is ambiguous in the collapsed
    original; it is taken here to belong to the outer if, so non-fence
    instructions always satisfy the constraint -- confirm against upstream.
    """
    if name == "FENCE" or name == "FENCEI":
        if source1 == "ZERO" and destination == "ZERO" and imm == 0:
            return True
    else:
        return True
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def nop_c(name, source1, source2, destination):
    """Constraint: NOP must use all-zero registers; others are free.

    NOTE(review): else-branch binding chosen as the outer if (mirrors
    fence_c) -- confirm against upstream formatting.
    """
    if name == "NOP":
        if source1 == "ZERO" and source2 == "ZERO" and destination == "ZERO":
            return True
    else:
        return True
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def imm_len_c(format, imm_t, imm_length):
    """Constraint tying the immediate bit-length to format and type."""
    if format in ("U_FORMAT", "J_FORMAT"):
        return imm_length == 20
    if format in ("I_FORMAT", "S_FORMAT", "B_FORMAT"):
        # Unsigned immediates are 5 bits; signed ones 11.
        return imm_length == (5 if imm_t == "UIMM" else 11)
    return True
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def shift_imm_val_c(category, imm):
    """Constraint: shift amounts must be below the word length XLEN."""
    return imm < utils.XLEN if category == "SHIFT" else True
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def non_system(category):
    """Constraint: exclude SYSTEM-category instructions."""
    return True if category != "SYSTEM" else None
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def non_synch(category):
    """Constraint: exclude SYNCH-category instructions."""
    return True if category != "SYNCH" else None
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def no_load_store_c(category):
    """Constraint: exclude both LOAD and STORE categories."""
    return True if category not in ("LOAD", "STORE") else None
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def randomize(self):
    """Solve the constraint problem, retrying with a progressively larger
    min-conflicts step budget until a solution is found."""
    # old randomize()
    # self.solution = self.problem.getSolution()
    # self.post_randomize()
    self.solution = self.problem.getSolution()
    if self.solution:
        # print("TODO: randomized with steps: {}".format(self.problem._solver._steps))
        pass
    else:
        i = 1
        while self.solution is None:
            # Up to ten attempts at the current step budget.
            for j in range(10):
                self.solution = self.problem.getSolution()
                if self.solution:
                    # print("TODO: randomized with steps: {}".format(self.problem._solver._steps))
                    break
            # Grow the solver's step budget geometrically before retrying.
            i *= 5
            self.problem._solver._steps *= i
    self.post_randomize()
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def __init__(self, name=""):
    """Pseudo-instruction variant: reuse the base setup but disable
    load/store processing."""
    # calling super constructor
    riscv_instr_base.__init__(self, name)
    # Important: Constraint solver gets too slow in pseudo class. We have three solutions:
    # 1- change the type of the constraint solver, from MinConflict to regular, this one
    #    also takes fairly good amount of time, but it's good for validity check, to see
    #    if constraints are valid and there is no conflict between them.
    # 2- Increase the number of steps for MinConflict...
    # 3- Since we don't need to check the name_c constraint here, we can get rid of it
    #    for pseudo class! We're going to use this option for now
    # self.problem = constraint.Problem(constraint.MinConflictsSolver(steps=10000))
    # self.problem = constraint.Problem()
    self.process_load_store = 0
    self.pseudo_instr_name = "Pseudo instruction name"
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def pseudo_name_c(name, group, format, category):
    """Constraint: only LI/LA pseudo-instructions in RV32I I-format LOAD."""
    allowed_name = name == "LI" or name == "LA"
    if allowed_name and group == "RV32I" and format == "I_FORMAT" and category == "LOAD":
        return True
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def default_c(is_pseudo_instr):
    """Constraint: satisfied only by pseudo instructions."""
    return True if is_pseudo_instr else None
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def convert2asm(self):
    """Render the pseudo-instruction as lowercase "<name> rd, imm"."""
    asm_str = self.get_instr_name()
    destination = self.solution[self.instr_rd]
    # instr rd,imm
    asm_str = "{} {}, {}".format(asm_str, destination, self.get_imm())
    if self.comment != "":
        asm_str = asm_str + " #" + self.comment
    return asm_str.lower()
lowRISC/ibex
[ 932, 400, 932, 152, 1502194596 ]
def testA():
    """Single-character payloads round-trip through type-'A' packets."""
    # test A
    assert packet.pack('A','C') == b'AC'
    assert packet.pack('A','B') == b'AB'
    assert packet.pack('A','C') != b'AB'
    assert packet.unpack_stream(io.BytesIO(b'AC')) == ('A', 'C')
    assert packet.unpack_stream(io.BytesIO(b'AB')) == ('A', 'B')
    assert packet.unpack_stream(io.BytesIO(b'AB')) != ('A', 'C')
mcdeoliveira/ctrl
[ 11, 6, 11, 1, 1435356913 ]
def testS():
    """Length-prefixed strings round-trip through type-'S' packets."""
    # test S
    assert packet.pack('S','abc') == struct.pack('<cI3s', b'S', 3, b'abc')
    assert packet.pack('S','abcd') != struct.pack('<cI3s', b'S', 3, b'abc')
    assert packet.unpack_stream(
        io.BytesIO(struct.pack('<cI3s', b'S', 3, b'abc'))) == ('S', 'abc')
    assert packet.unpack_stream(
        io.BytesIO(struct.pack('<cI3s', b'S', 3, b'abc'))) != ('S', 'abcd')
mcdeoliveira/ctrl
[ 11, 6, 11, 1, 1435356913 ]
def testV():
    """Int, float32 and float64 vectors round-trip through type-'V' packets."""
    # test VI
    vector = numpy.array((1,2,3), int)
    assert packet.pack('V',vector) == struct.pack('<ccIiii', b'V', b'I', 3, 1, 2, 3)
    (type, rvector) = packet.unpack_stream(
        io.BytesIO(struct.pack('<ccIiii', b'V', b'I', 3, 1, 2, 3)))
    assert type == 'V'
    assert numpy.all(rvector == vector)
    vector = numpy.array((1,-2,3), int)
    assert packet.pack('V',vector) == struct.pack('<ccIiii', b'V', b'I', 3, 1, -2, 3)
    (type, rvector) = packet.unpack_stream(
        io.BytesIO(struct.pack('<ccIiii', b'V', b'I', 3, 1, -2, 3)))
    assert type == 'V'
    assert numpy.all(rvector == vector)
    # test VF
    vector = numpy.array((1.3,-2,3), numpy.float32)
    assert packet.pack('V',vector) == struct.pack('<ccIfff', b'V', b'F', 3, 1.3, -2, 3)
    (type, rvector) = packet.unpack_stream(
        io.BytesIO(struct.pack('<ccIfff', b'V', b'F', 3, 1.3, -2, 3)))
    assert type == 'V'
    assert numpy.all(rvector == vector)
    # test VD
    vector = numpy.array((1.3,-2,3), float)
    assert packet.pack('V',vector) == struct.pack('<ccIddd', b'V', b'D', 3, 1.3, -2, 3)
    (type, rvector) = packet.unpack_stream(
        io.BytesIO(struct.pack('<ccIddd', b'V', b'D', 3, 1.3, -2, 3)))
    assert type == 'V'
    assert numpy.all(rvector == vector)
mcdeoliveira/ctrl
[ 11, 6, 11, 1, 1435356913 ]
def testP():
    """A 2-D float matrix round-trips through a type-'P' packet."""
    # numpy.float was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin float is the documented replacement (same dtype: float64).
    vector = numpy.array(((1.3, -2, 3), (0, -1, 2.5)), float)
    string = packet.pack('P', vector)
    (type, rvector) = packet.unpack_stream(io.BytesIO(string))
    assert type == 'P'
    assert numpy.all(rvector == vector)
mcdeoliveira/ctrl
[ 11, 6, 11, 1, 1435356913 ]
def __init__(self, device, cmdline_file):
    """Initializes the FlagChanger and records the original arguments.

    Args:
      device: A DeviceUtils instance.
      cmdline_file: Path to the command line file on the device.
    """
    self._device = device
    self._cmdline_file = cmdline_file
    # Save the original flags.
    try:
        self._orig_line = self._device.ReadFile(self._cmdline_file).strip()
    except device_errors.CommandFailedError:
        # A missing file simply means no flags have been set yet.
        self._orig_line = ''
    # Parse out the flags into a list to facilitate adding and removing flags.
    self._current_flags = self._TokenizeFlags(self._orig_line)
Teamxrtc/webrtc-streaming-node
[ 6, 5, 6, 2, 1449773735 ]
def Set(self, flags):
    """Replace every flag on the command line with *flags*.

    Args:
      flags: A list of flags to set, eg. ['--single-process'].
    """
    if flags:
        # Guard against callers accidentally passing the program name.
        assert flags[0] != 'chrome'
    self._current_flags = flags
    self._UpdateCommandLineFile()
Teamxrtc/webrtc-streaming-node
[ 6, 5, 6, 2, 1449773735 ]
def RemoveFlags(self, flags):
    """Remove each of *flags* from the command line if present.

    A flag is matched by its exact string -- switch plus value, i.e. the
    same text originally used to add it.
    """
    if flags:
        assert flags[0] != 'chrome'
    for unwanted in flags:
        if unwanted in self._current_flags:
            self._current_flags.remove(unwanted)
    self._UpdateCommandLineFile()
Teamxrtc/webrtc-streaming-node
[ 6, 5, 6, 2, 1449773735 ]
def _UpdateCommandLineFile(self):
    """Writes out the command line to the file, or removes it if empty."""
    logging.info('Current flags: %s', self._current_flags)
    # Root is not required to write to /data/local/tmp/.
    use_root = '/data/local/tmp/' not in self._cmdline_file
    if self._current_flags:
        # The first command line argument doesn't matter as we are not actually
        # launching the chrome executable using this command line.
        cmd_line = ' '.join(['_'] + self._current_flags)
        self._device.WriteFile(
            self._cmdline_file, cmd_line, as_root=use_root)
        # Read back to verify the write actually landed on the device.
        file_contents = self._device.ReadFile(
            self._cmdline_file, as_root=use_root).rstrip()
        assert file_contents == cmd_line, (
            'Failed to set the command line file at %s' % self._cmdline_file)
    else:
        self._device.RunShellCommand('rm ' + self._cmdline_file,
                                     as_root=use_root)
        assert not self._device.FileExists(self._cmdline_file), (
            'Failed to remove the command line file at %s' % self._cmdline_file)
Teamxrtc/webrtc-streaming-node
[ 6, 5, 6, 2, 1449773735 ]
def generate(data):
    """Fill in the question's correct answer: x is always 3."""
    data['correct_answers']['x'] = 3
PrairieLearn/PrairieLearn
[ 246, 250, 246, 871, 1400730536 ]
def test_trigger(self) -> None:
    # PagerDuty v1 "trigger" fixture: incident link, service, assignee.
    expected_message = 'Incident [3](https://zulip-test.pagerduty.com/incidents/P140S4Y) triggered by [Test service](https://zulip-test.pagerduty.com/services/PIL5CUQ) (assigned to [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ)):\n\n``` quote\nfoo\n```'
    self.send_and_test_stream_message('trigger', "Incident 3", expected_message)
timabbott/zulip
[ 2, 7, 2, 1, 1443209656 ]
def test_trigger_without_assignee_v2(self) -> None:
    # v2 "trigger" fixture with no assignee renders "assigned to nobody".
    expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) triggered by [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75) (assigned to nobody):\n\n``` quote\nMy new incident\n```'
    self.send_and_test_stream_message('trigger_without_assignee_v2', 'Incident 33', expected_message)
timabbott/zulip
[ 2, 7, 2, 1, 1443209656 ]
def test_resolved(self) -> None:
    # "resolved" fixture: resolver user is credited in the message.
    expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) resolved by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\nIt is on fire\n```'
    self.send_and_test_stream_message('resolved', "Incident 1", expected_message)
timabbott/zulip
[ 2, 7, 2, 1, 1443209656 ]
def test_auto_resolved(self) -> None:
    # Auto-resolution has no resolving user, so none is mentioned.
    expected_message = 'Incident [2](https://zulip-test.pagerduty.com/incidents/PX7K9J2) resolved:\n\n``` quote\nnew\n```'
    self.send_and_test_stream_message('auto_resolved', "Incident 2", expected_message)
timabbott/zulip
[ 2, 7, 2, 1, 1443209656 ]
def test_acknowledge_without_trigger_summary_data(self) -> None:
    # Missing trigger summary data results in an empty quote block.
    expected_message = 'Incident [1](https://zulip-test.pagerduty.com/incidents/PO1XIJ5) acknowledged by [armooo](https://zulip-test.pagerduty.com/users/POBCFRJ):\n\n``` quote\n\n```'
    self.send_and_test_stream_message('acknowledge_without_trigger_summary_data', "Incident 1", expected_message)
timabbott/zulip
[ 2, 7, 2, 1, 1443209656 ]
def test_incident_assigned_v2(self) -> None:
    # v2 "assign" fixture: assignment message names the new assignee.
    expected_message = 'Incident [33](https://webdemo.pagerduty.com/incidents/PRORDTY) assigned to [Wiley Jacobson](https://webdemo.pagerduty.com/users/PFBSJ2Z):\n\n``` quote\nMy new incident\n```'
    self.send_and_test_stream_message('assign_v2', 'Incident 33', expected_message)
timabbott/zulip
[ 2, 7, 2, 1, 1443209656 ]
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Demo covers."""
    demo_covers = [
        DemoCover(hass, 'Kitchen Window'),
        DemoCover(hass, 'Hall Window', 10),
        DemoCover(hass, 'Living Room Window', 70, 50),
    ]
    add_devices(demo_covers)
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def __init__(self, hass, name, position=None, tilt_position=None):
    """Initialize the cover."""
    self.hass = hass
    self._name = name
    self._position = position
    self._set_position = None
    self._set_tilt_position = None
    self._tilt_position = tilt_position
    # Movement direction defaults to closing until a command says otherwise.
    self._closing = True
    self._closing_tilt = True
    # Handles for cancelling the time-change listeners while moving.
    self._unsub_listener_cover = None
    self._unsub_listener_cover_tilt = None
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def name(self):
    """Name given to this demo cover at construction."""
    return self._name
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def should_poll(self):
    """Demo covers push their own state; polling is unnecessary."""
    return False
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def current_cover_position(self):
    """Current position of the cover (0..100, or None if unknown)."""
    return self._position
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def current_cover_tilt_position(self):
    """Current tilt position of the cover, if any."""
    return self._tilt_position
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def is_closed(self):
    """True when fully closed, False when open, None when unknown."""
    if self._position is None:
        return None
    return not self.current_cover_position > 0
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def close_cover_tilt(self, **kwargs):
    """Close the cover tilt."""
    if self._tilt_position is None or self._tilt_position == 0:
        # Already fully closed, or tilt unsupported: nothing to do.
        return
    self._listen_cover_tilt()
    self._closing_tilt = True
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def open_cover_tilt(self, **kwargs):
    """Open the cover tilt."""
    if self._tilt_position is None or self._tilt_position == 100:
        # Already fully open, or tilt unsupported: nothing to do.
        return
    self._listen_cover_tilt()
    self._closing_tilt = False
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def set_cover_tilt_position(self, tilt_position, **kwargs):
    """Move the cover tilt to a specific position."""
    # Targets snap to the nearest multiple of ten.
    self._set_tilt_position = round(tilt_position, -1)
    if self._tilt_position == tilt_position:
        # Already at the requested tilt; no movement needed.
        return
    self._listen_cover_tilt()
    self._closing_tilt = tilt_position < self._tilt_position
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def stop_cover_tilt(self, **kwargs):
    """Stop the cover tilt."""
    if self._tilt_position is None:
        # Tilt unsupported; nothing to stop.
        return
    unsub = self._unsub_listener_cover_tilt
    if unsub is not None:
        # Cancel the time listener and clear the pending target.
        unsub()
        self._unsub_listener_cover_tilt = None
        self._set_tilt_position = None
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def _time_changed_cover(self, now):
    """Track time changes."""
    # Step the position by ten in the current direction of travel.
    if self._closing:
        self._position -= 10
    else:
        self._position += 10
    # Stop at either end of travel or when the requested target is reached.
    if self._position in (100, 0, self._set_position):
        self.stop_cover()
    self.update_ha_state()
Smart-Torvy/torvy-home-assistant
[ 1, 1, 1, 2, 1460403687 ]
def setUp(self):
    """Create a DOT adapter, a test user, and a public OAuth client."""
    super().setUp()
    self.adapter = DOTAdapter()
    self.user = UserFactory()
    self.client = self.adapter.create_public_client(
        name='public app',
        user=self.user,
        redirect_uri=DUMMY_REDIRECT_URL,
        client_id='public-client-id',
    )
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def test_create_token_success(self):
    """A created token carries the expected fields and is persisted."""
    token = api.create_dot_access_token(HttpRequest(), self.user, self.client)
    assert token['access_token']
    assert token['refresh_token']
    # NOTE(review): assertDictContainsSubset is deprecated (removed in
    # Python 3.12); `assert subset.items() <= token.items()` is the
    # modern equivalent -- confirm the project's Python floor first.
    self.assertDictContainsSubset(
        {
            'token_type': 'Bearer',
            'expires_in': EXPECTED_DEFAULT_EXPIRES_IN,
            'scope': '',
        },
        token,
    )
    self._assert_stored_token(token['access_token'], self.user, self.client)
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def add_filters(self, **kwargs):
    """
    Returns the queryset with new filters
    """
    # NOTE(review): get_query_set() was renamed get_queryset() in
    # Django 1.6 -- confirm the Django version pinned by this project
    # before upgrading.
    return super(AOIManager, self).get_query_set().filter(**kwargs)
stephenrjones/geoq
[ 4, 14, 4, 3, 1398889424 ]
def assigned(self):
    """Return the AOIs currently in the 'Assigned' status."""
    return self.add_filters(status='Assigned')
stephenrjones/geoq
[ 4, 14, 4, 3, 1398889424 ]
def submitted(self):
    """Return the AOIs currently in the 'Submitted' status."""
    return self.add_filters(status='Submitted')
stephenrjones/geoq
[ 4, 14, 4, 3, 1398889424 ]
def test_create_and_delete_directory(self):
    """A ram:// directory can be created and recursively deleted."""
    file_io.create_dir_v2('ram://testdirectory')
    file_io.delete_recursively_v2('ram://testdirectory')
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def test_write_file(self):
    """Writes accumulate within a handle and persist in the ram:// FS."""
    with gfile.GFile('ram://a.txt', 'w') as f:
        f.write('Hello, world.')
        f.write('Hello, world.')
    with gfile.GFile('ram://a.txt', 'r') as f:
        self.assertEqual(f.read(), 'Hello, world.' * 2)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def test_list_dir(self):
    """ListDirectory only returns entries of the requested ram:// dir."""
    for i in range(10):
        with gfile.GFile('ram://a/b/%d.txt' % i, 'w') as f:
            f.write('')
        # Files under a sibling tree must not leak into the listing.
        with gfile.GFile('ram://c/b/%d.txt' % i, 'w') as f:
            f.write('')
    matches = ['%d.txt' % i for i in range(10)]
    self.assertEqual(gfile.ListDirectory('ram://a/b/'), matches)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def test_file_exists(self):
    """Exists is true for the file and each ancestor dir, false otherwise."""
    with gfile.GFile('ram://exists/a/b/c.txt', 'w') as f:
        f.write('')
    self.assertTrue(gfile.Exists('ram://exists/a'))
    self.assertTrue(gfile.Exists('ram://exists/a/b'))
    self.assertTrue(gfile.Exists('ram://exists/a/b/c.txt'))
    self.assertFalse(gfile.Exists('ram://exists/b'))
    self.assertFalse(gfile.Exists('ram://exists/a/c'))
    self.assertFalse(gfile.Exists('ram://exists/a/b/k'))
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def model_fn(features, labels, mode, params):
    """Estimator model_fn: four 100-unit dense layers, linear head,
    MSE loss, Adam optimizer."""
    del params  # unused by this model
    x = core_layers.dense(features, 100)
    x = core_layers.dense(x, 100)
    x = core_layers.dense(x, 100)
    x = core_layers.dense(x, 100)
    y = core_layers.dense(x, 1)
    loss = losses.mean_squared_error(labels, y)
    opt = adam.AdamOptimizer(learning_rate=0.1)
    train_op = opt.minimize(
        loss, global_step=training_util.get_or_create_global_step())
    return EstimatorSpec(mode=mode, loss=loss, train_op=train_op)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def test_savedmodel(self):
    """SavedModel save/load round-trips through the ram:// filesystem."""
    class MyModule(module.Module):

        @def_function.function(input_signature=[])
        def foo(self):
            return constant_op.constant([1])

    saved_model.save(MyModule(), 'ram://my_module')
    loaded = saved_model.load('ram://my_module')
    self.assertAllEqual(loaded.foo(), [1])
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def KillAllAdb():
    """Terminate every adb process, escalating TERM -> QUIT -> KILL, then
    log anything that survived."""
    def get_all_adb():
        # Yield (process, info) pairs for every running adb process.
        for p in psutil.process_iter():
            try:
                # Retrieve all required process infos at once.
                pinfo = p.as_dict(attrs=['pid', 'name', 'cmdline'])
                if pinfo['name'] == 'adb':
                    pinfo['cmdline'] = ' '.join(pinfo['cmdline'])
                    yield p, pinfo
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                # Process vanished or is inaccessible; skip it.
                pass
    for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
        for p, pinfo in get_all_adb():
            try:
                pinfo['signal'] = sig
                logger.info('kill %(signal)s %(pid)s (%(name)s [%(cmdline)s])',
                            pinfo)
                p.send_signal(sig)
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                pass
    # Anything still visible at this point could not be killed.
    for _, pinfo in get_all_adb():
        try:
            logger.error('Unable to kill %(pid)s (%(name)s [%(cmdline)s])',
                         pinfo)
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            pass
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]