Unnamed: 0
int64
0
10k
function
stringlengths
79
138k
label
stringclasses
20 values
info
stringlengths
42
261
6,600
def test_patch_wont_create_by_default(self): try: @patch('%s.frooble' % builtin_string, sentinel.Frooble) def test(): self.assertEqual(frooble, sentinel.Frooble) test() except __HOLE__: pass else: self.fail('Patching non existent attributes should fail') self.assertRaises(NameError, lambda: frooble)
AttributeError
dataset/ETHPy150Open calvinchengx/python-mock/tests/testpatch.py/PatchTest.test_patch_wont_create_by_default
6,601
def test_patchobject_wont_create_by_default(self): try: @patch.object(SomeClass, 'frooble', sentinel.Frooble) def test(): self.fail('Patching non existent attributes should fail') test() except __HOLE__: pass else: self.fail('Patching non existent attributes should fail') self.assertFalse(hasattr(SomeClass, 'frooble'))
AttributeError
dataset/ETHPy150Open calvinchengx/python-mock/tests/testpatch.py/PatchTest.test_patchobject_wont_create_by_default
6,602
def test_patch_with_exception(self): foo = {} @patch.dict(foo, {'a': 'b'}) def test(): raise NameError('Konrad') try: test() except __HOLE__: pass else: self.fail('NameError not raised by test') self.assertEqual(foo, {})
NameError
dataset/ETHPy150Open calvinchengx/python-mock/tests/testpatch.py/PatchTest.test_patch_with_exception
6,603
@register.render_tag def set_ppm_permissions(context, token): """ Assigns a permissions dict to the given page instance, combining Django's permission for the page's model and a permission check against the instance itself calling the page's ``can_add``, ``can_change`` and ``can_delete`` custom methods. Used within the change list for pages, to implement permission checks for the navigation tree. """ page = context[token.split_contents()[1]] model = page.get_content_model() try: opts = model._meta except __HOLE__: if model is None: error = _("Could not load the model for the following page, " "was it removed?") obj = page else: # A missing inner Meta class usually means the Page model # hasn't been directly subclassed. error = _("An error occured with the following class. Does " "it subclass Page directly?") obj = model.__class__.__name__ raise ImproperlyConfigured(error + " '%s'" % obj) perm_name = opts.app_label + ".%s_" + opts.object_name.lower() request = context["request"] setattr(page, "perms", {}) for perm_type in ("add", "change", "delete"): perm = request.user.has_perm(perm_name % perm_type) perm = perm or getattr(model, "can_%s" % perm_type)(request) page.perms[perm_type] = perm return ""
AttributeError
dataset/ETHPy150Open hydroshare/hydroshare2/ga_resources/templatetags/geoanalytics_tags.py/set_ppm_permissions
6,604
@register.to_end_tag def ga_editable(parsed, context, token): """ Add the required HTML to the parsed content for in-line editing, such as the icon and edit form if the object is deemed to be editable - either it has an ``editable`` method which returns ``True``, or the logged in user has change permissions for the model. """ def parse_field(field): field = field.split(".") obj = context.get(field.pop(0), None) attr = field.pop() while field: obj = getattr(obj, field.pop(0)) return obj, attr fields = [parse_field(f) for f in token.split_contents()[1:]] if fields: fields = [f for f in fields if len(f) == 2 and f[0] is fields[0][0]] if not parsed.strip(): try: parsed = "".join([str(getattr(*field)) for field in fields]) except __HOLE__: pass if settings.INLINE_EDITING_ENABLED and fields and "request" in context: obj = fields[0][0] if isinstance(obj, Model) and is_editable(obj, context["request"]): field_names = ",".join([f[1] for f in fields]) context["editable_form"] = get_edit_form(obj, field_names) context["original"] = parsed t = get_template("includes/ga_editable_form.html") return t.render(Context(context)) return parsed
AttributeError
dataset/ETHPy150Open hydroshare/hydroshare2/ga_resources/templatetags/geoanalytics_tags.py/ga_editable
6,605
def _fetch_node(self, path, normalized=False): if not normalized: normed_path = self.normpath(path) else: normed_path = path try: return self._reverse_mapping[normed_path] except __HOLE__: raise exc.NotFound("Path '%s' not found" % path)
KeyError
dataset/ETHPy150Open openstack/taskflow/taskflow/persistence/backends/impl_memory.py/FakeFilesystem._fetch_node
6,606
def find_project_root(contents): MANIFEST = 'appinfo.json' SRC_DIR = 'src/' for base_dir in contents: print base_dir try: dir_end = base_dir.index(MANIFEST) print dir_end except __HOLE__: continue else: if dir_end + len(MANIFEST) != len(base_dir): print 'failed' continue base_dir = base_dir[:dir_end] print base_dir for source_dir in contents: if source_dir[:dir_end] != base_dir: continue if not source_dir.endswith('.c') and not source_dir.endswith('.js'): continue if source_dir[dir_end:dir_end+len(SRC_DIR)] != SRC_DIR: continue break else: continue break else: raise Exception(_("No project root found.")) return base_dir
ValueError
dataset/ETHPy150Open pebble/cloudpebble/ide/utils/project.py/find_project_root
6,607
def op_checksig(stack, signature_for_hash_type_f, expected_hash_type, tmp_script, flags): try: pair_blob = stack.pop() sig_blob = stack.pop() verify_strict = not not (flags & VERIFY_STRICTENC) # if verify_strict flag is set, we fail the script immediately on bad encoding if verify_strict: check_public_key_encoding(pair_blob) sig_pair, signature_type = parse_signature_blob(sig_blob, flags) public_pair = sec_to_public_pair(pair_blob, strict=verify_strict) except (der.UnexpectedDER, __HOLE__, EncodingError): stack.append(VCH_FALSE) return if expected_hash_type not in (None, signature_type): raise ScriptError("wrong hash type") # Drop the signature, since there's no way for a signature to sign itself # see: Bitcoin Core/script/interpreter.cpp::EvalScript() tmp_script = delete_subscript(tmp_script, bin_script([sig_blob])) signature_hash = signature_for_hash_type_f(signature_type, script=tmp_script) if ecdsa.verify(ecdsa.generator_secp256k1, public_pair, signature_hash, sig_pair): stack.append(VCH_TRUE) else: stack.append(VCH_FALSE)
ValueError
dataset/ETHPy150Open richardkiss/pycoin/pycoin/tx/script/check_signature.py/op_checksig
6,608
def sig_blob_matches(sig_blobs, public_pair_blobs, tmp_script, signature_for_hash_type_f, flags, exit_early=False): """ sig_blobs: signature blobs public_pair_blobs: a list of public pair blobs tmp_script: the script as of the last code separator signature_for_hash_type_f: signature_for_hash_type_f flags: verification flags to apply exit_early: if True, we may exit early if one of the sig_blobs is incorrect or misplaced. Used for checking a supposedly validated transaction. A -1 indicates no match. Returns a list of indices into public_pairs. If exit_early is True, it may return early. If sig_blob_indices isn't long enough or contains a -1, the signature is not valid. """ strict_encoding = not not (flags & VERIFY_STRICTENC) # Drop the signatures, since there's no way for a signature to sign itself for sig_blob in sig_blobs: tmp_script = delete_subscript(tmp_script, bin_script([sig_blob])) sig_cache = {} sig_blob_indices = [] ppb_idx = -1 while sig_blobs and len(sig_blobs) <= len(public_pair_blobs): if exit_early and -1 in sig_blob_indices: break sig_blob, sig_blobs = sig_blobs[0], sig_blobs[1:] try: sig_pair, signature_type = parse_signature_blob(sig_blob, flags) except (der.UnexpectedDER, __HOLE__): sig_blob_indices.append(-1) continue if signature_type not in sig_cache: sig_cache[signature_type] = signature_for_hash_type_f(signature_type, script=tmp_script) try: ppp = ecdsa.possible_public_pairs_for_signature( ecdsa.generator_secp256k1, sig_cache[signature_type], sig_pair) except ecdsa.NoSuchPointError: ppp = [] while len(sig_blobs) < len(public_pair_blobs): public_pair_blob, public_pair_blobs = public_pair_blobs[0], public_pair_blobs[1:] ppb_idx += 1 if strict_encoding: check_public_key_encoding(public_pair_blob) try: public_pair = sec_to_public_pair(public_pair_blob, strict=strict_encoding) except EncodingError: public_pair = None if public_pair in ppp: sig_blob_indices.append(ppb_idx) break else: sig_blob_indices.append(-1) return sig_blob_indices
ValueError
dataset/ETHPy150Open richardkiss/pycoin/pycoin/tx/script/check_signature.py/sig_blob_matches
6,609
def parse_time(t): ''' Parse a time value from a string, return a float in Unix epoch format. If no time can be parsed from the string, return None. ''' try: return float(t) except __HOLE__: match = cal.parse(t) if match and match[1]: return time.mktime( match[0] ) return None
ValueError
dataset/ETHPy150Open agoragames/torus/torus/util.py/parse_time
6,610
def find_backwards(self, offset): try: for _, token_type, token_value in \ reversed(self.tokens[self.offset:offset]): if token_type in ('comment', 'linecomment'): try: prefix, comment = token_value.split(None, 1) except __HOLE__: continue if prefix in self.comment_tags: return [comment.rstrip()] return [] finally: self.offset = offset
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/jinja2-2.6/jinja2/ext.py/_CommentFinder.find_backwards
6,611
def start_application(application): """Callable to be executed in the subprocess.""" application.listen(PORT) try: tornado.ioloop.IOLoop.instance().start() except __HOLE__: # Exit cleanly sys.exit(0)
KeyboardInterrupt
dataset/ETHPy150Open owtf/owtf/tests/testing_framework/server.py/start_application
6,612
def run(self): if self.args['<command>'].lower() == "help": self.display_help() else: cmdname = self.args['<command>'] if cmdname not in self.commands: print("Unknown command: %r" % cmdname) self.display_help() sys.exit(1) # parse command arguments cmd = self.commands[cmdname] cmd_argv = [cmdname] + self.args['<args>'] cmd_args = docopt(cmd.__doc__, argv=cmd_argv) try: config = Config(self.args) except __HOLE__ as e: sys.stderr.write("config error: %s" % str(e)) sys.exit(1) # finally launch the command cmd = self.commands[cmdname] try: return cmd.run(config, cmd_args) except GafferUnauthorized: print("Unauthorized access. You need an API key") sys.exit(1) except GafferForbidden: print("Forbidden access. API key permissions aren't enough") sys.exit(1) except tornado.httpclient.HTTPError as e: print("HTTP Error: %s\n" % str(e)) sys.exit(1) except RuntimeError as e: sys.stderr.write("%s\n" % str(e)) sys.exit(1) except Exception as e: import traceback print(traceback.format_exc()) sys.stderr.write("%s\n" % str(e)) sys.exit(1) sys.exit(0)
RuntimeError
dataset/ETHPy150Open benoitc/gaffer/gaffer/cli/main.py/GafferCli.run
6,613
def get(self, licenseName): try: lic = self.dispatcher.licenseManager.licenses[licenseName] licenseRepr = "{'max':%s, 'used':%s, 'rns':[" % (str(lic.maximum), str(lic.used)) for rn in sorted(lic.currentUsingRenderNodes): licenseRepr += "\"%s\"," % rn.name licenseRepr += "]}" self.writeCallback(licenseRepr) except __HOLE__: raise ResourceNotFoundError #@queue
KeyError
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/webservice/licenses.py/LicenseResource.get
6,614
def put(self, licenseName): data = self.getBodyAsJSON() try: maxLic = data['maxlic'] except __HOLE__: raise Http404("Missing entry : 'maxlic'") else: self.dispatcher.licenseManager.setMaxLicensesNumber(licenseName, maxLic) self.writeCallback("OK") #@queue
KeyError
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/webservice/licenses.py/LicenseResource.put
6,615
def delete(self, licenseName): data = self.getBodyAsJSON() try: rns = data['rns'] except __HOLE__: raise Http404("Missing entry : 'rns'") else: rnsList = rns.split(",") for rnName in rnsList: if rnName in self.dispatcher.dispatchTree.renderNodes: rn = self.dispatcher.dispatchTree.renderNodes[rnName] else: raise Http500("Internal Server Error: Render node %s is not registered." % (rnName)) self.dispatcher.licenseManager.releaseLicenseForRenderNode(licenseName, rn) self.writeCallback("OK")
KeyError
dataset/ETHPy150Open mikrosimage/OpenRenderManagement/src/octopus/dispatcher/webservice/licenses.py/LicenseResource.delete
6,616
def _from_simple(self, color): try: color = color.lower() color = color.replace(' ','') color = color.replace('_','') except __HOLE__: pass if color == 'reset': return elif color in _lower_camel_names[:16]: self.number = _lower_camel_names.index(color) self.rgb = from_html(color_html[self.number]) elif isinstance(color, int) and 0 <= color < 16: self.number = color self.rgb = from_html(color_html[color]) else: raise ColorNotFound("Did not find color: " + repr(color)) self.representation = 2 self._init_number()
AttributeError
dataset/ETHPy150Open tomerfiliba/plumbum/plumbum/colorlib/styles.py/Color._from_simple
6,617
def _from_full(self, color): try: color = color.lower() color = color.replace(' ','') color = color.replace('_','') except __HOLE__: pass if color == 'reset': return elif color in _lower_camel_names: self.number = _lower_camel_names.index(color) self.rgb = from_html(color_html[self.number]) elif isinstance(color, int) and 0 <= color <= 255: self.number = color self.rgb = from_html(color_html[color]) else: raise ColorNotFound("Did not find color: " + repr(color)) self.representation = 3 self._init_number()
AttributeError
dataset/ETHPy150Open tomerfiliba/plumbum/plumbum/colorlib/styles.py/Color._from_full
6,618
def _from_hex(self, color): try: self.rgb = from_html(color) except (__HOLE__, ValueError): raise ColorNotFound("Did not find htmlcode: " + repr(color)) self.representation = 4 self._init_number()
TypeError
dataset/ETHPy150Open tomerfiliba/plumbum/plumbum/colorlib/styles.py/Color._from_hex
6,619
def add_ansi(self, sequence, filter_resets = False): """Adds a sequence of ansi numbers to the class. Will ignore resets if filter_resets is True.""" values = iter(sequence) try: while True: value = next(values) if value == 38 or value == 48: fg = value == 38 value = next(values) if value == 5: value = next(values) if fg: self.fg = self.color_class.from_full(value) else: self.bg = self.color_class.from_full(value, fg=False) elif value == 2: r = next(values) g = next(values) b = next(values) if fg: self.fg = self.color_class(r, g, b) else: self.bg = self.color_class(r, g, b, fg=False) else: raise ColorNotFound("the value 5 or 2 should follow a 38 or 48") elif value==0: if filter_resets is False: self.isreset = True elif value in attributes_ansi.values(): for name in attributes_ansi: if value == attributes_ansi[name]: self.attributes[name] = True elif value in (20+n for n in attributes_ansi.values()): if filter_resets is False: for name in attributes_ansi: if value == attributes_ansi[name] + 20: self.attributes[name] = False elif 30 <= value <= 37: self.fg = self.color_class.from_simple(value-30) elif 40 <= value <= 47: self.bg = self.color_class.from_simple(value-40, fg=False) elif 90 <= value <= 97: self.fg = self.color_class.from_simple(value-90+8) elif 100 <= value <= 107: self.bg = self.color_class.from_simple(value-100+8, fg=False) elif value == 39: if filter_resets is False: self.fg = self.color_class() elif value == 49: if filter_resets is False: self.bg = self.color_class(fg=False) else: raise ColorNotFound("The code {0} is not recognised".format(value)) except __HOLE__: return
StopIteration
dataset/ETHPy150Open tomerfiliba/plumbum/plumbum/colorlib/styles.py/Style.add_ansi
6,620
def read_package_name_from_setup_py(path): try: setup_py = os.path.join (path, "setup.py") if os.path.exists (setup_py): cmd = [sys.executable, "-S", "-s", setup_py, "--name", "--version"] env = { "PYTHONPATH" : ":".join (sys.path) } proc = subprocess.Popen(cmd, env = env, stdout=subprocess.PIPE) result = proc.communicate() vv = result[0] if proc.returncode != 0: raise Exception("'%s' failed with return code '%d'" % (" ".join(cmd), proc.returncode)) # check_output was added in python 2.7. Use it when available #vv = subprocess.check_output([sys.executable, setup_py, "--name", "--version"]) return parse_setup_py_version_output(vv) except (Exception, __HOLE__): LOG.exception("Unable to run '%s'", setup_py)
IOError
dataset/ETHPy150Open zillow/buildout-platform-versions/buildout_platform_versions/__init__.py/read_package_name_from_setup_py
6,621
def read_package_name_from_pkg_resources(path): try: vv = pkg_resources.find_distributions(path) if vv is not None: return vv.split() except (Exception, __HOLE__): LOG.exception("Unable to execute 'pkg_resources.find_distributions(%s)'" % path)
IOError
dataset/ETHPy150Open zillow/buildout-platform-versions/buildout_platform_versions/__init__.py/read_package_name_from_pkg_resources
6,622
def test_bad_path(self): try: job_id = self.create_dataset( prebuilt_train_images = '/not-a-directory' ) except __HOLE__: return raise AssertionError('Should have failed')
RuntimeError
dataset/ETHPy150Open NVIDIA/DIGITS/digits/dataset/images/generic/test_views.py/TestCreation.test_bad_path
6,623
def get_of_switches(self): ''' Obtain a a list of switches or DPID detected by this controller Return >=0, list: list length, and a list where each element a tuple pair (DPID, IP address) <0, text_error: if fails ''' try: of_response = requests.get(self.url+"/restconf/operational/opendaylight-inventory:nodes", headers=self.headers) error_text = "Openflow response %d: %s" % (of_response.status_code, of_response.text) if of_response.status_code != 200: self.logger.warning("get_of_switches " + error_text) return -1 , error_text self.logger.debug("get_of_switches " + error_text) info = of_response.json() if type(info) != dict: self.logger.error("get_of_switches. Unexpected response, not a dict: %s", str(info)) return -1, "Unexpected response, not a dict. Wrong version?" nodes = info.get('nodes') if type(nodes) is not dict: self.logger.error("get_of_switches. Unexpected response at 'nodes', not found or not a dict: %s", str(type(info))) return -1, "Unexpected response at 'nodes', not found or not a dict. Wrong version?" node_list = nodes.get('node') if type(node_list) is not list: self.logger.error("get_of_switches. Unexpected response, at 'nodes':'node', not found or not a list: %s", str(type(node_list))) return -1, "Unexpected response, at 'nodes':'node', not found or not a list. Wrong version?" switch_list=[] for node in node_list: node_id = node.get('id') if node_id is None: self.logger.error("get_of_switches. Unexpected response at 'nodes':'node'[]:'id', not found: %s", str(node)) return -1, "Unexpected response at 'nodes':'node'[]:'id', not found . Wrong version?" if node_id == 'controller-config': continue node_ip_address = node.get('flow-node-inventory:ip-address') if node_ip_address is None: self.logger.error("get_of_switches. Unexpected response at 'nodes':'node'[]:'flow-node-inventory:ip-address', not found: %s", str(node)) return -1, "Unexpected response at 'nodes':'node'[]:'flow-node-inventory:ip-address', not found. Wrong version?" 
node_id_hex=hex(int(node_id.split(':')[1])).split('x')[1].zfill(16) switch_list.append( (':'.join(a+b for a,b in zip(node_id_hex[::2], node_id_hex[1::2])), node_ip_address)) return len(switch_list), switch_list except (requests.exceptions.RequestException, __HOLE__) as e: #ValueError in the case that JSON can not be decoded error_text = type(e).__name__ + ": " + str(e) self.logger.error("get_of_switches " + error_text) return -1, error_text
ValueError
dataset/ETHPy150Open nfvlabs/openmano/openvim/ODL.py/OF_conn.get_of_switches
6,624
def obtain_port_correspondence(self): '''Obtain the correspondence between physical and openflow port names return: 0, dictionary: with physical name as key, openflow name as value -1, error_text: if fails ''' try: of_response = requests.get(self.url+"/restconf/operational/opendaylight-inventory:nodes", headers=self.headers) error_text = "Openflow response %d: %s" % (of_response.status_code, of_response.text) if of_response.status_code != 200: self.logger.warning("obtain_port_correspondence " + error_text) return -1 , error_text self.logger.debug("obtain_port_correspondence " + error_text) info = of_response.json() if type(info) != dict: self.logger.error("obtain_port_correspondence. Unexpected response not a dict: %s", str(info)) return -1, "Unexpected openflow response, not a dict. Wrong version?" nodes = info.get('nodes') if type(nodes) is not dict: self.logger.error("obtain_port_correspondence. Unexpected response at 'nodes', not found or not a dict: %s", str(type(nodes))) return -1, "Unexpected response at 'nodes',not found or not a dict. Wrong version?" node_list = nodes.get('node') if type(node_list) is not list: self.logger.error("obtain_port_correspondence. Unexpected response, at 'nodes':'node', not found or not a list: %s", str(type(node_list))) return -1, "Unexpected response, at 'nodes':'node', not found or not a list. Wrong version?" for node in node_list: node_id = node.get('id') if node_id is None: self.logger.error("obtain_port_correspondence. Unexpected response at 'nodes':'node'[]:'id', not found: %s", str(node)) return -1, "Unexpected response at 'nodes':'node'[]:'id', not found . Wrong version?" if node_id == 'controller-config': continue # Figure out if this is the appropriate switch. 
The 'id' is 'openflow:' plus the decimal value # of the dpid # In case this is not the desired switch, continue if self.id != node_id: continue node_connector_list = node.get('node-connector') if type(node_connector_list) is not list: self.logger.error("obtain_port_correspondence. Unexpected response at 'nodes':'node'[]:'node-connector', not found or not a list: %s", str(node)) return -1, "Unexpected response at 'nodes':'node'[]:'node-connector', not found or not a list. Wrong version?" for node_connector in node_connector_list: self.pp2ofi[ str(node_connector['flow-node-inventory:name']) ] = str(node_connector['id'] ) self.ofi2pp[ node_connector['id'] ] = str(node_connector['flow-node-inventory:name']) node_ip_address = node.get('flow-node-inventory:ip-address') if node_ip_address is None: self.logger.error("obtain_port_correspondence. Unexpected response at 'nodes':'node'[]:'flow-node-inventory:ip-address', not found: %s", str(node)) return -1, "Unexpected response at 'nodes':'node'[]:'flow-node-inventory:ip-address', not found. Wrong version?" self.ip_address = node_ip_address #If we found the appropriate dpid no need to continue in the for loop break #print self.name, ": obtain_port_correspondence ports:", self.pp2ofi return 0, self.pp2ofi except (requests.exceptions.RequestException, __HOLE__) as e: #ValueError in the case that JSON can not be decoded error_text = type(e).__name__ + ": " + str(e) self.logger.error("obtain_port_correspondence " + error_text) return -1, error_text
ValueError
dataset/ETHPy150Open nfvlabs/openmano/openvim/ODL.py/OF_conn.obtain_port_correspondence
6,625
def get_of_rules(self, translate_of_ports=True): ''' Obtain the rules inserted at openflow controller Params: translate_of_ports: if True it translates ports from openflow index to physical switch name Return: 0, dict if ok: with the rule name as key and value is another dictionary with the following content: priority: rule priority name: rule name (present also as the master dict key) ingress_port: match input port of the rule dst_mac: match destination mac address of the rule, can be missing or None if not apply vlan_id: match vlan tag of the rule, can be missing or None if not apply actions: list of actions, composed by a pair tuples: (vlan, None/int): for stripping/setting a vlan tag (out, port): send to this port switch: DPID, all -1, text_error if fails ''' if len(self.ofi2pp) == 0: r,c = self.obtain_port_correspondence() if r<0: return r,c #get rules try: of_response = requests.get(self.url+"/restconf/config/opendaylight-inventory:nodes/node/" + self.id + "/table/0", headers=self.headers) error_text = "Openflow response %d: %s" % (of_response.status_code, of_response.text) # The configured page does not exist if there are no rules installed. In that case we return an empty dict if of_response.status_code == 404: return 0, {} elif of_response.status_code != 200: self.logger.warning("get_of_rules " + error_text) return -1 , error_text self.logger.debug("get_of_rules " + error_text) info = of_response.json() if type(info) != dict: self.logger.error("get_of_rules. Unexpected response not a dict: %s", str(info)) return -1, "Unexpected openflow response, not a dict. Wrong version?" table = info.get('flow-node-inventory:table') if type(table) is not list: self.logger.error("get_of_rules. Unexpected response at 'flow-node-inventory:table', not a list: %s", str(type(table))) return -1, "Unexpected response at 'flow-node-inventory:table', not a list. Wrong version?" 
flow_list = table[0].get('flow') if flow_list is None: return 0, {} if type(flow_list) is not list: self.logger.error("get_of_rules. Unexpected response at 'flow-node-inventory:table'[0]:'flow', not a list: %s", str(type(flow_list))) return -1, "Unexpected response at 'flow-node-inventory:table'[0]:'flow', not a list. Wrong version?" #TODO translate ports according to translate_of_ports parameter rules = dict() for flow in flow_list: if not ('id' in flow and 'match' in flow and 'instructions' in flow and \ 'instruction' in flow['instructions'] and 'apply-actions' in flow['instructions']['instruction'][0] and \ 'action' in flow['instructions']['instruction'][0]['apply-actions']): return -1, "unexpected openflow response, one or more elements are missing. Wrong version?" flow['instructions']['instruction'][0]['apply-actions']['action'] rule = dict() rule['switch'] = self.dpid rule['priority'] = flow.get('priority') #rule['name'] = flow['id'] #rule['cookie'] = flow['cookie'] if 'in-port' in flow['match']: in_port = flow['match']['in-port'] if not in_port in self.ofi2pp: return -1, "Error: Ingress port "+in_port+" is not in switch port list" if translate_of_ports: in_port = self.ofi2pp[in_port] rule['ingress_port'] = in_port if 'vlan-match' in flow['match'] and 'vlan-id' in flow['match']['vlan-match'] and \ 'vlan-id' in flow['match']['vlan-match']['vlan-id'] and \ 'vlan-id-present' in flow['match']['vlan-match']['vlan-id'] and \ flow['match']['vlan-match']['vlan-id']['vlan-id-present'] == True: rule['vlan_id'] = flow['match']['vlan-match']['vlan-id']['vlan-id'] if 'ethernet-match' in flow['match'] and 'ethernet-destination' in flow['match']['ethernet-match'] and \ 'address' in flow['match']['ethernet-match']['ethernet-destination']: rule['dst_mac'] = flow['match']['ethernet-match']['ethernet-destination']['address'] instructions=flow['instructions']['instruction'][0]['apply-actions']['action'] max_index=0 for instruction in instructions: if instruction['order'] > 
max_index: max_index = instruction['order'] actions=[None]*(max_index+1) for instruction in instructions: if 'output-action' in instruction: if not 'output-node-connector' in instruction['output-action']: return -1, "unexpected openflow response, one or more elementa are missing. Wrong version?" out_port = instruction['output-action']['output-node-connector'] if not out_port in self.ofi2pp: return -1, "Error: Output port "+out_port+" is not in switch port list" if translate_of_ports: out_port = self.ofi2pp[out_port] actions[instruction['order']] = ('out',out_port) elif 'strip-vlan-action' in instruction: actions[instruction['order']] = ('vlan', None) elif 'set-field' in instruction: if not ('vlan-match' in instruction['set-field'] and 'vlan-id' in instruction['set-field']['vlan-match'] and 'vlan-id' in instruction['set-field']['vlan-match']['vlan-id']): return -1, "unexpected openflow response, one or more elements are missing. Wrong version?" actions[instruction['order']] = ('vlan', instruction['set-field']['vlan-match']['vlan-id']['vlan-id']) actions = [x for x in actions if x != None] rule['actions'] = list(actions) rules[flow['id']] = dict(rule) #flow['id'] #flow['priority'] #flow['cookie'] #flow['match']['in-port'] #flow['match']['vlan-match']['vlan-id']['vlan-id'] # match -> in-port # -> vlan-match -> vlan-id -> vlan-id #flow['match']['vlan-match']['vlan-id']['vlan-id-present'] #TODO se asume que no se usan reglas con vlan-id-present:false #instructions -> instruction -> apply-actions -> action #instructions=flow['instructions']['instruction'][0]['apply-actions']['action'] #Es una lista. 
Posibles elementos: #max_index=0 #for instruction in instructions: # if instruction['order'] > max_index: # max_index = instruction['order'] #actions=[None]*(max_index+1) #for instruction in instructions: # if 'output-action' in instruction: # actions[instruction['order']] = ('out',instruction['output-action']['output-node-connector']) # elif 'strip-vlan-action' in instruction: # actions[instruction['order']] = ('vlan', None) # elif 'set-field' in instruction: # actions[instruction['order']] = ('vlan', instruction['set-field']['vlan-match']['vlan-id']['vlan-id']) # #actions = [x for x in actions if x != None] # -> output-action -> output-node-connector # -> pop-vlan-action return 0, rules except (requests.exceptions.RequestException, __HOLE__) as e: #ValueError in the case that JSON can not be decoded error_text = type(e).__name__ + ": " + str(e) self.logger.error("get_of_rules " + error_text) return -1, error_text
ValueError
dataset/ETHPy150Open nfvlabs/openmano/openvim/ODL.py/OF_conn.get_of_rules
6,626
def build_filter(self, filter_expr, branch_negated=False, current_negated=False, can_reuse=None): """ Builds a WhereNode for a single filter clause, but doesn't add it to this Query. Query.add_q() will then add this filter to the where or having Node. The 'branch_negated' tells us if the current branch contains any negations. This will be used to determine if subqueries are needed. The 'current_negated' is used to determine if the current filter is negated or not and this will be used to determine if IS NULL filtering is needed. The difference between current_netageted and branch_negated is that branch_negated is set on first negation, but current_negated is flipped for each negation. Note that add_filter will not do any negating itself, that is done upper in the code by add_q(). The 'can_reuse' is a set of reusable joins for multijoins. The method will create a filter clause that can be added to the current query. However, if the filter isn't added to the query then the caller is responsible for unreffing the joins used. """ arg, value = filter_expr parts = arg.split(LOOKUP_SEP) if not parts: raise FieldError("Cannot parse keyword query %r" % arg) # Work out the lookup type and remove it from the end of 'parts', # if necessary. lookup_type = 'exact' # Default lookup type num_parts = len(parts) if (len(parts) > 1 and parts[-1] in self.query_terms and arg not in self.aggregates): # Traverse the lookup query to distinguish related fields from # lookup types. lookup_model = self.model for counter, field_name in enumerate(parts): try: lookup_field = lookup_model._meta.get_field(field_name) except FieldDoesNotExist: # Not a field. Bail out. lookup_type = parts.pop() break # Unless we're at the end of the list of lookups, let's attempt # to continue traversing relations. if (counter + 1) < num_parts: try: lookup_model = lookup_field.rel.to except __HOLE__: # Not a related field. Bail out. 
lookup_type = parts.pop() break clause = self.where_class() # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all # uses of None as a query value. if value is None: if lookup_type != 'exact': raise ValueError("Cannot use None as a query value") lookup_type = 'isnull' value = True elif callable(value): value = value() elif isinstance(value, ExpressionNode): # If value is a query expression, evaluate it value = SQLEvaluator(value, self, reuse=can_reuse) # For Oracle '' is equivalent to null. The check needs to be done # at this stage because join promotion can't be done at compiler # stage. Using DEFAULT_DB_ALIAS isn't nice, but it is the best we # can do here. Similar thing is done in is_nullable(), too. if (connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and lookup_type == 'exact' and value == ''): value = True lookup_type = 'isnull' for alias, aggregate in self.aggregates.items(): if alias in (parts[0], LOOKUP_SEP.join(parts)): clause.add((aggregate, lookup_type, value), AND) return clause opts = self.get_meta() alias = self.get_initial_alias() allow_many = not branch_negated try: field, sources, opts, join_list, path = self.setup_joins( parts, opts, alias, can_reuse, allow_many, allow_explicit_fk=True) if can_reuse is not None: can_reuse.update(join_list) # split_exclude() needs to know which joins were generated for the # lookup parts self._lookup_joins = join_list except MultiJoin as e: return self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]), can_reuse, e.names_with_path) if (lookup_type == 'isnull' and value is True and not current_negated and len(join_list) > 1): # If the comparison is against NULL, we may need to use some left # outer joins when creating the join chain. This is only done when # needed, as it's less efficient at the database level. self.promote_joins(join_list) # Process the join list to see if we can remove any inner joins from # the far end (fewer tables in a query is better). 
Note that join # promotion must happen before join trimming to have the join type # information available when reusing joins. targets, alias, join_list = self.trim_joins(sources, join_list, path) if hasattr(field, 'get_lookup_constraint'): constraint = field.get_lookup_constraint(self.where_class, alias, targets, sources, lookup_type, value) else: constraint = (Constraint(alias, targets[0].column, field), lookup_type, value) clause.add(constraint, AND) if current_negated and (lookup_type != 'isnull' or value is False): self.promote_joins(join_list) if (lookup_type != 'isnull' and ( self.is_nullable(targets[0]) or self.alias_map[join_list[-1]].join_type == self.LOUTER)): # The condition added here will be SQL like this: # NOT (col IS NOT NULL), where the first NOT is added in # upper layers of code. The reason for addition is that if col # is null, then col != someval will result in SQL "unknown" # which isn't the same as in Python. The Python None handling # is wanted, and it can be gotten by # (col IS NULL OR col != someval) # <=> # NOT (col IS NOT NULL AND col = someval). clause.add((Constraint(alias, targets[0].column, None), 'isnull', False), AND) return clause
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/sql/query.py/Query.build_filter
6,627
def get_loaded_field_names(self): """ If any fields are marked to be deferred, returns a dictionary mapping models to a set of names in those fields that will be loaded. If a model is not in the returned dictionary, none of it's fields are deferred. If no fields are marked for deferral, returns an empty dictionary. """ # We cache this because we call this function multiple times # (compiler.fill_related_selections, query.iterator) try: return self._loaded_field_names_cache except __HOLE__: collection = {} self.deferred_to_data(collection, self.get_loaded_field_names_cb) self._loaded_field_names_cache = collection return collection
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Django-1.6.10/django/db/models/sql/query.py/Query.get_loaded_field_names
6,628
def _create_hosts_whitelist(host_list, port): if not host_list: return ['localhost:' + str(port)] hosts = [] for host in host_list: parts = host.split(':') if len(parts) == 1: if parts[0] == "": raise ValueError("Empty host value") hosts.append(host+":80") elif len(parts) == 2: try: int(parts[1]) except __HOLE__: raise ValueError("Invalid port in host value: %s" % host) if parts[0] == "": raise ValueError("Empty host value") hosts.append(host) else: raise ValueError("Invalid host value: %s" % host) return hosts
ValueError
dataset/ETHPy150Open bokeh/bokeh/bokeh/server/server.py/_create_hosts_whitelist
6,629
def __init__(self, applications, **kwargs): log.info("Starting Bokeh server version %s" % __version__) if isinstance(applications, Application): self._applications = { '/' : applications } else: self._applications = applications tornado_kwargs = { key: kwargs[key] for key in ['io_loop', 'develop', 'extra_patterns', 'secret_key', 'sign_sessions', 'generate_session_ids', 'keep_alive_milliseconds', 'check_unused_sessions_milliseconds', 'unused_session_lifetime_milliseconds', 'stats_log_frequency_milliseconds'] if key in kwargs } prefix = kwargs.get('prefix', None) if prefix is None: prefix = "" prefix = prefix.strip("/") if prefix: prefix = "/" + prefix self._prefix = prefix self._port = DEFAULT_SERVER_PORT if 'port' in kwargs: self._port = kwargs['port'] tornado_kwargs['hosts'] = _create_hosts_whitelist(kwargs.get('host', None), self._port) tornado_kwargs['extra_websocket_origins'] = _create_hosts_whitelist(kwargs.get('allow_websocket_origin', None), self._port) self._tornado = BokehTornado(self._applications, self.prefix, **tornado_kwargs) self._http = HTTPServer(self._tornado, xheaders=kwargs.get('use_xheaders', False)) self._address = None if 'address' in kwargs: self._address = kwargs['address'] # these queue a callback on the ioloop rather than # doing the operation immediately (I think - havocp) try: self._http.bind(self._port, address=self._address) self._http.start(1) except __HOLE__ as e: import errno if e.errno == errno.EADDRINUSE: log.critical("Cannot start Bokeh server, port %s is already in use", self._port) elif e.errno == errno.EADDRNOTAVAIL: log.critical("Cannot start Bokeh server, address '%s' not available", self._address) else: codename = errno.errorcode[e.errno] log.critical("Cannot start Bokeh server, %s %r", codename, e) sys.exit(1)
OSError
dataset/ETHPy150Open bokeh/bokeh/bokeh/server/server.py/Server.__init__
6,630
def titlecase_filter(text): """Support for titlecase.py's titlecasing >>> titlecase("this V that") u'This v That' >>> titlecase("this is just an example.com") u'This Is Just an example.com' """ text = force_unicode(text) try: import titlecase except __HOLE__: if settings.DEBUG: raise template.TemplateSyntaxError, "Error in {% titlecase %} filter: The titlecase.py library isn't installed." return text else: return titlecase.titlecase(text)
ImportError
dataset/ETHPy150Open callowayproject/django-typogrify/typogrify/templatetags/typogrify.py/titlecase_filter
6,631
def get_profile(self): """ Returns site-specific profile for this user. Raises SiteProfileNotAvailable if this site does not allow profiles. """ if not hasattr(self, '_profile_cache'): from django.conf import settings if not settings.AUTH_PROFILE_MODULE: raise SiteProfileNotAvailable try: app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.') model = models.get_model(app_label, model_name) self._profile_cache = model._default_manager.get(user__id__exact=self.id) except (__HOLE__, ImproperlyConfigured): raise SiteProfileNotAvailable return self._profile_cache
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/contrib/auth/models.py/User.get_profile
6,632
def _resources_from_module(module): for name in dir(module): o = getattr(module, name) try: base_classes = [ToolsResource, ToolsModelResource, TastyResource, TastyModelResource] is_base_class = o in base_classes is_resource_class = issubclass(o, TastyResource) if is_resource_class and not is_base_class: yield o except __HOLE__: pass
TypeError
dataset/ETHPy150Open mozilla/inventory/vendor-local/src/django-tastytools/tastytools/api.py/_resources_from_module
6,633
def _testdata_from_module(module): for name in dir(module): o = getattr(module, name) try: base_classes = [ResourceTestData] is_base_class = o in base_classes is_testdata_class = issubclass(o, ResourceTestData) if is_testdata_class and not is_base_class: yield o except __HOLE__: pass
TypeError
dataset/ETHPy150Open mozilla/inventory/vendor-local/src/django-tastytools/tastytools/api.py/_testdata_from_module
6,634
def invalidate(cls, widget): """ Remove all canvases cached for widget. """ try: for ref in cls._widgets[widget].values(): try: del cls._refs[ref] except KeyError: pass del cls._widgets[widget] except KeyError: pass if widget not in cls._deps: return dependants = cls._deps.get(widget, []) try: del cls._deps[widget] except __HOLE__: pass for w in dependants: cls.invalidate(w)
KeyError
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/canvas.py/CanvasCache.invalidate
6,635
def cleanup(cls, ref): cls.cleanups += 1 # collect stats w = cls._refs.get(ref, None) del cls._refs[ref] if not w: return widget, wcls, size, focus = w sizes = cls._widgets.get(widget, None) if not sizes: return try: del sizes[(wcls, size, focus)] except __HOLE__: pass if not sizes: try: del cls._widgets[widget] del cls._deps[widget] except KeyError: pass
KeyError
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/canvas.py/CanvasCache.cleanup
6,636
def set_cursor(self, c): if self.widget_info and self.cacheable: raise self._finalized_error if c is None: try: del self.coords["cursor"] except __HOLE__: pass return self.coords["cursor"] = c + (None,) # data part
KeyError
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/canvas.py/Canvas.set_cursor
6,637
def rows(self): for r,cv in self.shards: try: assert isinstance(r, int) except __HOLE__: raise AssertionError(r, cv) rows = sum([r for r,cv in self.shards]) assert isinstance(rows, int) return rows
AssertionError
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/canvas.py/CompositeCanvas.rows
6,638
def shard_body(cviews, shard_tail, create_iter=True, iter_default=None): """ Return a list of (done_rows, content_iter, cview) tuples for this shard and shard tail. If a canvas in cviews is None (eg. when unchanged from shard_cviews_delta()) or if create_iter is False then no iterator is created for content_iter. iter_default is the value used for content_iter when no iterator is created. """ col = 0 body = [] # build the next shard tail cviews_iter = iter(cviews) for col_gap, done_rows, content_iter, tail_cview in shard_tail: while col_gap: try: cview = cviews_iter.next() except __HOLE__: raise CanvasError("cviews do not fill gaps in" " shard_tail!") (trim_left, trim_top, cols, rows, attr_map, canv) = \ cview[:6] col += cols col_gap -= cols if col_gap < 0: raise CanvasError("cviews overflow gaps in" " shard_tail!") if create_iter and canv: new_iter = canv.content(trim_left, trim_top, cols, rows, attr_map) else: new_iter = iter_default body.append((0, new_iter, cview)) body.append((done_rows, content_iter, tail_cview)) for cview in cviews_iter: (trim_left, trim_top, cols, rows, attr_map, canv) = \ cview[:6] if create_iter and canv: new_iter = canv.content(trim_left, trim_top, cols, rows, attr_map) else: new_iter = iter_default body.append((0, new_iter, cview)) return body
StopIteration
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/canvas.py/shard_body
6,639
def shards_join(shard_lists): """ Return the result of joining shard lists horizontally. All shards lists must have the same number of rows. """ shards_iters = [iter(sl) for sl in shard_lists] shards_current = [i.next() for i in shards_iters] new_shards = [] while True: new_cviews = [] num_rows = min([r for r,cv in shards_current]) shards_next = [] for rows, cviews in shards_current: if cviews: new_cviews.extend(cviews) shards_next.append((rows - num_rows, None)) shards_current = shards_next new_shards.append((num_rows, new_cviews)) # advance to next shards try: for i in range(len(shards_current)): if shards_current[i][0] > 0: continue shards_current[i] = shards_iters[i].next() except __HOLE__: break return new_shards
StopIteration
dataset/ETHPy150Open AnyMesh/anyMesh-Python/example/urwid/canvas.py/shards_join
6,640
def _profile(f = None): import resource def cpu(): return (resource.getrusage(resource.RUSAGE_SELF).ru_utime + resource.getrusage(resource.RUSAGE_SELF).ru_stime) from cProfile import Profile prof = Profile(cpu) try: prof = prof.runctx("_dispatch(f)", globals(), locals()) except __HOLE__: pass import pstats stats = pstats.Stats(prof) stats.strip_dirs() stats.sort_stats('time') stats.print_stats(20)
SystemExit
dataset/ETHPy150Open concurrence/concurrence/lib/concurrence/core.py/_profile
6,641
def run(self): from easydojo.panel import server try: if self.config['port']: server.main(int(self.config['port'])) else: server.main() except __HOLE__: puts(colored.red('Port must be a integer')) sys.exit(1)
ValueError
dataset/ETHPy150Open fabiocerqueira/easydojo/easydojo/commands.py/PanelCommand.run
6,642
def run(self): if not os.path.exists(self.dojo_file): puts(colored.red("This path isn't an easydojo path")) puts("Use:") with indent(4): puts("easy_dojo init <name>") sys.exit(1) event_handler = handlers.DojoEventHandler(self.config) observer = Observer() observer.schedule(event_handler, os.getcwd(), recursive=True) observer.start() try: while True: time.sleep(1) except __HOLE__: observer.stop() observer.join()
KeyboardInterrupt
dataset/ETHPy150Open fabiocerqueira/easydojo/easydojo/commands.py/WatchCommand.run
6,643
def _getPython2ExePathWindows(): # Shortcuts for the default installation directories, to avoid going to # registry at all unless necessary. Any Python2 will do for Scons, so it # can be avoided. if os.path.isfile(r"c:\Python27\python.exe"): return r"c:\Python27\python.exe" elif os.path.isfile(r"c:\Python26\python.exe"): return r"c:\Python26\python.exe" # Windows only code, pylint: disable=E0602,F0401,I0021 try: import _winreg as winreg except __HOLE__: import winreg # lint:ok for search in ("2.7", "2.6"): for hkey_branch in (winreg.HKEY_LOCAL_MACHINE, winreg.HKEY_CURRENT_USER): for arch_key in 0, winreg.KEY_WOW64_32KEY, winreg.KEY_WOW64_64KEY: try: key = winreg.OpenKey( hkey_branch, r"SOFTWARE\Python\PythonCore\%s\InstallPath" % search, 0, winreg.KEY_READ | arch_key ) return Utils.joinpath( winreg.QueryValue(key, ""), "python.exe" ) except WindowsError: # @UndefinedVariable pass
ImportError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/build/SconsInterface.py/_getPython2ExePathWindows
6,644
def on_name_done(self, text): try: self.projectName = text; header = "Choose project destination:" self.window.show_input_panel(header, "", self.on_destination_done, None, None) except __HOLE__: pass
ValueError
dataset/ETHPy150Open fusetools/Fuse.SublimePlugin/fuse.py/CreateProjectCommand.on_name_done
6,645
def on_destination_done(self, text): try: text = text; if not os.path.exists(text): os.makedirs(text) proc = subprocess.Popen([getFusePathFromSettings(), "create", "app", self.projectName, text], shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) code = proc.wait() if code==0: data = { "folders" : [ { "path" : text + "/" + self.projectName } ] } self.window.set_project_data(data) else: out = "" for line in proc.stdout.readlines(): out += line.decode() error_message("Could not create project:\n"+out) except __HOLE__: pass
ValueError
dataset/ETHPy150Open fusetools/Fuse.SublimePlugin/fuse.py/CreateProjectCommand.on_destination_done
6,646
def on_done(self, file_name): try: log().info("Trying to create '" + self.full_path(file_name) + "'") args = [getFusePathFromSettings(), "create", self.targetTemplate, file_name, self.targetFolder] try: proc = subprocess.Popen(args, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except: gFuse.showFuseNotFound() return code = proc.wait() if code == 0: log().info("Succssfully created '" + self.full_path(file_name) + "'") if self.targetTemplate != "app": self.window.open_file(self.full_path(file_name)); else: out = "Could not create file:\n"; out += self.full_path(file_name) + "\n"; for line in proc.stdout.readlines(): out += line.decode() error_message(out) except __HOLE__: pass
ValueError
dataset/ETHPy150Open fusetools/Fuse.SublimePlugin/fuse.py/FuseCreate.on_done
6,647
@app.route('/webhooks/cloudwatch', methods=['OPTIONS', 'POST']) @cross_origin() @auth_required def cloudwatch(): hook_started = webhook_timer.start_timer() try: incomingAlert = parse_notification(request.data) except __HOLE__ as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 400 if g.get('customer', None): incomingAlert.customer = g.get('customer') try: alert = process_alert(incomingAlert) except RejectException as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 403 except Exception as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 500 webhook_timer.stop_timer(hook_started) if alert: body = alert.get_body() body['href'] = absolute_url('/alert/' + alert.id) return jsonify(status="ok", id=alert.id, alert=body), 201, {'Location': body['href']} else: return jsonify(status="error", message="insert or update of cloudwatch alarm failed"), 500
ValueError
dataset/ETHPy150Open guardian/alerta/alerta/app/webhooks/views.py/cloudwatch
6,648
@app.route('/webhooks/pingdom', methods=['OPTIONS', 'GET']) @cross_origin() @auth_required def pingdom(): hook_started = webhook_timer.start_timer() try: incomingAlert = parse_pingdom(request.args.get('message')) except __HOLE__ as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 400 if g.get('customer', None): incomingAlert.customer = g.get('customer') try: alert = process_alert(incomingAlert) except RejectException as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 403 except Exception as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 500 webhook_timer.stop_timer(hook_started) if alert: body = alert.get_body() body['href'] = absolute_url('/alert/' + alert.id) return jsonify(status="ok", id=alert.id, alert=body), 201, {'Location': body['href']} else: return jsonify(status="error", message="insert or update of pingdom check failed"), 500
ValueError
dataset/ETHPy150Open guardian/alerta/alerta/app/webhooks/views.py/pingdom
6,649
def parse_pagerduty(message): incident_key = message['data']['incident']['incident_key'] incident_number = message['data']['incident']['incident_number'] html_url = message['data']['incident']['html_url'] incident_url = '<a href="%s">#%s</a>' % (html_url, incident_number) try: alert = db.get_alerts(query={'attributes.incidentKey': incident_key}, limit=1)[0] except __HOLE__: raise from alerta.app import status_code if message['type'] == 'incident.trigger': status = status_code.OPEN user = message['data']['incident']['assigned_to_user']['name'] text = 'Incident %s assigned to %s' % (incident_url, user) elif message['type'] == 'incident.acknowledge': status = status_code.ACK user = message['data']['incident']['assigned_to_user']['name'] text = 'Incident %s acknowledged by %s' % (incident_url, user) elif message['type'] == 'incident.unacknowledge': status = status_code.OPEN text = 'Incident %s unacknowledged due to timeout' % incident_url elif message['type'] == 'incident.resolve': status = status_code.CLOSED if message['data']['incident']['resolved_by_user']: user = message['data']['incident']['resolved_by_user']['name'] else: user = 'n/a' text = 'Incident %s resolved by %s' % (incident_url, user) elif message['type'] == 'incident.assign': status = status_code.ASSIGN user = message['data']['incident']['assigned_to_user']['name'] text = 'Incident %s manually assigned to %s' % (incident_url, user) elif message['type'] == 'incident.escalate': status = status_code.OPEN user = message['data']['incident']['assigned_to_user']['name'] text = 'Incident %s escalated to %s' % (incident_url, user) elif message['type'] == 'incident.delegate': status = status_code.OPEN user = message['data']['incident']['assigned_to_user']['name'] text = 'Incident %s reassigned due to escalation to %s' % (incident_url, user) else: status = status_code.UNKNOWN text = message['type'] return alert.id, status, text
IndexError
dataset/ETHPy150Open guardian/alerta/alerta/app/webhooks/views.py/parse_pagerduty
6,650
@app.route('/webhooks/pagerduty', methods=['OPTIONS', 'POST']) @cross_origin() @auth_required def pagerduty(): hook_started = webhook_timer.start_timer() data = request.json if data and 'messages' in data: for message in data['messages']: try: id, status, text = parse_pagerduty(message) except __HOLE__ as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 400 try: alert = db.set_status(id=id, status=status, text=text) except Exception as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 500 else: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message="no messages in PagerDuty data payload"), 400 webhook_timer.stop_timer(hook_started) if alert: return jsonify(status="ok"), 200 else: return jsonify(status="error", message="update PagerDuty incident status failed"), 500
IndexError
dataset/ETHPy150Open guardian/alerta/alerta/app/webhooks/views.py/pagerduty
6,651
@app.route('/webhooks/prometheus', methods=['OPTIONS', 'POST']) @cross_origin() @auth_required def prometheus(): if request.json and 'alerts' in request.json: hook_started = webhook_timer.start_timer() status = request.json['status'] for alert in request.json['alerts']: try: incomingAlert = parse_prometheus(status, alert) except __HOLE__ as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 400 if g.get('customer', None): incomingAlert.customer = g.get('customer') try: process_alert(incomingAlert) except RejectException as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 403 except Exception as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 500 webhook_timer.stop_timer(hook_started) else: return jsonify(status="error", message="no alerts in Prometheus notification payload"), 400 return jsonify(status="ok"), 200
ValueError
dataset/ETHPy150Open guardian/alerta/alerta/app/webhooks/views.py/prometheus
6,652
def parse_stackdriver(notification): notification = json.loads(notification) incident = notification['incident'] state = incident['state'] if state == 'acknowledged': try: alert = db.get_alerts(query={'attributes.incidentId': incident['incident_id']}, limit=1)[0] except __HOLE__: raise ValueError('unknown Stackdriver Incident ID: %s' % incident['incident_id']) return state, alert else: if state == 'open': severity = 'critical' create_time = datetime.datetime.fromtimestamp(incident['started_at']) elif state == 'closed': severity = 'ok' create_time = datetime.datetime.fromtimestamp(incident['ended_at']) else: severity = 'indeterminate' create_time = None return state, Alert( resource=incident['resource_name'], event=incident['condition_name'], environment='Production', severity=severity, service=[incident['policy_name']], group='Cloud', text=incident['summary'], attributes={ 'incidentId': incident['incident_id'], 'resourceId': incident['resource_id'], 'moreInfo': '<a href="%s" target="_blank">Stackdriver Console</a>' % incident['url'] }, origin='Stackdriver', event_type='stackdriverAlert', create_time=create_time, raw_data=notification )
IndexError
dataset/ETHPy150Open guardian/alerta/alerta/app/webhooks/views.py/parse_stackdriver
6,653
@app.route('/webhooks/stackdriver', methods=['OPTIONS', 'POST']) @cross_origin() @auth_required def stackdriver(): hook_started = webhook_timer.start_timer() try: state, incomingAlert = parse_stackdriver(request.data) except __HOLE__ as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 400 if g.get('customer', None): incomingAlert.customer = g.get('customer') if state == 'acknowledged': try: alert = db.set_status(id=incomingAlert.id, status='ack', text='acknowledged via Stackdriver') except Exception as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 500 else: try: alert = process_alert(incomingAlert) except RejectException as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 403 except Exception as e: webhook_timer.stop_timer(hook_started) return jsonify(status="error", message=str(e)), 500 webhook_timer.stop_timer(hook_started) if alert: body = alert.get_body() body['href'] = absolute_url('/alert/' + alert.id) return jsonify(status="ok", id=alert.id, alert=body), 201, {'Location': body['href']} else: return jsonify(status="error", message="notification from stackdriver failed"), 500
ValueError
dataset/ETHPy150Open guardian/alerta/alerta/app/webhooks/views.py/stackdriver
6,654
def options(self, section): """Return a list of option names for the given section name.""" try: opts = self._sections[section].copy() except __HOLE__: raise NoSectionError(section) opts.update(self._defaults) if '__name__' in opts: del opts['__name__'] return opts.keys()
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/RawConfigParser.options
6,655
def read(self, filenames): """Read and parse a filename or a list of filenames. Files that cannot be opened are silently ignored; this is designed so that you can specify a list of potential configuration file locations (e.g. current directory, user's home directory, systemwide directory), and all existing configuration files in the list will be read. A single filename may also be given. Return list of successfully read files. """ if isinstance(filenames, basestring): filenames = [filenames] read_ok = [] for filename in filenames: try: fp = open(filename) except __HOLE__: continue self._read(fp, filename) fp.close() read_ok.append(filename) return read_ok
IOError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/RawConfigParser.read
6,656
def readfp(self, fp, filename=None): """Like read() but the argument must be a file-like object. The `fp' argument must have a `readline' method. Optional second argument is the `filename', which if not given, is taken from fp.name. If fp has no `name' attribute, `<???>' is used. """ if filename is None: try: filename = fp.name except __HOLE__: filename = '<???>' self._read(fp, filename)
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/RawConfigParser.readfp
6,657
def items(self, section): try: d2 = self._sections[section] except __HOLE__: if section != DEFAULTSECT: raise NoSectionError(section) d2 = self._dict() d = self._defaults.copy() d.update(d2) if "__name__" in d: del d["__name__"] return d.items()
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/RawConfigParser.items
6,658
def set(self, section, option, value): """Set an option.""" if not section or section == DEFAULTSECT: sectdict = self._defaults else: try: sectdict = self._sections[section] except __HOLE__: raise NoSectionError(section) sectdict[self.optionxform(option)] = value
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/RawConfigParser.set
6,659
def remove_option(self, section, option): """Remove an option.""" if not section or section == DEFAULTSECT: sectdict = self._defaults else: try: sectdict = self._sections[section] except __HOLE__: raise NoSectionError(section) option = self.optionxform(option) existed = option in sectdict if existed: del sectdict[option] return existed
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/RawConfigParser.remove_option
6,660
def get(self, section, option, raw=False, vars=None): """Get an option value for a given section. All % interpolations are expanded in the return values, based on the defaults passed into the constructor, unless the optional argument `raw' is true. Additional substitutions may be provided using the `vars' argument, which must be a dictionary whose contents overrides any pre-existing defaults. The section DEFAULT is special. """ d = self._defaults.copy() try: d.update(self._sections[section]) except __HOLE__: if section != DEFAULTSECT: raise NoSectionError(section) # Update with the entry specific variables if vars: for key, value in vars.items(): d[self.optionxform(key)] = value option = self.optionxform(option) try: value = d[option] except KeyError: raise NoOptionError(option, section) if raw: return value else: return self._interpolate(section, option, value, d)
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/ConfigParser.get
6,661
def items(self, section, raw=False, vars=None): """Return a list of tuples with (name, value) for each option in the section. All % interpolations are expanded in the return values, based on the defaults passed into the constructor, unless the optional argument `raw' is true. Additional substitutions may be provided using the `vars' argument, which must be a dictionary whose contents overrides any pre-existing defaults. The section DEFAULT is special. """ d = self._defaults.copy() try: d.update(self._sections[section]) except __HOLE__: if section != DEFAULTSECT: raise NoSectionError(section) # Update with the entry specific variables if vars: for key, value in vars.items(): d[self.optionxform(key)] = value options = d.keys() if "__name__" in options: options.remove("__name__") if raw: return [(option, d[option]) for option in options] else: return [(option, self._interpolate(section, option, d[option], d)) for option in options]
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/ConfigParser.items
6,662
def _interpolate(self, section, option, rawval, vars): # do the string interpolation value = rawval depth = MAX_INTERPOLATION_DEPTH while depth: # Loop through this until it's done depth -= 1 if "%(" in value: value = self._KEYCRE.sub(self._interpolation_replace, value) try: value = value % vars except __HOLE__, e: raise InterpolationMissingOptionError( option, section, rawval, e.args[0]) else: break if "%(" in value: raise InterpolationDepthError(option, section, rawval) return value
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/ConfigParser._interpolate
6,663
def _interpolate_some(self, option, accum, rest, section, map, depth): if depth > MAX_INTERPOLATION_DEPTH: raise InterpolationDepthError(option, section, rest) while rest: p = rest.find("%") if p < 0: accum.append(rest) return if p > 0: accum.append(rest[:p]) rest = rest[p:] # p is no longer used c = rest[1:2] if c == "%": accum.append("%") rest = rest[2:] elif c == "(": m = self._interpvar_re.match(rest) if m is None: raise InterpolationSyntaxError(option, section, "bad interpolation variable reference %r" % rest) var = self.optionxform(m.group(1)) rest = rest[m.end():] try: v = map[var] except __HOLE__: raise InterpolationMissingOptionError( option, section, rest, var) if "%" in v: self._interpolate_some(option, accum, v, section, map, depth + 1) else: accum.append(v) else: raise InterpolationSyntaxError( option, section, "'%%' must be followed by '%%' or '(', found: %r" % (rest,))
KeyError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/ConfigParser.py/SafeConfigParser._interpolate_some
6,664
def process_response(self, request, response): """ If request.session was modified, or if the configuration is to save the session every time, save the changes and set a session cookie. """ try: accessed = request.session.accessed modified = request.session.modified except __HOLE__: pass else: if accessed: patch_vary_headers(response, ('Cookie',)) if modified or settings.SESSION_SAVE_EVERY_REQUEST: if request.session.get_expire_at_browser_close(): max_age = None expires = None else: max_age = request.session.get_expiry_age() expires_time = time.time() + max_age expires = cookie_date(expires_time) # Save the session data and refresh the client cookie. # Skip session save for 500 responses, refs #3881. if response.status_code != 500: request.session.save() host = request.get_host().split(':')[0] session_cookie_domain = settings.SESSION_COOKIE_DOMAIN if host.endswith(".e.zulip.com"): session_cookie_domain = ".e.zulip.com" response.set_cookie(settings.SESSION_COOKIE_NAME, request.session.session_key, max_age=max_age, expires=expires, domain=session_cookie_domain, path=settings.SESSION_COOKIE_PATH, secure=settings.SESSION_COOKIE_SECURE or None, httponly=settings.SESSION_COOKIE_HTTPONLY or None) return response
AttributeError
dataset/ETHPy150Open zulip/zulip/zerver/middleware.py/SessionHostDomainMiddleware.process_response
6,665
def get_current_page(request, objs, obj, amount): """Returns the current page of obj within objs. """ try: page = int((request.POST if request.method == 'POST' else request.GET).get("page")) except TypeError: try: idx = tuple(objs).index(obj) except __HOLE__: page = 1 else: page = int(idx / amount) + 1 return page
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/utils.py/get_current_page
6,666
def test_invalid_routers(self): try: self.state._update_network_status('''ns/all= r fake YkkmgCNRV1/35OPWDvo7+1bmfoo tanLV/4ZfzpYQW0xtGFqAa46foo 2011-12-12 16:29:16 12.45.56.78 443 80 r fake YkkmgCNRV1/35OPWDvo7+1bmfoo tanLV/4ZfzpYQW0xtGFqAa46foo 2011-12-12 16:29:16 12.45.56.78 443 80 s Exit Fast Guard HSDir Named Running Stable V2Dir Valid FutureProof w Bandwidth=518000 p accept 43,53,79-81,110,143,194,220,443,953,989-990,993,995,1194,1293,1723,1863,2082-2083,2086-2087,2095-2096,3128,4321,5050,5190,5222-5223,6679,6697,7771,8000,8008,8080-8081,8090,8118,8123,8181,8300,8443,8888 .''') self.fail() except __HOLE__, e: self.assertTrue('"s "' in str(e))
RuntimeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torstate.py/StateTests.test_invalid_routers
6,667
def test_build_circuit_error(self): """ tests that we check the callback properly """ try: self.state._find_circuit_after_extend("FOO 1234") self.assertTrue(False) except __HOLE__, e: self.assertTrue('Expected EXTENDED' in str(e))
RuntimeError
dataset/ETHPy150Open meejah/txtorcon/test/test_torstate.py/StateTests.test_build_circuit_error
6,668
def pid_exists(pid): """Check whether pid exists in the current process table.""" if pid < 0: return False try: os.kill(pid, 0) except __HOLE__ as e: return e.errno == errno.EPERM else: return True
OSError
dataset/ETHPy150Open limodou/uliweb/uliweb/utils/process.py/pid_exists
6,669
def wait_pid(pid, timeout=None, callback=None): """Wait for process with pid 'pid' to terminate and return its exit status code as an integer. If pid is not a children of os.getpid() (current process) just waits until the process disappears and return None. If pid does not exist at all return None immediately. Raise TimeoutExpired on timeout expired (if specified). """ def check_timeout(delay): if timeout is not None: if time.time() >= stop_at: if callback: callback(pid) else: raise TimeoutExpired time.sleep(delay) return min(delay * 2, 0.04) if timeout is not None: waitcall = lambda: os.waitpid(pid, os.WNOHANG) stop_at = time.time() + timeout else: waitcall = lambda: os.waitpid(pid, 0) delay = 0.0001 while 1: try: retpid, status = waitcall() except __HOLE__ as err: if err.errno == errno.EINTR: delay = check_timeout(delay) continue elif err.errno == errno.ECHILD: # This has two meanings: # - pid is not a child of os.getpid() in which case # we keep polling until it's gone # - pid never existed in the first place # In both cases we'll eventually return None as we # can't determine its exit status code. while 1: if pid_exists(pid): delay = check_timeout(delay) else: return else: raise else: if retpid == 0: # WNOHANG was used, pid is still running delay = check_timeout(delay) continue # process exited due to a signal; return the integer of # that signal if os.WIFSIGNALED(status): return os.WTERMSIG(status) # process exited using exit(2) system call; return the # integer exit(2) system call has been called with elif os.WIFEXITED(status): return os.WEXITSTATUS(status) else: # should never happen raise RuntimeError("unknown process exit status")
OSError
dataset/ETHPy150Open limodou/uliweb/uliweb/utils/process.py/wait_pid
6,670
def default(restart_cb=None, restart_func=None, close_fds=True): '''Sets up lazarus in default mode. See the :py:func:`custom` function for a more powerful mode of use. The default mode of lazarus is to watch all modules rooted at ``PYTHONPATH`` for changes and restart when they take place. Keyword arguments: restart_cb -- Callback invoked prior to restarting the process; allows for any cleanup to occur prior to restarting. Returning anything other than *None* in the callback will cancel the restart. restart_func -- Function invoked to restart the process. This supplants the default behavior of using *sys.executable* and *sys.argv*. close_fds -- Whether all file descriptors other than *stdin*, *stdout*, and *stderr* should be closed A simple example: >>> import lazarus >>> lazarus.default() >>> lazarus.stop() ''' if _active: msg = 'lazarus is already active' raise RuntimeWarning(msg) _python_path = os.getenv('PYTHONPATH') if not _python_path: msg = 'PYTHONPATH is not set' raise RuntimeError(msg) if restart_cb and not callable(restart_cb): msg = 'restart_cb keyword argument is not callable' raise TypeError(msg) if restart_func and not callable(restart_func): msg = 'restart_func keyword argument is not callable' raise TypeError(msg) global _close_fds _close_fds = close_fds try: from watchdog.observers import Observer from watchdog.events import FileSystemEventHandler except __HOLE__ as ie: msg = 'no watchdog support (%s)' % str(ie) raise RuntimeError(msg) class _Handler(FileSystemEventHandler): def __init__(self): self.active = True def dispatch(self, event): if not self.active: return super(_Handler, self).dispatch(event) def all_events(self, event): if is_restart_event(event): cancelled = _restart() if not cancelled: self.active = False def on_created(self, event): self.all_events(event) def on_deleted(self, event): self.all_events(event) def on_modified(self, event): self.all_events(event) def on_moved(self, event): self.all_events(event) global _observer 
_observer = Observer() handler = _Handler() _observer.schedule(handler, _python_path, recursive=True) global _restart_cb _restart_cb = restart_cb global _restart_func _restart_func = restart_func _activate() _observer.start()
ImportError
dataset/ETHPy150Open formwork-io/lazarus/lazarus/__init__.py/default
6,671
def custom(srcpaths, event_cb=None, poll_interval=1, recurse=True, restart_cb=None, restart_func=None, close_fds=True): '''Sets up lazarus in custom mode. See the :py:func:`default` function for a simpler mode of use. The custom mode of lazarus is to watch all modules rooted at any of the source paths provided for changes and restart when they take place. Keyword arguments: event_cb -- Callback invoked when a file rooted at a source path changes. Without specifying an event callback, changes to any module rooted at a source path will trigger a restart. poll_interval -- Rate at which changes will be detected. The default value of ``1`` means it may take up to one second to detect changes. Decreasing this value may lead to unnecessary overhead. recurse -- Whether to watch all subdirectories of every source path for changes or only the paths provided. restart_cb -- Callback invoked prior to restarting the process; allows for any cleanup to occur prior to restarting. Returning anything other than *None* in the callback will cancel the restart. restart_func -- Function invoked to restart the process. This supplants the default behavior of using *sys.executable* and *sys.argv*. close_fds -- Whether all file descriptors other than *stdin*, *stdout*, and *stderr* should be closed An example of using a cleanup function prior to restarting: >>> def cleanup(): ... pass >>> import lazarus >>> lazarus.custom(os.curdir, restart_cb=cleanup) >>> lazarus.stop() An example of avoiding restarts when any ``__main__.py`` changes: >>> def skip_main(event): ... if event.src_path == '__main__.py': ... return False ... 
return True >>> import lazarus >>> lazarus.custom(os.curdir, event_cb=skip_main) >>> lazarus.stop() ''' if _active: msg = 'lazarus is already active' raise RuntimeWarning(msg) if restart_cb and not callable(restart_cb): msg = 'restart_cb keyword argument is not callable' raise TypeError(msg) if restart_func and not callable(restart_func): msg = 'restart_func keyword argument is not callable' raise TypeError(msg) global _close_fds _close_fds = close_fds try: from watchdog.observers import Observer from watchdog.events import FileSystemEventHandler except __HOLE__ as ie: msg = 'no watchdog support (%s)' % str(ie) raise RuntimeError(msg) class _Handler(FileSystemEventHandler): def __init__(self): self.active = True def dispatch(self, event): if not self.active: return super(_Handler, self).dispatch(event) def all_events(self, event): # if caller wants event_cb control, defer _restart logic to them # (caller decides whether this is a restart event) if event_cb: if event_cb(event): cancelled = _restart() if not cancelled: self.active = False # use default is_restart_event logic elif is_restart_event(event): cancelled = _restart() if not cancelled: self.active = False self.active = False def on_created(self, event): self.all_events(event) def on_deleted(self, event): self.all_events(event) def on_modified(self, event): self.all_events(event) def on_moved(self, event): self.all_events(event) global _observer kwargs = {'timeout': poll_interval} _observer = Observer(**kwargs) global _restart_cb _restart_cb = restart_cb handler = _Handler() srcpaths = _as_list(srcpaths) kwargs = {} if recurse: kwargs['recursive'] = True for srcpath in srcpaths: _observer.schedule(handler, srcpath, **kwargs) _activate() _observer.start()
ImportError
dataset/ETHPy150Open formwork-io/lazarus/lazarus/__init__.py/custom
6,672
def simple_calculations(traj, arg1, simple_kwarg): if traj.v_idx == 0: # to shuffle runs time.sleep(0.1) rootlogger = get_root_logger() if not 'runs' in traj.res: traj.res.f_add_result_group('runs') rootlogger.info('>>>>>Starting Simple Calculations') my_dict = {} my_dict2={} param_dict=traj.parameters.f_to_dict(fast_access=True,short_names=False) for key in sorted(param_dict.keys())[0:5]: val = param_dict[key] if 'trial' in key: continue newkey = key.replace('.','_') my_dict[newkey] = str(val) my_dict2[newkey] = [str(val)+' juhu!'] my_dict['__FLOAT'] = 44.0 my_dict['__INT'] = 66 my_dict['__NPINT'] = np.int_(55) my_dict['__INTaRRAy'] = np.array([1,2,3]) my_dict['__FLOATaRRAy'] = np.array([1.0,2.0,41.0]) my_dict['__FLOATaRRAy_nested'] = np.array([np.array([1.0,2.0,41.0]),np.array([1.0,2.0,41.0])]) my_dict['__STRaRRAy'] = np.array(['sds','aea','sf']) my_dict['__LONG'] = compat.long_type(4266) my_dict['__UNICODE'] = u'sdfdsf' my_dict['__BYTES'] = b'zweiundvierzig' my_dict['__NUMPY_UNICODE'] = np.array([u'$%&ddss']) my_dict['__NUMPY_BYTES'] = np.array([b'zweiundvierzig']) keys = sorted(to_dict_wo_config(traj).keys()) for idx,key in enumerate(keys[0:5]): keys[idx] = key.replace('.', '_') listy=traj.f_add_result_group('List', comment='Im a result group') traj.f_add_result_group('Iwiiremainempty.yo', comment='Empty group!') traj.Iwiiremainempty.f_store_child('yo') traj.Iwiiremainempty.f_add_link('kkk',listy ) listy.f_add_link('hhh', traj.Iwiiremainempty) if not traj.Iwiiremainempty.kkk.v_full_name == traj.List.v_full_name: raise RuntimeError() if not traj.Iwiiremainempty.kkk.v_full_name == traj.List.hhh.kkk.v_full_name: raise RuntimeError() traj.f_add_result('runs.' 
+ traj.v_crun + '.ggg', 5555, comment='ladida') traj.res.runs.f_add_result(traj.v_crun + '.ggjg', 5555, comment='didili') traj.res.runs.f_add_result('hhg', 5555, comment='jjjj') traj.res.f_add_result(name='lll', comment='duh', data=444) x = traj.res.f_add_result(name='nested', comment='duh') x['nested0.nested1.nested2.nested3'] = 44 traj.res.f_add_result(name='test.$set.$', comment='duh', data=444) try: traj.f_add_config('teeeeest', 12) raise RuntimeError() except __HOLE__: pass # if not traj.f_contains('results.runs.' + traj.v_crun + '.ggjg', shortcuts=False): # raise RuntimeError() # if not traj.f_contains('results.runs.' + traj.v_crun + '.ggg', shortcuts=False): # raise RuntimeError() if not traj.f_contains('results.runs.' + traj.v_crun + '.hhg', shortcuts=False): raise RuntimeError() traj.f_add_result('List.Of.Keys', dict1=my_dict, dict2=my_dict2, comment='Test') traj.List.f_store_child('Of', recursive=True) traj.f_add_result('DictsNFrame', keys=keys, comment='A dict!') traj.f_add_result('ResMatrix',np.array([1.2,2.3]), comment='ResMatrix') traj.f_add_result('empty.stuff', (), [], {}, np.array([]), comment='empty stuff') #traj.f_add_derived_parameter('All.To.String', str(traj.f_to_dict(fast_access=True,short_names=False))) myframe = pd.DataFrame(data ={'TC1':[1,2,3],'TC2':['Waaa','',''],'TC3':[1.2,42.2,77]}) myseries = myframe['TC1'] mypanel = pd.Panel({'Item1' : pd.DataFrame(np.ones((4, 3))),'Item2' : pd.DataFrame(np.ones((4, 2)))}) # p4d = pd.Panel4D(np.random.randn(2, 2, 5, 4), # labels=['Label1','Label2'], # items=['Item1', 'Item2'], # major_axis=pd.date_range('1/1/2000', periods=5), # minor_axis=['A', 'B', 'C', 'D']) traj.f_add_result('myseries', myseries, comment='dd') traj.f_store_item('myseries') traj.f_add_result('mypanel', mypanel, comment='dd') #traj.f_add_result('mypanel4d', p4d, comment='dd') traj.f_get('DictsNFrame').f_set(myframe) traj.f_add_result('IStore.SimpleThings',1.0,3,np.float32(5.0), 'Iamstring', (1,2,3), [4,5,6], 
zwei=2).v_comment='test' traj.f_add_derived_parameter('super.mega',33, comment='It is huuuuge!') traj.super.f_set_annotations(AgainATestAnnotations='I am a string!111elf') traj.f_add_result(PickleResult,'pickling.result.proto1', my_dict2, protocol=1, comment='p1') traj.f_add_result(PickleResult,'pickling.result.proto2', my_dict2, protocol=2, comment='p2') traj.f_add_result(PickleResult,'pickling.result.proto0', my_dict2, protocol=0, comment='p0') traj.f_add_result(SparseResult, 'sparse.csc', traj.csc_mat, 42).v_comment='sdsa' traj.f_add_result(SparseResult, 'sparse.bsr', traj.bsr_mat, 52).v_comment='sdsa' traj.f_add_result(SparseResult, 'sparse.csr', traj.csr_mat, 62).v_comment='sdsa' traj.f_add_result(SparseResult, 'sparse.dia', traj.dia_mat, 72).v_comment='sdsa' traj.sparse.v_comment = 'I contain sparse data!' myobjtab = ObjectTable(data={'strings':['a','abc','qwertt'], 'ints':[1,2,3]}) traj.f_add_result('object.table', myobjtab, comment='k').v_annotations.f_set(test=42) traj.object.f_set_annotations(test2=42.42) traj.f_add_result('$.here', 77, comment='huhu') traj.f_add_result('tor.hot.$', dollah=77, comment='duh!') traj.f_add_result('tor.hot.rrr.$.j', 77, comment='duh!') traj.f_add_result('tor.hot.rrr.crun.jjj', 777, comment='duh**2!') #traj.f_add_result('PickleTerror', result_type=PickleResult, test=traj.SimpleThings) rootlogger.info('<<<<<<Finished Simple Calculations') # let's see if the traj can also always be returned if isinstance(traj.v_storage_service, LockWrapper): traj.v_storage_service.pickle_lock = False return 42, traj
TypeError
dataset/ETHPy150Open SmokinCaterpillar/pypet/pypet/tests/testutils/data.py/simple_calculations
6,673
def _get_loss_function(self, loss): """Get concrete ``LossFunction`` object for str ``loss``. """ try: loss_ = self.loss_functions[loss] loss_class, args = loss_[0], loss_[1:] if loss in ('huber', 'epsilon_insensitive', 'squared_epsilon_insensitive'): args = (self.epsilon, ) return loss_class(*args) except __HOLE__: raise ValueError("The loss %s is not supported. " % loss)
KeyError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/linear_model/stochastic_gradient.py/BaseSGD._get_loss_function
6,674
def _get_learning_rate_type(self, learning_rate): try: return LEARNING_RATE_TYPES[learning_rate] except __HOLE__: raise ValueError("learning rate %s " "is not supported. " % learning_rate)
KeyError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/linear_model/stochastic_gradient.py/BaseSGD._get_learning_rate_type
6,675
def _get_penalty_type(self, penalty): penalty = str(penalty).lower() try: return PENALTY_TYPES[penalty] except __HOLE__: raise ValueError("Penalty %s is not supported. " % penalty)
KeyError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/linear_model/stochastic_gradient.py/BaseSGD._get_penalty_type
6,676
def test_state_crud(self): saved = ActionExecutionStateTests._create_save_actionstate() retrieved = ActionExecutionState.get_by_id(saved.id) self.assertDictEqual(saved.query_context, retrieved.query_context) self.assertEqual(saved.query_module, retrieved.query_module) ActionExecutionStateTests._delete(model_objects=[retrieved]) try: retrieved = ActionExecutionState.get_by_id(saved.id) except __HOLE__: retrieved = None self.assertIsNone(retrieved, 'managed to retrieve after failure.')
ValueError
dataset/ETHPy150Open StackStorm/st2/st2common/tests/unit/test_db_action_state.py/ActionExecutionStateTests.test_state_crud
6,677
def type_of_target(y): """Determine the type of data indicated by target `y` Parameters ---------- y : array-like Returns ------- target_type : string One of: * 'continuous': `y` is an array-like of floats that are not all integers, and is 1d or a column vector. * 'continuous-multioutput': `y` is a 2d array of floats that are not all integers, and both dimensions are of size > 1. * 'binary': `y` contains <= 2 discrete values and is 1d or a column vector. * 'multiclass': `y` contains more than two discrete values, is not a sequence of sequences, and is 1d or a column vector. * 'multiclass-multioutput': `y` is a 2d array that contains more than two discrete values, is not a sequence of sequences, and both dimensions are of size > 1. * 'multilabel-indicator': `y` is a label indicator matrix, an array of two dimensions with at least two columns, and at most 2 unique values. * 'unknown': `y` is array-like but none of the above, such as a 3d array, sequence of sequences, or an array of non-sequence objects. 
Examples -------- >>> import numpy as np >>> type_of_target([0.1, 0.6]) 'continuous' >>> type_of_target([1, -1, -1, 1]) 'binary' >>> type_of_target(['a', 'b', 'a']) 'binary' >>> type_of_target([1.0, 2.0]) 'binary' >>> type_of_target([1, 0, 2]) 'multiclass' >>> type_of_target([1.0, 0.0, 3.0]) 'multiclass' >>> type_of_target(['a', 'b', 'c']) 'multiclass' >>> type_of_target(np.array([[1, 2], [3, 1]])) 'multiclass-multioutput' >>> type_of_target([[1, 2]]) 'multiclass-multioutput' >>> type_of_target(np.array([[1.5, 2.0], [3.0, 1.6]])) 'continuous-multioutput' >>> type_of_target(np.array([[0, 1], [1, 1]])) 'multilabel-indicator' """ valid = ((isinstance(y, (Sequence, spmatrix)) or hasattr(y, '__array__')) and not isinstance(y, string_types)) if not valid: raise ValueError('Expected array-like (array or non-string sequence), ' 'got %r' % y) if is_multilabel(y): return 'multilabel-indicator' try: y = np.asarray(y) except __HOLE__: # Known to fail in numpy 1.3 for array of arrays return 'unknown' # The old sequence of sequences format try: if (not hasattr(y[0], '__array__') and isinstance(y[0], Sequence) and not isinstance(y[0], string_types)): raise ValueError('You appear to be using a legacy multi-label data' ' representation. Sequence of sequences are no' ' longer supported; use a binary array or sparse' ' matrix instead.') except IndexError: pass # Invalid inputs if y.ndim > 2 or (y.dtype == object and len(y) and not isinstance(y.flat[0], string_types)): return 'unknown' # [[[1, 2]]] or [obj_1] and not ["label_1"] if y.ndim == 2 and y.shape[1] == 0: return 'unknown' # [[]] if y.ndim == 2 and y.shape[1] > 1: suffix = "-multioutput" # [[1, 2], [1, 2]] else: suffix = "" # [1, 2, 3] or [[1], [2], [3]] # check float and contains non-integer float values if y.dtype.kind == 'f' and np.any(y != y.astype(int)): # [.1, .2, 3] or [[.1, .2, 3]] or [[1., .2]] and not [1., 2., 3.] 
return 'continuous' + suffix if (len(np.unique(y)) > 2) or (y.ndim >= 2 and len(y[0]) > 1): return 'multiclass' + suffix # [1, 2, 3] or [[1., 2., 3]] or [[1, 2]] else: return 'binary' # [1, 2] or [["a"], ["b"]]
ValueError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/utils/multiclass.py/type_of_target
6,678
def put_profile(self, request_dict): #Get the profile, or if not already created, create one p,created = ActivityProfile.objects.get_or_create(profile_id=request_dict['params']['profileId'],activity_id=request_dict['params']['activityId']) # Profile being PUT is not json if "application/json" not in request_dict['headers']['CONTENT_TYPE']: try: profile = ContentFile(request_dict['profile'].read()) except: try: profile = ContentFile(request_dict['profile']) except: profile = ContentFile(str(request_dict['profile'])) # If a profile already existed with the profileId and activityId if not created: #If it already exists delete it etag.check_preconditions(request_dict,p, required=True) if p.profile: try: p.profile.delete() except __HOLE__: # probably was json before p.json_profile = {} self.save_non_json_profile(p, created, profile, request_dict) # Profile being PUT is json else: # If a profile already existed with the profileId and activityId (overwrite existing profile data) if not created: etag.check_preconditions(request_dict, p, required=True) the_profile = request_dict['profile'] p.json_profile = the_profile p.content_type = request_dict['headers']['CONTENT_TYPE'] p.etag = etag.create_tag(the_profile) #Set updated if 'updated' in request_dict['headers'] and request_dict['headers']['updated']: p.updated = request_dict['headers']['updated'] else: p.updated = datetime.datetime.utcnow().replace(tzinfo=utc) p.save()
OSError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/managers/ActivityProfileManager.py/ActivityProfileManager.put_profile
6,679
def get_profile_ids(self, activity_id, since=None): ids = [] #If there is a since param return all profileIds since then if since: try: # this expects iso6801 date/time format "2013-02-15T12:00:00+00:00" profs = ActivityProfile.objects.filter(updated__gte=since, activity_id=activity_id) except __HOLE__: err_msg = 'Since field is not in correct format for retrieval of activity profile IDs' raise ParamError(err_msg) ids = [p.profile_id for p in profs] else: #Return all IDs of profiles associated with this activity b/c there is no since param ids = ActivityProfile.objects.filter(activity_id=activity_id).values_list('profile_id', flat=True) return ids
ValidationError
dataset/ETHPy150Open adlnet/ADL_LRS/lrs/managers/ActivityProfileManager.py/ActivityProfileManager.get_profile_ids
6,680
def _get_config(self): '''Getting a config dictionary using the giving profile. See the profile list in conf/settings.py''' try: config = settings.DOJO_PROFILES[self.profile] return config except __HOLE__: pass return None
KeyError
dataset/ETHPy150Open klipstein/dojango/dojango/util/config.py/Config._get_config
6,681
def __init__(self, modifiers, key, *commands, **kwds): self.modifiers = modifiers self.key = key self.commands = commands self.desc = kwds.get("desc", "") if key not in xcbq.keysyms: raise utils.QtileError("Unknown key: %s" % key) self.keysym = xcbq.keysyms[key] try: self.modmask = utils.translate_masks(self.modifiers) except __HOLE__ as v: raise utils.QtileError(v)
KeyError
dataset/ETHPy150Open qtile/qtile/libqtile/config.py/Key.__init__
6,682
def __init__(self, modifiers, button, *commands, **kwargs): self.start = kwargs.get("start") self.focus = kwargs.get("focus", "before") self.modifiers = modifiers self.button = button self.commands = commands try: self.button_code = int(self.button.replace('Button', '')) self.modmask = utils.translate_masks(self.modifiers) except __HOLE__ as v: raise utils.QtileError(v)
KeyError
dataset/ETHPy150Open qtile/qtile/libqtile/config.py/Drag.__init__
6,683
def __init__(self, modifiers, button, *commands, **kwargs): self.focus = kwargs.get("focus", "before") self.modifiers = modifiers self.button = button self.commands = commands try: self.button_code = int(self.button.replace('Button', '')) self.modmask = utils.translate_masks(self.modifiers) except __HOLE__ as v: raise utils.QtileError(v)
KeyError
dataset/ETHPy150Open qtile/qtile/libqtile/config.py/Click.__init__
6,684
def __init__(self, title=None, wm_class=None, role=None, wm_type=None, wm_instance_class=None, net_wm_pid=None): if not title: title = [] if not wm_class: wm_class = [] if not role: role = [] if not wm_type: wm_type = [] if not wm_instance_class: wm_instance_class = [] if not net_wm_pid: net_wm_pid = [] try: net_wm_pid = list(map(int, net_wm_pid)) except __HOLE__: error = 'Invalid rule for net_wm_pid: "%s" '\ 'only ints allowed' % str(net_wm_pid) raise utils.QtileError(error) self._rules = [('title', t) for t in title] self._rules += [('wm_class', w) for w in wm_class] self._rules += [('role', r) for r in role] self._rules += [('wm_type', r) for r in wm_type] self._rules += [('wm_instance_class', w) for w in wm_instance_class] self._rules += [('net_wm_pid', w) for w in net_wm_pid]
ValueError
dataset/ETHPy150Open qtile/qtile/libqtile/config.py/Match.__init__
6,685
def load(self, ignore_not_found=False): """ Load each path in order. Remember paths already loaded and only load new ones. """ data = self.dict_class() for path in self.paths: if path in self.paths_loaded: continue try: with open(path, 'r') as file: path_data = yaml.load(file.read()) if path_data is not None: data = dict_merge(data, path_data) self.paths_loaded.add(path) except __HOLE__: if not ignore_not_found: raise ConfigNotFound("Config URL '%s' not found" % path) self.data = data
IOError
dataset/ETHPy150Open ooici/pyon/pyon/util/config.py/Config.load
6,686
def run(input=sys.stdin, output=sys.stdout):
    r"""CouchDB view function handler implementation for Python.

    :param input: the readable file-like object to read input from
    :param output: the writable file-like object to write output to
    :return: exit code (0 on clean shutdown, 1 on error)
    """
    functions = []

    def _writejson(obj):
        # Serialize one object per line and flush so CouchDB sees it.
        obj = json.encode(obj)
        if isinstance(obj, util.utype):
            obj = obj.encode('utf-8')
        output.write(obj)
        output.write(b'\n')
        output.flush()

    def _log(message):
        if not isinstance(message, util.strbase):
            message = json.encode(message)
        _writejson({'log': message})

    def reset(config=None):
        # Drop all compiled map functions.
        del functions[:]
        return True

    def add_fun(string):
        # Compile the user-supplied map function source.
        string = BOM_UTF8 + string.encode('utf-8')
        globals_ = {}
        try:
            util.pyexec(string, {'log': _log}, globals_)
        except Exception as e:
            return {'error': {
                'id': 'map_compilation_error',
                'reason': e.args[0]
            }}
        err = {'error': {
            'id': 'map_compilation_error',
            'reason': 'string must eval to a function '
                      '(ex: "def(doc): return 1")'
        }}
        # The source must define exactly one plain function.
        if len(globals_) != 1:
            return err
        function = list(globals_.values())[0]
        if type(function) is not FunctionType:
            return err
        functions.append(function)
        return True

    def map_doc(doc):
        results = []
        for function in functions:
            try:
                results.append([[key, value] for key, value in function(doc)])
            except Exception as e:
                # A failing map function yields an empty result set; the
                # traceback is forwarded to CouchDB's log.
                log.error('runtime error in map function: %s', e,
                          exc_info=True)
                results.append([])
                _log(traceback.format_exc())
        return results

    def reduce(*cmd, **kwargs):
        code = BOM_UTF8 + cmd[0][0].encode('utf-8')
        args = cmd[1]
        globals_ = {}
        try:
            util.pyexec(code, {'log': _log}, globals_)
        except Exception as e:
            log.error('runtime error in reduce function: %s', e,
                      exc_info=True)
            return {'error': {
                'id': 'reduce_compilation_error',
                'reason': e.args[0]
            }}
        err = {'error': {
            'id': 'reduce_compilation_error',
            'reason': 'string must eval to a function '
                      '(ex: "def(keys, values): return 1")'
        }}
        if len(globals_) != 1:
            return err
        function = list(globals_.values())[0]
        if type(function) is not FunctionType:
            return err

        rereduce = kwargs.get('rereduce', False)
        results = []
        if rereduce:
            # Re-reduce: args are previously reduced values, no keys.
            keys = None
            vals = args
        else:
            if args:
                keys, vals = zip(*args)
            else:
                keys, vals = [], []
        # A 3-argument reduce function also receives the rereduce flag.
        if util.funcode(function).co_argcount == 3:
            results = function(keys, vals, rereduce)
        else:
            results = function(keys, vals)
        return [True, [results]]

    def rereduce(*cmd):
        # Note: weird kwargs is for Python 2.5 compat
        return reduce(*cmd, **{'rereduce': True})

    handlers = {'reset': reset, 'add_fun': add_fun, 'map_doc': map_doc,
                'reduce': reduce, 'rereduce': rereduce}

    try:
        while True:
            line = input.readline()
            if not line:
                break
            try:
                cmd = json.decode(line)
                log.debug('Processing %r', cmd)
            except ValueError as e:
                log.error('Error: %s', e, exc_info=True)
                return 1
            else:
                retval = handlers[cmd[0]](*cmd[1:])
                log.debug('Returning %r', retval)
                _writejson(retval)
    except KeyboardInterrupt:
        # ^C is a clean shutdown request from the server.
        return 0
    except Exception as e:
        log.error('Error: %s', e, exc_info=True)
        return 1
KeyboardInterrupt
dataset/ETHPy150Open djc/couchdb-python/couchdb/view.py/run
6,687
def application(environ, start_response):
    """WSGI entry point: render and persist the Docent Learner admin page.

    Loads the JSON config file, applies any POSTed form fields, writes the
    config back when a form was submitted, and returns the admin HTML.
    """
    status = '200 OK'
    response_headers = [('Content-type', 'text/html; charset=utf-8')]
    message = ""
    config = {}
    # Load the existing configuration; report (but tolerate) any failure.
    try:
        config_file = open(configfile, 'r')
        config_records = json.load(config_file)
        config_file.close()
        config = config_records
        #message += str(config) + "<br>"
    except Exception as error:
        message += "Config file error: " + str(error) + "<br>"

    # Merge submitted form fields over the loaded config.
    form_data = cgi.FieldStorage(environ=environ, fp=environ['wsgi.input'])
    for key in form_data:
        value = form_data.getvalue(key)
        config[key] = value

    # More than just the hidden "test" field means a real submission.
    if len(form_data) > 1:
        try:
            config_file = open(configfile, 'w')
            #config_file.write(unicode(json.dumps(config,ensure_ascii=False)))
            json.dump(config, config_file)
            config_file.close()
            message += "Configuration updated.<br>"
        except IOError as e:
            message += "Unable to write config file.<br> %s" % str(e)

    # Count tagged textselect examples (one .json file per example).
    textselect_jsonfiles = [f for f in os.listdir(textselectdir)
                            if re.match(r'.*\.json$', f, re.IGNORECASE)]
    textselect_num_tagged = len(textselect_jsonfiles)

    imagequestions = ""
    tweetquestions = ""
    textinstructions = ""
    try:
        imagequestions = str(config['imagequestions'])
        tweetquestions = str(config['tweetquestions'])
        textinstructions = str(config['textinstructions'])
    except KeyError:
        message += "Configuration was incomplete.<br>"
        #message += str(config)

    html = "<html><title>Docent Learner Administration</title><h1>Docent Learner Administration</h1>"
    html += "<form action=\"/docent-learner/dl/admin/admin.py\" method=\"post\">"
    html += "<input type=\"hidden\" name=\"test\" value=\"bleh\">"
    html += """<hr><h2>Configure text select</h2>
<a href='/docent-learner/dl/textselect.py'>Go to text select.</a><br>
<br>
<textarea rows='3' cols='100' name='textinstructions'>%s</textarea>
<br><br>
There are currently %s tagged examples.
<br><br>
<a href="/docent-learner/dl/admin/buildtextselectmodel.py">Build textselect model</a>
""" % (textinstructions, str(textselect_num_tagged))
    html += """
<br><br>
<hr><h2>Configure the image tagger</h2>
<a href='/docent-learner/dl/images.py'>Go to image tagger.</a><br>
<br>
Image Questions (in html form code)<br>
<textarea rows='20' cols='100' name='imagequestions'>%s</textarea>
""" % (imagequestions)
    html += """
<br><br>
Image Mode: <br>
<input type="radio" name="imagemode" value="single" checked> Capture only one observation per image <br>
<input type="radio" name="imagemode" value="multiple" disabled> Capture multiple observations silently <br>
<input type="radio" name="imagemode" value="gamify" disabled> Capture multiple observations and gamify <br>
"""
    html += """
<br><br>
<hr><h2>Configure the tweet tagger</h2>
<a href='/docent-learner/dl/tweets.py'>Go to tweet tagger.</a><br>
<br>
Tweet Questions (in html form code)<br>
<textarea rows='20' cols='100' name='tweetquestions'>%s</textarea>
""" % (tweetquestions)
    html += """
<br><br>
Tweet Mode: <br>
<input type="radio" name="tweetmode" value="single" checked> Capture only one observation per image <br>
<input type="radio" name="tweetmode" value="multiple" disabled> Capture multiple observations silently <br>
<input type="radio" name="tweetmode" value="gamify" disabled> Capture multiple observations and gamify <br>
"""
    html += "<br><br><hr><input type=\"submit\" value=\"Save config\" style=\"width:200px; height:75px;\"><br>"
    html += "</form>"
    html += "<br><br>"
    html += message
    html += "</html>"
    start_response(status, response_headers)
    return [html]
IOError
dataset/ETHPy150Open ericwhyne/docent-learner/src/admin/admin.py/application
6,688
def do_cd(self, arg):
    '''Change selected IMAP folder.'''
    try:
        args = docopt.docopt('Usage: cd <directory>', arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    cd_result = imap_cli.change_dir(self.imap_account,
                                    directory=args['<directory>'])
    if cd_result == -1:
        sys.stdout.write('IMAP Folder can\'t be found\n')
    else:
        self.prompt = '(imap-cli "{}") '.format(args['<directory>'])
SystemExit
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_cd
6,689
def do_cp(self, arg):
    '''Copy mail from one mailbox to another.'''
    try:
        args = docopt.docopt('Usage: cp <dest> <mail_id>...', arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    copy.copy(self.imap_account, args['<mail_id>'], args['<dest>'])
SystemExit
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_cp
6,690
def do_flag(self, arg):
    '''Set or Unset flag on mails.'''
    try:
        args = docopt.docopt('\n'.join([
            'Usage: flag [options] <mail_id> <flag>',
            '',
            'Options:',
            '    -u, --unset    Remove flag instead of setting them',
            '    -h, --help     Show help options',
        ]), argv=arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    flag.flag(self.imap_account, [args['<mail_id>']], args['<flag>'],
              unset=args['--unset'])
SystemExit
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_flag
6,691
def do_list(self, arg):
    '''List mail in specified folder.'''
    try:
        args = docopt.docopt('\n'.join([
            'Usage: list [options] [<directory>]',
            '',
            'Options:',
            '    -l, --limit=<LIMIT>    Limit number of mail displayed',
            '    -h, --help             Show this message',
        ]), argv=arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    try:
        limit = int(args['--limit'] or 10)
    except ValueError:
        # Non-numeric --limit falls back to the default.
        limit = 10
    for mail_info in search.fetch_mails_info(self.imap_account, limit=limit):
        sys.stdout.write(
            u'UID : {:<10} From : {:<40.40} Subject : {:.50}\n'.format(
                mail_info['uid'], mail_info['from'], mail_info['subject']))
SystemExit
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_list
6,692
def do_mv(self, arg):
    '''Move mail from one mailbox to another.'''
    try:
        # NOTE(review): the usage string says "cp" — probably copied from
        # do_cp; kept as-is since it only affects docopt's help text.
        args = docopt.docopt('Usage: cp <dest> <mail_id>...', arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    # Move = copy to destination, flag original as deleted, expunge.
    copy.copy(self.imap_account, args['<mail_id>'], args['<dest>'])
    flag.flag(self.imap_account, args['<mail_id>'], [const.FLAG_DELETED])
    self.imap_account.expunge()
SystemExit
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_mv
6,693
def do_rm(self, arg):
    '''Remove mail from one mailbox.'''
    try:
        args = docopt.docopt('Usage: rm <mail_id>...', arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    # MOVE_TO_TRASH copies to the trash folder before deletion.
    if self.delete_conf['delete_method'] == 'MOVE_TO_TRASH':
        copy.copy(self.imap_account, args['<mail_id>'],
                  self.delete_conf['trash_directory'])
    flag.flag(self.imap_account, args['<mail_id>'], [const.FLAG_DELETED])
    if self.delete_conf['delete_method'] in ['MOVE_TO_TRASH', 'EXPUNGE']:
        self.imap_account.expunge()
SystemExit
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_rm
6,694
def do_read(self, arg):
    '''Read mail by uid.'''
    try:
        args = docopt.docopt(u'\n'.join([
            u'Usage: read [options] <mail_uid> [<save_directory>]',
            u'',
            u'Options:',
            u'    -b, --browser    Open mail in browser',
        ]), arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    fetched_mail = fetch.read(self.imap_account, args['<mail_uid>'],
                              save_directory=args['<save_directory>'])
    if fetched_mail is None:
        log.error("Mail was not fetched, an error occured")
        # Bail out: fetch.display(None) below would fail otherwise.
        return
    if args['--browser'] is True:
        # Render to a temp file so the browser can open it; delete=False
        # keeps the file alive after close for the browser to read.
        temp_file = tempfile.NamedTemporaryFile(delete=False)
        temp_file.write(fetch.display(fetched_mail,
                                      browser=True).encode('utf-8'))
        webbrowser.open_new_tab(temp_file.name)
        temp_file.close()
    else:
        sys.stdout.write(fetch.display(fetched_mail))
SystemExit
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_read
6,695
def do_search(self, arg):
    '''Search mail.'''
    usage = '\n'.join([
        'Usage: search [options]',
        '',
        'Options:',
        '    -a, --address=<address>    Search by address',
        '    -d, --date=<date>          Search by date (YYYY-MM-DD)',
        '    -s, --size=<SIZE>          Search by size (in bytes)',
        '    -S, --subject=<subject>    Search by subject',
        '    -t, --tags=<tags>          Searched tags (Comma separated)',
        '    -T, --full-text=<text>     Searched tags (Comma separated)',
        '    -h, --help                 Show help options.',
    ])
    try:
        args = docopt.docopt(usage, argv=arg)
    except SystemExit:
        # docopt exits the process on bad usage; stay in the shell instead.
        return
    if args.get('--tags') is not None:
        args['--tags'] = args['--tags'].split(',')
    if args['--date'] is not None:
        try:
            date = datetime.datetime.strptime(args['--date'], '%Y-%m-%d')
        except ValueError:
            # Malformed date: ignore the criterion rather than abort.
            date = None
    else:
        date = None
    search_criterion = search.create_search_criterion(
        address=args['--address'],
        date=date,
        subject=args['--subject'],
        size=args['--size'],
        tags=args['--tags'],
        text=args['--full-text'],
    )
    mail_set = search.fetch_uids(self.imap_account,
                                 search_criterion=search_criterion)
    if len(mail_set) == 0:
        log.error('No mail found')
        return 0
    for mail_info in search.fetch_mails_info(self.imap_account,
                                             mail_set=mail_set):
        sys.stdout.write(
            u'UID : {:<10} From : {:<40.40} Subject : {:.50}\n'.format(
                mail_info['uid'], mail_info['from'], mail_info['subject']))
ValueError
dataset/ETHPy150Open Gentux/imap-cli/imap_cli/scripts/imap_shell.py/ImapShell.do_search
6,696
def prompt_for_pw(filename): """Prompt the user for the password to access an input file.""" print 'Please enter a password to decrypt {}.'.format(filename) print '(The password will not be shown. Press ^C to cancel).' try: return getpass.getpass('--> ') except __HOLE__: sys.stderr.write('Aborted by user.\n') sys.exit(2)
KeyboardInterrupt
dataset/ETHPy150Open hellerbarde/stapler/staplelib/iohelper.py/prompt_for_pw
6,697
@staticmethod
def load(collada, localscope, skinnode, controllernode):
    """Build a Skin from a <skin> XML node.

    Raises DaeMalformedError / DaeBrokenRefError / DaeIncompleteError on
    invalid, dangling or missing data.
    """
    if len(localscope) < 3:
        raise DaeMalformedError('Not enough sources in skin')

    # Resolve the '#'-prefixed reference to the skinned geometry.
    geometry_source = skinnode.get('source')
    if geometry_source is None or len(geometry_source) < 2 \
            or geometry_source[0] != '#':
        raise DaeBrokenRefError('Invalid source attribute of skin node')
    if not geometry_source[1:] in collada.geometries:
        raise DaeBrokenRefError('Source geometry for skin node not found')
    geometry = collada.geometries[geometry_source[1:]]

    # Bind shape matrix defaults to a flattened 4x4 identity.
    bind_shape_mat = skinnode.find(tag('bind_shape_matrix'))
    if bind_shape_mat is None:
        bind_shape_mat = numpy.identity(4, dtype=numpy.float32)
        bind_shape_mat.shape = (-1,)
    else:
        try:
            values = [float(v) for v in bind_shape_mat.text.split()]
        except ValueError:
            raise DaeMalformedError('Corrupted bind shape matrix in skin')
        bind_shape_mat = numpy.array(values, dtype=numpy.float32)

    # <joints> inputs: JOINT names and inverse bind matrices.
    inputnodes = skinnode.findall('%s/%s' % (tag('joints'), tag('input')))
    if inputnodes is None or len(inputnodes) < 2:
        raise DaeIncompleteError("Not enough inputs in skin joints")
    try:
        inputs = [(i.get('semantic'), i.get('source')) for i in inputnodes]
    except ValueError as ex:
        raise DaeMalformedError('Corrupted inputs in skin')

    joint_source = None
    matrix_source = None
    for i in inputs:
        if len(i[1]) < 2 or i[1][0] != '#':
            raise DaeBrokenRefError('Input in skin node %s not found' % i[1])
        if i[0] == 'JOINT':
            joint_source = i[1][1:]
        elif i[0] == 'INV_BIND_MATRIX':
            matrix_source = i[1][1:]

    # <vertex_weights>: per-vertex joint/weight indices.
    weightsnode = skinnode.find(tag('vertex_weights'))
    if weightsnode is None:
        raise DaeIncompleteError("No vertex_weights found in skin")
    indexnode = weightsnode.find(tag('v'))
    if indexnode is None:
        raise DaeIncompleteError('Missing indices in skin vertex weights')
    vcountnode = weightsnode.find(tag('vcount'))
    if vcountnode is None:
        raise DaeIncompleteError('Missing vcount in skin vertex weights')
    inputnodes = weightsnode.findall(tag('input'))

    try:
        index = numpy.array([float(v) for v in indexnode.text.split()],
                            dtype=numpy.int32)
        vcounts = numpy.array([int(v) for v in vcountnode.text.split()],
                              dtype=numpy.int32)
        inputs = [(i.get('semantic'), i.get('source'), int(i.get('offset')))
                  for i in inputnodes]
    except ValueError as ex:
        raise DaeMalformedError('Corrupted index or offsets in skin vertex weights')

    # Offsets position JOINT and WEIGHT values inside each index tuple.
    weight_joint_source = None
    weight_source = None
    offsets = [0, 0]
    for i in inputs:
        if len(i[1]) < 2 or i[1][0] != '#':
            raise DaeBrokenRefError('Input in skin node %s not found' % i[1])
        if i[0] == 'JOINT':
            weight_joint_source = i[1][1:]
            offsets[0] = i[2]
        elif i[0] == 'WEIGHT':
            weight_source = i[1][1:]
            offsets[1] = i[2]

    if joint_source is None or weight_source is None:
        raise DaeMalformedError('Not enough inputs for vertex weights in skin')

    return Skin(localscope, bind_shape_mat, joint_source, matrix_source,
                weight_source, weight_joint_source, vcounts, index,
                offsets, geometry, controllernode, skinnode)
ValueError
dataset/ETHPy150Open pycollada/pycollada/collada/controller.py/Skin.load
6,698
@staticmethod
def load(collada, localscope, morphnode, controllernode):
    """Build a Morph from a <morph> XML node.

    Raises DaeMalformedError / DaeBrokenRefError / DaeIncompleteError on
    invalid, dangling or missing data.
    """
    # Resolve the '#'-prefixed reference to the base geometry.
    baseid = morphnode.get('source')
    if len(baseid) < 2 or baseid[0] != '#' or \
            not baseid[1:] in collada.geometries:
        raise DaeBrokenRefError('Base source of morph %s not found' % baseid)
    basegeom = collada.geometries[baseid[1:]]

    method = morphnode.get('method')
    if method is None:
        method = 'NORMALIZED'
    if not (method == 'NORMALIZED' or method == 'RELATIVE'):
        raise DaeMalformedError("Morph method must be either NORMALIZED or RELATIVE. Found '%s'" % method)

    # <targets> inputs: target geometry ids and their weights.
    inputnodes = morphnode.findall('%s/%s' % (tag('targets'), tag('input')))
    if inputnodes is None or len(inputnodes) < 2:
        raise DaeIncompleteError("Not enough inputs in a morph")
    try:
        inputs = [(i.get('semantic'), i.get('source')) for i in inputnodes]
    except ValueError as ex:
        raise DaeMalformedError('Corrupted inputs in morph')

    target_source = None
    weight_source = None
    for i in inputs:
        if len(i[1]) < 2 or i[1][0] != '#' or not i[1][1:] in localscope:
            raise DaeBrokenRefError('Input in morph node %s not found' % i[1])
        if i[0] == 'MORPH_TARGET':
            target_source = localscope[i[1][1:]]
        elif i[0] == 'MORPH_WEIGHT':
            weight_source = localscope[i[1][1:]]

    # Targets must be IDREFs and weights floats, paired one-to-one.
    if not type(target_source) is source.IDRefSource or \
            not type(weight_source) is source.FloatSource:
        raise DaeIncompleteError("Not enough inputs in targets of morph")
    if len(target_source) != len(weight_source):
        raise DaeMalformedError("Morph inputs must be of same length")

    target_list = []
    for target, weight in zip(target_source, weight_source):
        if len(target) < 1 or not (target in collada.geometries):
            raise DaeBrokenRefError("Targeted geometry %s in morph not found" % target)
        target_list.append((collada.geometries[target], weight[0]))

    return Morph(basegeom, target_list, controllernode)
ValueError
dataset/ETHPy150Open pycollada/pycollada/collada/controller.py/Morph.load
6,699
def raise_file_descriptor_limit():
    """Raise RLIMIT_NOFILE (max open file descriptors) for this process.

    Non-root processes target the current hard limit; root targets 64k
    (root may raise the hard limit itself). If the kernel rejects the
    value with ValueError, the target is scaled down before giving up.
    """
    _, hard_nofile = resource.getrlimit(resource.RLIMIT_NOFILE)
    nofile_target = hard_nofile
    if os.geteuid() == 0:
        nofile_target = 1024 * 64
    # Now bump up max filedescriptor limit as high as possible
    while True:
        try:
            hard_nofile = nofile_target
            resource.setrlimit(resource.RLIMIT_NOFILE,
                               (nofile_target, hard_nofile))
        except ValueError:
            # Target too high for this system: back off.
            # NOTE(review): the loop breaks unconditionally below, so only
            # one attempt is ever made — verify against upstream intent.
            nofile_target /= 1024
        break
ValueError
dataset/ETHPy150Open swiftstack/ssbench/ssbench/util.py/raise_file_descriptor_limit