Dataset schema (four columns):
  Unnamed: 0  int64   values 0 to 10k (row index)
  function    string  lengths 79 to 138k
  label       string  20 classes
  info        string  lengths 42 to 261
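A minimal sketch of how a row below can be used: the "label" column names the exception type that was masked out of "function" with the __HOLE__ token, so substituting one for the other restores runnable source. The example row is abridged from record 9,003 below; loading the full dump is left out because its storage format is not specified here.

# Restore the masked exception in one record (row content taken from
# record 9,003 below; everything else in this snippet is illustrative).
row = {
    "function": ("def is_numeric(s):\n    try:\n        int(s)\n"
                 "        return True\n    except __HOLE__:\n"
                 "        return False"),
    "label": "ValueError",
    "info": "dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/is_numeric",
}
restored = row["function"].replace("__HOLE__", row["label"])
print(restored)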
9,000
def identical(self, other):
    """Like equals, but also checks attributes.
    """
    try:
        return (utils.dict_equiv(self.attrs, other.attrs)
                and self.equals(other))
    except (__HOLE__, AttributeError):
        return False
TypeError
dataset/ETHPy150Open pydata/xarray/xarray/core/variable.py/Variable.identical
9,001
def find_length(owtf, http_helper, lsig, url, method, detection_struct, ch,
                headers, body=None):
    """This function finds the length of the fuzzing placeholder"""
    size = 8192
    minv = 0
    http_client = HTTPClient()
    new_url = url
    new_body = body
    new_headers = headers
    payload = ""
    for loop in range(0, 15):  # used to avoid potential deadloops
        payload = size * ch
        if lsig in url:
            new_url = url.replace(lsig, payload)
        elif body is not None and lsig in body:
            new_body = body.replace(lsig, payload)
        elif headers is not None and lsig in str(headers):
            raw_val = str(headers)
            raw_val = raw_val.replace(lsig, payload)
            new_headers = ast.literal_eval(str(raw_val))
        else:
            Error(owtf, "Length signature not found!")
        request = http_helper.create_http_request(method, new_url, new_body,
                                                  new_headers)
        try:
            response = http_client.fetch(request)
        except __HOLE__ as e:
            if e.response:
                response = e.response
        for struct in detection_struct:
            if struct["method"](response, struct["arguments"]):
                http_client.close()
                return binary_search(
                    http_helper, lsig, minv, size, url, method,
                    detection_struct, ch, headers, body)
        minv = size
        size *= 2
HTTPError
dataset/ETHPy150Open owtf/owtf/framework/http/wafbypasser/core/placeholder_length.py/find_length
9,002
def binary_search(http_helper, lsig, minv, maxv, url, method,
                  detection_struct, ch, headers, body=None):
    mid = mid_value(minv, maxv)
    new_url = url
    new_body = body
    new_headers = headers
    if minv > maxv:
        return maxv
    http_client = HTTPClient()
    payload = ch * mid
    if lsig in url:
        new_url = url.replace(lsig, payload)  # warning urlencode and etc
    elif body is not None and lsig in body:
        new_body = body.replace(lsig, payload)
    elif headers is not None and lsig in headers:
        raw_val = str(headers)
        raw_val = raw_val.replace(lsig, payload)
        new_headers = ast.literal_eval(str(raw_val))
    request = http_helper.create_http_request(method, new_url, new_body,
                                              new_headers)
    try:
        response = http_client.fetch(request)
    except __HOLE__ as e:
        response = e.response
    for struct in detection_struct:
        if struct["method"](response, struct["arguments"]):
            http_client.close()
            return binary_search(http_helper, lsig, minv, mid - 1, url,
                                 method, detection_struct, ch, headers, body)
    http_client.close()
    return binary_search(http_helper, lsig, mid + 1, maxv, url, method,
                         detection_struct, ch, headers, body)
HTTPError
dataset/ETHPy150Open owtf/owtf/framework/http/wafbypasser/core/placeholder_length.py/binary_search
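Records 9,001 and 9,002 together implement a grow-then-bisect probe: find_length doubles the payload until a probe is flagged, then binary_search narrows in on the largest accepted size. Below is a self-contained sketch of the same strategy, stripped of the owtf helpers; is_blocked is an illustrative stand-in for the detection_struct checks, and per the labels, __HOLE__ in both records is tornado's HTTPError.

# Grow the probe size exponentially until it is rejected, then
# binary-search between the last accepted and first rejected sizes.
def max_accepted_length(is_blocked, start=8192, rounds=15):
    size, minv = start, 0
    for _ in range(rounds):  # bounded, to avoid a potential endless loop
        if is_blocked(size):
            return _bisect(is_blocked, minv, size)
        minv, size = size, size * 2
    return minv

def _bisect(is_blocked, minv, maxv):
    while minv <= maxv:
        mid = (minv + maxv) // 2
        if is_blocked(mid):
            maxv = mid - 1
        else:
            minv = mid + 1
    return maxv  # largest size that was not blocked

print(max_accepted_length(lambda n: n > 20000))  # -> 20000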
9,003
def is_numeric(s):
    try:
        int(s)
        return True
    except __HOLE__:
        return False
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/is_numeric
9,004
def run(self, history=False, settings=False, back=True):
    if not self.window.active_view():
        return
    if not hasattr(self, 'history_manager'):
        self.history_manager = OverlayHistoryManager()
    self.back = back
    try:
        selection_count = len(self.window.active_view().sel())
        if history:
            self.create_history()
        elif settings:
            self.create_settings()
        else:
            self.create_main()
        if self.overlay and self.overlay.is_valid():
            self.show_quick_panel(self.overlay.items(), self.on_done,
                                  sublime.MONOSPACE_FONT)
    except __HOLE__:
        sublime.status_message("Error while showing Text Pastry overlay")
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/TextPastryShowMenu.run
9,005
def run(self, edit):
    try:
        text = sublime.get_clipboard()
        if text is not None and len(text) > 0:
            regions = []
            sel = self.view.sel()
            items = text.split("\n")
            if len(items) == 1:
                items = [text]
            strip = True
            for idx, region in enumerate(sel):
                if idx < len(items):
                    row = items[idx].strip()
                    if region.empty():
                        sublime.status_message("empty")
                        row = self.view.substr(self.view.line(
                            self.view.line(region).begin() - 1)) + "\n"
                        i = 0
                        if len(row.strip()):
                            i = self.view.insert(edit, region.end(), row)
                        regions.append(sublime.Region(region.end() + i,
                                                      region.end() + i))
                    else:
                        sublime.status_message("selection")
                        self.view.replace(edit, region, row)
                        i = len(row)
                        regions.append(sublime.Region(region.begin() + i,
                                                      region.begin() + i))
            sel.clear()
            for region in regions:
                sel.add(region)
            pass
        else:
            sublime.status_message("No text found for Insert Text, canceled")
    except __HOLE__:
        sublime.status_message("Error while executing Insert Text, canceled")
        pass
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/TextPastryPasteCommand.run
9,006
def run(self, edit, command, args=None, text=None, separator=None, items=None):
    try:
        cmd = Command.create(command, args)
        if cmd:
            items = items
            if text:
                items = text.split(separator)
            cmd.init(self.view, items)
            regions = []
            sel = self.view.sel()
            index = 0
            last_region = None
            for region in sel:
                if cmd.has_next():
                    value = cmd.next(self.view.substr(region), index, region)
                    if value is not None:
                        self.view.replace(edit, region, value)
                        regions.append(region)
                else:
                    break
                last_region = region
                index += 1
            if not global_settings("keep_selection", False):
                [sel.subtract(region) for region in regions]
                # add cursor if there is none in the current view
                if len(sel) == 0:
                    sel.add(sublime.Region(last_region.end(),
                                           last_region.end()))
            sublime.status_message("Command done: " + command)
        else:
            sublime.error_message("Command not found: " + command)
    except __HOLE__:
        sublime.status_message("Error while executing Text Pastry Command, canceled")
        pass
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/TextPastryCommandWrapperCommand.run
9,007
def parse_date(self, s):
    date = None
    parse_date_formats = global_settings("parse_date_formats", [])
    for fmt in parse_date_formats:
        try:
            date = datetime.datetime.strptime(s, fmt)
        except __HOLE__:
            pass
    return date
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/TextPastryDateRangeCommand.parse_date
9,008
def add_years(self, d, years):
    if years == 0:
        return d
    try:
        return d.replace(year = d.year + years)
    except __HOLE__:
        return d.replace(day = 28).replace(year = d.year + years)
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/TextPastryDateRangeCommand.add_years
9,009
def add_months(self, d, m):
    if m == 0:
        return d
    years = 0
    months = m + d.month
    years = int(months / 12)
    months = int(months % 12)
    if months == 0:
        years -= 1
        months = 12
    try:
        return self.add_years(d, years).replace(month = months)
    except __HOLE__:
        years = 0
        months = m + d.month + 1
        years = int(months / 12)
        months = int(months % 12)
        if months == 0:
            years -= 1
            months = 12
        return self.add_years(d, years).replace(day = 1).replace(
            month = months) - datetime.timedelta(days = 1)
ValueError
dataset/ETHPy150Open duydao/Text-Pastry/text_pastry.py/TextPastryDateRangeCommand.add_months
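The add_years/add_months records above both rely on the same guard: date.replace raises ValueError (the label for both holes) when the target month is shorter than the source day. A standard-library-only sketch of the add_years fallback:

import datetime

def add_years_safe(d, years):
    try:
        return d.replace(year=d.year + years)
    except ValueError:  # e.g. Feb 29 shifted into a non-leap year
        return d.replace(day=28, year=d.year + years)

print(add_years_safe(datetime.date(2012, 2, 29), 1))  # 2013-02-28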
9,010
def _parse_cron_yaml(self):
    """Loads the cron.yaml file and parses it.

    Returns:
      A croninfo.CronInfoExternal containing cron jobs.

    Raises:
      yaml_errors.Error, StandardError: The cron.yaml was invalid.
    """
    for cron_yaml in ('cron.yaml', 'cron.yml'):
        try:
            with open(os.path.join(
                    self.configuration.modules[0].application_root,
                    cron_yaml)) as f:
                cron_info = croninfo.LoadSingleCron(f)
            return cron_info
        except __HOLE__:
            continue
    return None
IOError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/devappserver2/admin/cron_handler.py/CronHandler._parse_cron_yaml
9,011
def get(self, key, default=None):
    """Get a string representing all headers with a particular value,
    with multiple headers separated by a comma. If no header is found
    return a default value

    :param key: The header name to look up (case-insensitive)
    :param default: The value to return in the case of no match
    """
    try:
        return self[key]
    except __HOLE__:
        return default
KeyError
dataset/ETHPy150Open w3c/wptserve/wptserve/request.py/RequestHeaders.get
9,012
def get_list(self, key, default=missing):
    """Get all the header values for a particular field name as a list"""
    try:
        return dict.__getitem__(self, key.lower())
    except __HOLE__:
        if default is not missing:
            return default
        else:
            raise
KeyError
dataset/ETHPy150Open w3c/wptserve/wptserve/request.py/RequestHeaders.get_list
9,013
def __init__(self, *args, **kwds):  # pylint: disable=E1003
    '''Initialize an ordered dictionary. Signature is the same as for
    regular dictionaries, but keyword arguments are not recommended
    because their insertion order is arbitrary.
    '''
    super(OrderedDict, self).__init__()  # pylint: disable=E1003
    if len(args) > 1:
        raise TypeError(
            'expected at most 1 arguments, got {0}'.format(len(args))
        )
    try:
        self.__root
    except __HOLE__:
        self.__root = root = []  # sentinel node
        root[:] = [root, root, None]
        self.__map = {}
    self.__update(*args, **kwds)
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/utils/odict.py/OrderedDict.__init__
9,014
def clear(self):
    'od.clear() -> None.  Remove all items from od.'
    try:
        for node in six.itervalues(self.__map):
            del node[:]
        root = self.__root
        root[:] = [root, root, None]
        self.__map.clear()
    except __HOLE__:
        pass
    dict.clear(self)
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/utils/odict.py/OrderedDict.clear
9,015
def __getitem__(self, key):
    try:
        return OrderedDict.__getitem__(self, key)
    except __HOLE__:
        return self.__missing__(key)
KeyError
dataset/ETHPy150Open saltstack/salt/salt/utils/odict.py/DefaultOrderedDict.__getitem__
9,016
def _cleanup(request):
    """ Delete the context manager and everything else. """
    del request._in_do
    del request._chunked
    del request._charset
    del request._unhandled
    del request._context
    try:
        del request._gen
    except __HOLE__:
        del request._callbacks
        del request._waiting
        return

    # Cleanup any timers.
    for item in request._waiting:
        timer = getattr(item, "timeout", None)
        if timer and callable(timer):
            try:
                timer()
            except Exception:
                # Who knows what could happen here.
                pass

    # Asynchronous Internals
    request._callbacks.clear()
    del request._callbacks
    del request._waiting
AttributeError
dataset/ETHPy150Open ecdavis/pants/pants/web/asynchronous.py/_cleanup
9,017
def _do(request, input, as_exception=False):
    """
    Send the provided input to the asynchronous request handler for
    *request*. If ``as_exception`` is truthy, throw it into the generator
    as an exception, otherwise it's just sent.
    """
    if request._in_do:
        # Let's not enter some bizarre stack recursion that can cause all
        # sorts of badness today, shall we? Put off the next _do till the
        # next engine cycle.
        request.connection.engine.callback(_do, request, input, as_exception)
        return

    try:
        request._in_do = True
        while True:
            errored = False
            with request._context as app:
                # Make sure we're connected.
                if not request.connection.connected:
                    try:
                        # Bubble up an error so the user's code can do
                        # something about this.
                        request._gen.throw(RequestClosed())
                    except RequestClosed:
                        # Don't react at all to our own exception.
                        pass
                    except Exception:
                        # Just log any other exception. The request is
                        # already closed, so there's not a lot *else* to do.
                        log.exception("Error while cleaning up closed "
                                      "asynchronous request: %s %s" %
                                      (request.method, request.url))
                    finally:
                        _cleanup(request)
                    return

                try:
                    if as_exception:
                        output = request._gen.throw(input)
                    else:
                        output = request._gen.send(input)
                except __HOLE__:
                    # We've run out of content. Setting output to Finished
                    # tells the output handler to close up and go home.
                    output = Finished
                except HTTPException as err:
                    if request._started:
                        log.exception("Error while handling asynchronous "
                                      "request: %s %s" %
                                      (request.method, request.url))
                        request.connection.close(False)
                        _cleanup(request)
                        return
                    errored = True
                    request._tb = traceback.format_exc()
                    err_handler = getattr(app, "handle_%d" % err.status, None)
                    if err_handler:
                        output = err_handler(request, err)
                    else:
                        output = error(err.message, err.status, err.headers,
                                       request=request)
                except HTTPTransparentRedirect as err:
                    if request._started:
                        log.exception("HTTPTransparentRedirect sent to "
                                      "already started request: %s %s" %
                                      (request.method, request.url))
                        request.connection.close(False)
                        _cleanup(request)
                        return
                    errored = True
                    output = err
                    request._tb = traceback.format_exc()
                except Exception as err:
                    if request._started:
                        log.exception("Error while handling asynchronous "
                                      "request: %s %s" %
                                      (request.method, request.url))
                        request.connection.close(False)
                        _cleanup(request)
                        return
                    errored = True
                    request._tb = traceback.format_exc()
                    try:
                        output = app.handle_500(request, err)
                    except Exception:
                        # There's an error with the handle_500 function.
                        log.exception("There was a problem handling a "
                                      "request, and a problem running "
                                      "Application.handle_500 for %r." % app)
                        output = error(500, request=request)

            # Did we error?
            if errored:
                # Clear the rule data, because errors don't care about it.
                request._rule_content_type = None
                request._rule_headers = None
                _async_finish(request, output)
                return

            # Returning a list of Callback instances is the only way to
            # control exactly what you're waiting for.
            if not isinstance(output, _WaitList) and \
                    isinstance(output, (tuple, list)) and \
                    all(isinstance(x, Callback) for x in output):
                output = _WaitList(output)

            # Now that we're out of the request context, let's see what
            # we've got to work with.
            if isinstance(output, _Sleeper):
                # Just sleep.
                request.connection.engine.defer(output[0], _do, request, None)
            elif isinstance(output, Callback):
                # Shove the callback onto its own waiting list.
                request._unhandled.remove(output)
                request._waiting.append(output)
            elif isinstance(output, _WaitList):
                # Push the WaitList onto the waiting list.
                if output.timeout:
                    output.timeout = request.connection.engine.defer(
                        output.timeout, _wait_timeout, request)
                request._waiting.append(output)
            elif isinstance(output, _Receiver):
                # Push the Receiver onto the waiting list.
                if output[1]:
                    output.timeout = request.connection.engine.defer(
                        output[1], _receive_timeout, request)
                output.ref = ref = weakref.ref(request)
                receivers.setdefault(output[0], []).append(ref)
                request._waiting.append(output)
            else:
                # We've received some content, so write it out.
                if request._writer(request, output) is Again:
                    input = None
                    as_exception = False
                    # We *have* to continue if we don't want to break.
                    continue
            break
    finally:
        if hasattr(request, "_in_do"):
            request._in_do = False
StopIteration
dataset/ETHPy150Open ecdavis/pants/pants/web/asynchronous.py/_do
9,018
def callback(self, filename, lines, **kwargs):
    """publishes lines one by one to the given topic"""
    timestamp = self.get_timestamp(**kwargs)
    if kwargs.get('timestamp', False):
        del kwargs['timestamp']

    for line in lines:
        try:
            import warnings
            with warnings.catch_warnings():
                warnings.simplefilter('error')
                self._client.publish(
                    self._topic,
                    self.format(filename, line, timestamp, **kwargs),
                    0)
        except Exception, e:
            try:
                raise TransportException(e.strerror)
            except __HOLE__:
                raise TransportException('Unspecified exception encountered')
AttributeError
dataset/ETHPy150Open python-beaver/python-beaver/beaver/transports/mqtt_transport.py/MqttTransport.callback
9,019
def deserialize_json(self, json_value):
    try:
        value = json.loads(json_value)
    except __HOLE__:
        raise DeserializationError(
            "Invalid JSON value for \"{}\": \"{}\"!".format(self.name, json_value),
            json_value, self.name)
    else:
        if value is not None:
            if not isinstance(value, list):
                raise DeserializationError(
                    "\"{}\" is not a JSON array!".format(json_value),
                    json_value, self.name)
            else:
                if self._value_option:
                    return [self._value_option.deserialize_json(json.dumps(v))
                            for v in value]
                else:
                    return value
        else:
            return None
ValueError
dataset/ETHPy150Open GreatFruitOmsk/nativeconfig/nativeconfig/options/array_option.py/ArrayOption.deserialize_json
9,020
def wordBreak_TLE(self, s, dict):
    """
    TLE
    dfs O(n^2)

    Algorithm: DFS. The reason is that DFS repeatedly calculate whether a
    certain part of string can be segmented. Therefore we can use dynamic
    programming.
    :param s: a string
    :param dict: a set of string
    :return: a boolean
    """
    string_builder = ""
    if s=="":
        return True

    # greedy
    for i in range(len(s)):
        string_builder += s[i]
        if string_builder in dict:
            try:
                if self.wordBreak_TLE(s[i+1:], dict):
                    return True
                else:
                    continue
            except __HOLE__:
                return True
    return False
IndexError
dataset/ETHPy150Open algorhythms/LeetCode/139 Word Break.py/Solution.wordBreak_TLE
9,021
def wordBreak(self, s, dict):
    """
     __      _______  ___  ___  __    ______   ______   .__   __.     _______.
    |  |    |   ____\ \  \/  /  |  |  /      | /  __  \  |  \ |  |   /        |
    |  |    |  |__     \    /   |  | |  ,----'|  |  |  | |   \|  |  |   (----`
    |  |    |   __|    >    <   |  | |  |     |  |  |  | |  .  `  |   \   \
    |  |    |  |____  /  .  \   |  | |  `----.|  `--'  | |  |\   | .----)   |
    |_______||_______/__/ \__\  |__|  \______| \______/ |__| \__| |_______/

    Dynamic programming
    The dynamic solution can tell us whether the string can be broken to
    words, but can not tell us what words the string is broken to.

    O(n*m)

    Google On Campus Presentation, demonstration questions.
    4 Sep 2014, Nanyang Technological University, Singapore

    dp[i] rolling dp (rather than using 2D dp[i, j])
    dp[i] means s[:i] can be made up of sequence of lexicons

    - l e e t c o d e
    T F F F T F F F T

    Lexicons = {the, theta, table, down, there, bled, own}
    - t h e t a b l e d o w n t h e r e
    T F F T F T F F T T F F T F F F F T

    :param s: a string
    :param dict: a set of string
    :return: a boolean
    """
    dp = [False] * (len(s)+1)
    dp[0] = True  # dummy

    for i in range(len(dp)):  # [0, len(s)+1)
        # continue from matched condition
        if dp[i]:
            for word in dict:
                try:
                    # trivial
                    if dp[i+len(word)]==True:
                        continue
                    # main: test whether [i, i+len) can construct a word.
                    # THE BEAUTY OF HALF OPEN
                    if s[i:i+len(word)]==word:
                        dp[i+len(word)] = True  # record the checking
                except __HOLE__:
                    continue
    return dp[-1]
IndexError
dataset/ETHPy150Open algorhythms/LeetCode/139 Word Break.py/Solution.wordBreak
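A quick usage sketch for the DP version above, assuming __HOLE__ is filled in as IndexError per the label and Solution is instantiable as-is; the second lexicon comes from the record's own docstring.

s = Solution()
print(s.wordBreak("leetcode", {"leet", "code"}))                        # True
print(s.wordBreak("thetabledown", {"the", "theta", "table", "down"}))  # True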
9,022
def __init__(self, value, priority=0):
    value = unicode(value).strip()
    media_type = value.split(';')
    media_type, params = media_type[0].strip(), dict(
        (i.strip() for i in p.split('=', 1))
        for p in media_type[1:] if '=' in p)

    mt = self._MEDIA_TYPE_RE.match(media_type)
    if not mt:
        raise ValueError(
            "Not a correctly formatted internet media type (%r)" % media_type)
    mt = mt.groupdict()

    try:
        self.quality = float(params.pop('q', 1))
    except __HOLE__:
        self.quality = 1

    self.type = mt.get('type'), mt.get('subtype'), mt.get('subsubtype')
    self.specifity = len([t for t in self.type if t])
    self.params = params
    self.value = value
    self.priority = priority
ValueError
dataset/ETHPy150Open mollyproject/mollyproject/molly/utils/http.py/MediaType.__init__
9,023
@app.template_filter(name='truncatechars')
def truncatechars(value, arg):
    """
    Truncates a string after a certain number of chars.

    Argument: Number of chars to truncate after.
    """
    try:
        length = int(arg)
    except __HOLE__:
        # Invalid literal for int().
        return value  # Fail silently.
    if len(value) > length:
        return value[:length] + '...'
    return value
ValueError
dataset/ETHPy150Open dcramer/sentry-old/sentry/web/templatetags.py/truncatechars
9,024
def shutdown(self, timeout=None):
    """ Optional. """
    try:
        shutdown_method = self._proxied_manager.shutdown
    except __HOLE__:
        return
    shutdown_method(timeout)
AttributeError
dataset/ETHPy150Open galaxyproject/pulsar/pulsar/managers/__init__.py/ManagerProxy.shutdown
9,025
def test_remove_invalid_reverse_domain(self):
    rd1 = self.create_domain(name='130', ip_type='4')
    rd1.save()
    rd2 = self.create_domain(name='130.193', ip_type='4')
    rd2.save()
    rd3 = self.create_domain(name='130.193.8', ip_type='4')
    rd3.save()
    try:
        rd1.delete()
    except __HOLE__, e:
        pass
    self.assertEqual(ValidationError, type(e))
ValidationError
dataset/ETHPy150Open rtucker-mozilla/mozilla_inventory/mozdns/domain/tests/reverse_tests.py/ReverseDomainTests.test_remove_invalid_reverse_domain
9,026
def test_add_reverse_domains(self):
    try:
        self.create_domain(name='192.168', ip_type='4').save()
    except __HOLE__, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    rdx = self.create_domain(name='192', ip_type='4')
    rdx.save()
    rdy = self.create_domain(name='192.168', ip_type='4')
    rdy.save()

    try:
        self.create_domain(name='192.168', ip_type='4').save()
    except ValidationError, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    self.create_domain(name='128', ip_type='4').save()
    rd0 = self.create_domain(name='128.193', ip_type='4')
    rd0.save()

    ip1 = self.add_ptr_ipv4('128.193.8.1')
    self.assertEqual(ip1.reverse_domain, rd0)
    ip2 = self.add_ptr_ipv4('128.193.8.2')
    self.assertEqual(ip2.reverse_domain, rd0)
    ip3 = self.add_ptr_ipv4('128.193.8.3')
    self.assertEqual(ip3.reverse_domain, rd0)
    ip4 = self.add_ptr_ipv4('128.193.8.4')
    self.assertEqual(ip4.reverse_domain, rd0)

    rd1 = self.create_domain(name='128.193.8', ip_type='4')
    rd1.save()

    ptr1 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.1').__int__(), ip_type='4')[0]
    self.assertEqual(ptr1.reverse_domain, rd1)
    ptr2 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.2').__int__(), ip_type='4')[0]
    self.assertEqual(ptr2.reverse_domain, rd1)
    ptr3 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.3').__int__(), ip_type='4')[0]
    self.assertEqual(ptr3.reverse_domain, rd1)
    ptr4 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.4').__int__(), ip_type='4')[0]
    self.assertEqual(ptr4.reverse_domain, rd1)

    rd1.delete()

    ptr1 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.1').__int__(), ip_type='4')[0]
    self.assertEqual(ptr1.reverse_domain, rd0)
    ptr2 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.2').__int__(), ip_type='4')[0]
    self.assertEqual(ptr2.reverse_domain, rd0)
    ptr3 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.2').__int__(), ip_type='4')[0]
    self.assertEqual(ptr3.reverse_domain, rd0)
    ptr4 = PTR.objects.filter(ip_lower=ipaddr.IPv4Address(
        '128.193.8.3').__int__(), ip_type='4')[0]
    self.assertEqual(ptr4.reverse_domain, rd0)
ValidationError
dataset/ETHPy150Open rtucker-mozilla/mozilla_inventory/mozdns/domain/tests/reverse_tests.py/ReverseDomainTests.test_add_reverse_domains
9,027
def test_boot_strap_add_ipv6_domain(self):
    osu_block = "2.6.2.1.1.0.5.F.0.0.0"
    test_dname = osu_block + ".d.e.a.d.b.e.e.f"
    boot_strap_ipv6_reverse_domain(test_dname)

    try:
        self.create_domain(
            name='2.6.2.1.1.0.5.f.0.0.0', ip_type='6').save()
    except ValidationError, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    try:
        self.create_domain(name='2.6.2.1', ip_type='6').save()
    except __HOLE__, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    try:
        self.create_domain(
            name='2.6.2.1.1.0.5.F.0.0.0.d.e.a.d', ip_type='6').save()
    except ValidationError, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    try:
        self.create_domain(
            name='2.6.2.1.1.0.5.F.0.0.0.d.e.a.d.b.e.e.f', ip_type='6').save()
    except ValidationError, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    try:
        self.create_domain(name=test_dname, ip_type='6').save()
    except ValidationError, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    # These should pass
    boot_strap_ipv6_reverse_domain('7.6.2.4')
    boot_strap_ipv6_reverse_domain('6.6.2.5.1')
    # These are pretty unrealistic since they protrude into the host part
    # of the address.
    boot_strap_ipv6_reverse_domain(
        '4.6.2.2.1.0.5.3.f.0.0.0.1.2.3.4.1.2.3.4.1.2.3.4.1.2.3.4.1.2.3.4')
    boot_strap_ipv6_reverse_domain(
        '5.6.2.3.1.0.5.3.f.0.0.0.1.2.3.4.1.2.3.4.1.2.3.4')
ValidationError
dataset/ETHPy150Open rtucker-mozilla/mozilla_inventory/mozdns/domain/tests/reverse_tests.py/ReverseDomainTests.test_boot_strap_add_ipv6_domain
9,028
def test_add_reverse_domainless_ips(self):
    e = None
    try:
        self.add_ptr_ipv4('8.8.8.8')
    except __HOLE__, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    try:
        self.add_ptr_ipv6('2001:0db8:85a3:0000:0000:8a2e:0370:733')
    except ValidationError, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    boot_strap_ipv6_reverse_domain("2.0.0.1")

    try:
        self.create_domain(name='2.0.0.1', ip_type='6').save()
    except ValidationError, e:
        pass
    self.assertEqual(ValidationError, type(e))
    e = None

    self.add_ptr_ipv6('2001:0db8:85a3:0000:0000:8a2e:0370:733')
ValidationError
dataset/ETHPy150Open rtucker-mozilla/mozilla_inventory/mozdns/domain/tests/reverse_tests.py/ReverseDomainTests.test_add_reverse_domainless_ips
9,029
def test_master_reverse_ipv6_domains(self):
    rds = []
    rd = self.create_domain(name='1', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(
        name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0.0.0.0.0', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0.0.0.0.0.3', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0.0.0.0.0.3.2', ip_type='6')
    rd.save()
    rds.append(rd)
    rd = self.create_domain(name='1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0.0'
                                 '.0.0.0.0.0.0.0.0.0.3.2.1', ip_type='6')
    rd.save()
    rds.append(rd)

    for rd in list(enumerate(rds)):
        if rd[0] == 0:
            self.assertEqual(rd[1].master_domain, self.i6_arpa)
        else:
            self.assertEqual(rd[1].master_domain, rds[rd[0] - 1])
        self.assertTrue(rd[1].get_absolute_url())
        self.assertTrue(rd[1].get_edit_url())
        self.assertTrue(rd[1].get_delete_url())

    try:
        Domain.objects.filter(
            name=ip_to_domain_name('1.2.8.3.0.0.0.0.4.3.4.5.6.6.5.6.7.0.0',
                                   ip_type='6'))[0].delete()
    except __HOLE__, e:
        pass
    self.assertEqual(ValidationError, type(e))
ValidationError
dataset/ETHPy150Open rtucker-mozilla/mozilla_inventory/mozdns/domain/tests/reverse_tests.py/ReverseDomainTests.test_master_reverse_ipv6_domains
9,030
def get_global_step_var():
    """ :returns: the global_step variable in the current graph. create if not existed"""
    try:
        return tf.get_default_graph().get_tensor_by_name(GLOBAL_STEP_VAR_NAME)
    except __HOLE__:
        var = tf.Variable(
            0, trainable=False, name=GLOBAL_STEP_OP_NAME)
        return var
KeyError
dataset/ETHPy150Open ppwwyyxx/tensorpack/tensorpack/tfutils/common.py/get_global_step_var
9,031
@patch('prestoadmin.util.application.os.path.exists')
@patch('prestoadmin.util.application.sys.stderr')
def test_configures_invalid_log_file(
    self,
    stderr_mock,
    path_exists_mock,
    logging_mock,
    filesystem_mock
):
    path_exists_mock.return_value = True
    expected_error = FakeError('Error')
    logging_mock.fileConfig.side_effect = expected_error

    try:
        with Application(APPLICATION_NAME):
            pass
    except __HOLE__ as e:
        self.assertEqual('Error', e.message)

    stderr_mock.write.assert_has_calls(
        [
            call('Please run %s with sudo.\n' % APPLICATION_NAME),
        ]
    )
SystemExit
dataset/ETHPy150Open prestodb/presto-admin/tests/unit/util/test_application.py/ApplicationTest.test_configures_invalid_log_file
9,032
def run(self, rows, column_names):
    """
    Apply type inference to the provided data and return an array of
    column types.

    :param rows: The data as a sequence of any sequences: tuples, lists,
        etc.
    """
    num_columns = len(column_names)
    hypotheses = [set(self._possible_types) for i in range(num_columns)]

    force_indices = []
    for name in self._force.keys():
        try:
            force_indices.append(column_names.index(name))
        except __HOLE__:
            raise ValueError(
                '"%s" does not match the name of any column in this table.'
                % name)

    if self._limit:
        sample_rows = rows[:self._limit]
    elif self._limit == 0:
        text = Text()
        return tuple([text] * num_columns)
    else:
        sample_rows = rows

    for row in sample_rows:
        for i in range(num_columns):
            if i in force_indices:
                continue

            h = hypotheses[i]

            if len(h) == 1:
                continue

            for column_type in copy(h):
                if len(row) > i and not column_type.test(row[i]):
                    h.remove(column_type)

    column_types = []

    for i in range(num_columns):
        if i in force_indices:
            column_types.append(self._force[column_names[i]])
            continue

        h = hypotheses[i]

        # Select in prefer order
        for t in self._possible_types:
            if t in h:
                column_types.append(t)
                break

    return tuple(column_types)
ValueError
dataset/ETHPy150Open wireservice/agate/agate/type_tester.py/TypeTester.run
9,033
def from_map(self, table, inconstrs, target='', rtables=None):
    """Initialize the dictionary of constraints by converting the input map

    :param table: table affected by the constraints
    :param inconstrs: YAML map defining the constraints
    """
    if 'check_constraints' in inconstrs:
        chks = inconstrs['check_constraints']
        for cns in chks:
            check = CheckConstraint(table=table.name, schema=table.schema,
                                    name=cns)
            val = chks[cns]
            try:
                check.expression = val['expression']
            except KeyError as exc:
                exc.args = ("Constraint '%s' is missing expression" % cns, )
                raise
            if check.expression[0] == '(' and check.expression[-1] == ')':
                check.expression = check.expression[1:-1]
            if 'columns' in val:
                check.col_names = val['columns']
            if target:
                check.target = target
            if 'description' in val:
                check.description = val['description']
            if 'inherited' in val:
                check.inherited = val['inherited']
            self[(table.schema, table.name, cns)] = check
    if 'primary_key' in inconstrs:
        cns = list(inconstrs['primary_key'].keys())[0]
        pkey = PrimaryKey(table=table.name, schema=table.schema, name=cns)
        val = inconstrs['primary_key'][cns]
        try:
            pkey.col_names = val['columns']
            pkey.col_idx = self._get_col_idx(inconstrs['columns'],
                                             pkey.col_names)
        except KeyError as exc:
            exc.args = ("Constraint '%s' is missing columns" % cns, )
            raise
        for attr, value in list(val.items()):
            if attr in COMMON_ATTRS:
                setattr(pkey, attr, value)
        self[(table.schema, table.name, cns)] = pkey
    if 'foreign_keys' in inconstrs:
        fkeys = inconstrs['foreign_keys']
        for cns in fkeys:
            fkey = ForeignKey(table=table.name, schema=table.schema,
                              name=cns)
            val = fkeys[cns]
            if 'on_update' in val:
                act = val['on_update']
                if act.lower() not in list(ACTIONS.values()):
                    raise ValueError("Invalid action '%s' for constraint "
                                     "'%s'" % (act, cns))
                fkey.on_update = act
            if 'on_delete' in val:
                act = val['on_delete']
                if act.lower() not in list(ACTIONS.values()):
                    raise ValueError("Invalid action '%s' for constraint "
                                     "'%s'" % (act, cns))
                fkey.on_delete = act
            if 'deferrable' in val:
                fkey.deferrable = True
            if 'deferred' in val:
                fkey.deferred = True
            if 'match' in val:
                mat = val['match']
                if mat.lower() not in list(self.match_types.values()):
                    raise ValueError("Invalid match type '%s' for "
                                     "constraint '%s'" % (mat, cns))
                fkey.match = mat
            try:
                fkey.col_names = val['columns']
                fkey.col_idx = self._get_col_idx(inconstrs['columns'],
                                                 fkey.col_names)
            except KeyError as exc:
                exc.args = ("Constraint '%s' is missing columns" % cns, )
                raise
            try:
                refs = val['references']
            except KeyError as exc:
                exc.args = ("Constraint '%s' missing references" % cns, )
                raise
            try:
                fkey.ref_table = refs['table']
            except KeyError as exc:
                exc.args = ("Constraint '%s' missing table reference" % cns, )
                raise
            try:
                fkey.ref_col_names = refs['columns']
                rtable_key = 'table ' + fkey.ref_table
                if rtables and rtable_key in rtables:
                    ref_table = rtables[rtable_key]
                    fkey.ref_col_idxs = \
                        self._get_col_idx(ref_table['columns'],
                                          fkey.ref_col_names)
            except KeyError as exc:
                exc.args = ("Constraint '%s' missing reference columns"
                            % cns, )
                raise
            sch = table.schema
            if 'schema' in refs:
                sch = refs['schema']
            fkey.ref_schema = sch
            if 'description' in val:
                fkey.description = val['description']
            self[(table.schema, table.name, cns)] = fkey
    if 'unique_constraints' in inconstrs:
        uconstrs = inconstrs['unique_constraints']
        for cns in uconstrs:
            unq = UniqueConstraint(table=table.name, schema=table.schema,
                                   name=cns)
            val = uconstrs[cns]
            try:
                unq.col_names = val['columns']
                unq.col_idx = self._get_col_idx(inconstrs['columns'],
                                                unq.col_names)
            except __HOLE__ as exc:
                exc.args = ("Constraint '%s' is missing columns" % cns, )
                raise
            for attr, value in list(val.items()):
                if attr in COMMON_ATTRS:
                    setattr(unq, attr, value)
            self[(table.schema, table.name, cns)] = unq
KeyError
dataset/ETHPy150Open perseas/Pyrseas/pyrseas/dbobject/constraint.py/ConstraintDict.from_map
9,034
def load_cookie(self, resp):
    cookie = http_cookies.SimpleCookie()
    try:
        cookie.load(resp.headers['Set-Cookie'])
        return cookie
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open allisson/gunstar/tests/test_session.py/SessionTest.load_cookie
9,035
@staticmethod
def _x_user_parser(user, data):
    _user = data.get('user_info', {})
    user.email = _user.get('email')
    user.gender = _user.get('gender')
    user.id = _user.get('id') or _user.get('uid')
    user.locale = _user.get('default_lang')
    user.name = _user.get('full_name')
    user.nickname = _user.get('nick_name')
    user.picture = 'http://avatars.plurk.com/{0}-big2.jpg'.format(user.id)
    user.timezone = _user.get('timezone')
    user.username = _user.get('display_name')
    user.link = 'http://www.plurk.com/{0}/'.format(user.username)

    user.city, user.country = _user.get('location', ',').split(',')
    user.city = user.city.strip()
    user.country = user.country.strip()

    _bd = _user.get('date_of_birth')
    if _bd:
        try:
            user.birth_date = datetime.datetime.strptime(
                _bd,
                "%a, %d %b %Y %H:%M:%S %Z"
            )
        except __HOLE__:
            pass

    return user
ValueError
dataset/ETHPy150Open peterhudec/authomatic/authomatic/providers/oauth1.py/Plurk._x_user_parser
9,036
def _topological_sort(data, head, top_node, raise_exception = False,
                      result = None, visited = None):
    """ Internal function """
    if not result:
        result = []
    if not visited:
        visited = []
    deps = data.get(head, list())
    if head in visited:
        if head == top_node and raise_exception:
            raise DagError(head, head, result)
        return result
    visited.append(head)
    for i in deps:
        try:
            result.index(i)
        except __HOLE__:
            # the item does not exist
            result = _topological_sort(data, i, top_node, raise_exception,
                                       result, visited)
    result.append(head)
    return result
ValueError
dataset/ETHPy150Open aldebaran/qibuild/python/qisys/sort.py/_topological_sort
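A hedged usage sketch for _topological_sort above, assuming __HOLE__ is filled in as ValueError per the label; the dependency map and names are purely illustrative. Dependencies come out before their dependents:

deps = {"app": ["lib", "util"], "lib": ["util"], "util": []}
print(_topological_sort(deps, "app", "app"))  # ['util', 'lib', 'app']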
9,037
def store_get_async(self, key, callback):
    try:
        value = self.store_get(key)
        callback(self, key, value)
    except __HOLE__:
        callback(self, key, None)
KeyError
dataset/ETHPy150Open kivy/kivy/kivy/storage/__init__.py/AbstractStore.store_get_async
9,038
def get_attribute(self, node, attr):
    try:
        attribute = node.attributes.get(attr)
        if attribute is not None:
            return attribute.value
    except __HOLE__:
        pass
    return None
KeyError
dataset/ETHPy150Open VisTrails/VisTrails/contrib/cdat/scripts/parse_cdat_xml_file.py/XMLNode.get_attribute
9,039
def __init__(self, *args, **kwargs):
    signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)

    # There is a rather weird disparity here; if kwargs, it's set, then args
    # overrides it. It should be one or the other; don't duplicate the work
    # The reason for the kwargs check is that standard iterator passes in by
    # args, and instantiation for iteration is 33% faster.
    args_len = len(args)
    if args_len > len(self._meta.fields):
        # Daft, but matches old exception sans the err msg.
        raise IndexError("Number of args exceeds number of fields")

    fields_iter = iter(self._meta.fields)
    if not kwargs:
        # The ordering of the izip calls matter - izip throws StopIteration
        # when an iter throws it. So if the first iter throws it, the second
        # is *not* consumed. We rely on this, so don't change the order
        # without changing the logic.
        for val, field in izip(args, fields_iter):
            setattr(self, field.attname, val)
    else:
        # Slower, kwargs-ready version.
        for val, field in izip(args, fields_iter):
            setattr(self, field.attname, val)
            kwargs.pop(field.name, None)
            # Maintain compatibility with existing calls.
            if isinstance(field.rel, ManyToOneRel):
                kwargs.pop(field.attname, None)

    # Now we're left with the unprocessed fields that *must* come from
    # keywords, or default.
    for field in fields_iter:
        is_related_object = False
        # This slightly odd construct is so that we can access any
        # data-descriptor object (DeferredAttribute) without triggering its
        # __get__ method.
        if (field.attname not in kwargs and
                isinstance(self.__class__.__dict__.get(field.attname),
                           DeferredAttribute)):
            # This field will be populated on request.
            continue
        if kwargs:
            if isinstance(field.rel, ManyToOneRel):
                try:
                    # Assume object instance was passed in.
                    rel_obj = kwargs.pop(field.name)
                    is_related_object = True
                except KeyError:
                    try:
                        # Object instance wasn't passed in -- must be an ID.
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        val = field.get_default()
                else:
                    # Object instance was passed in. Special case: You can
                    # pass in "None" for related objects if it's allowed.
                    if rel_obj is None and field.null:
                        val = None
            else:
                val = kwargs.pop(field.attname, field.get_default())
        else:
            val = field.get_default()
        if is_related_object:
            # If we are passed a related instance, set it using the
            # field.name instead of field.attname (e.g. "user" instead of
            # "user_id") so that the object gets properly cached (and type
            # checked) by the RelatedObjectDescriptor.
            setattr(self, field.name, rel_obj)
        else:
            setattr(self, field.attname, val)

    if kwargs:
        for prop in kwargs.keys():
            try:
                if isinstance(getattr(self.__class__, prop), property):
                    setattr(self, prop, kwargs.pop(prop))
            except __HOLE__:
                pass
        if kwargs:
            raise TypeError, "'%s' is an invalid keyword argument for this function" % kwargs.keys()[0]

    signals.post_init.send(sender=self.__class__, instance=self)
AttributeError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/db/models/base.py/Model.__init__
9,040
def __repr__(self):
    try:
        u = unicode(self)
    except (UnicodeEncodeError, __HOLE__):
        u = '[Bad Unicode data]'
    return smart_str(u'<%s: %s>' % (self.__class__.__name__, u))
UnicodeDecodeError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/db/models/base.py/Model.__repr__
9,041
def _collect_sub_objects(self, seen_objs, parent=None, nullable=False):
    """
    Recursively populates seen_objs with all objects related to this
    object.

    When done, seen_objs.items() will be in the format:
        [(model_class, {pk_val: obj, pk_val: obj, ...}),
         (model_class, {pk_val: obj, pk_val: obj, ...}), ...]
    """
    pk_val = self._get_pk_val()
    if seen_objs.add(self.__class__, pk_val, self, parent, nullable):
        return

    for related in self._meta.get_all_related_objects():
        rel_opts_name = related.get_accessor_name()
        if isinstance(related.field.rel, OneToOneRel):
            try:
                sub_obj = getattr(self, rel_opts_name)
            except __HOLE__:
                pass
            else:
                sub_obj._collect_sub_objects(seen_objs, self.__class__,
                                             related.field.null)
        else:
            # To make sure we can access all elements, we can't use the
            # normal manager on the related object. So we work directly
            # with the descriptor object.
            for cls in self.__class__.mro():
                if rel_opts_name in cls.__dict__:
                    rel_descriptor = cls.__dict__[rel_opts_name]
                    break
            else:
                raise AssertionError("Should never get here.")
            delete_qs = rel_descriptor.delete_manager(self).all()
            for sub_obj in delete_qs:
                sub_obj._collect_sub_objects(seen_objs, self.__class__,
                                             related.field.null)

    # Handle any ancestors (for the model-inheritance case). We do this by
    # traversing to the most remote parent classes -- those with no parents
    # themselves -- and then adding those instances to the collection. That
    # will include all the child instances down to "self".
    parent_stack = [p for p in self._meta.parents.values() if p is not None]
    while parent_stack:
        link = parent_stack.pop()
        parent_obj = getattr(self, link.name)
        if parent_obj._meta.parents:
            parent_stack.extend(parent_obj._meta.parents.values())
            continue
        # At this point, parent_obj is base class (no ancestor models). So
        # delete it and all its descendents.
        parent_obj._collect_sub_objects(seen_objs)
ObjectDoesNotExist
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/db/models/base.py/Model._collect_sub_objects
9,042
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
    op = is_next and 'gt' or 'lt'
    order = not is_next and '-' or ''
    param = smart_str(getattr(self, field.attname))
    q = Q(**{'%s__%s' % (field.name, op): param})
    q = q|Q(**{field.name: param, 'pk__%s' % op: self.pk})
    qs = self.__class__._default_manager.filter(**kwargs).filter(q).order_by(
        '%s%s' % (order, field.name), '%spk' % order)
    try:
        return qs[0]
    except __HOLE__:
        raise self.DoesNotExist, "%s matching query does not exist." % \
            self.__class__._meta.object_name
IndexError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/db/models/base.py/Model._get_next_or_previous_by_FIELD
9,043
def name_from_signature(sig):
    """
    Takes a method signature. Returns the method's name.

    Usage:

        >>> name_from_signature('spam_eggs(arg=<str>, arg2=<str>) -> <str>')
        'spam_eggs'
        >>>
    """
    try:
        return re.match(SIG_RE, sig).group('name')
    except __HOLE__:
        raise ValueError(
            u'Method signature syntax "{sig}" is incorrect.'.format(sig=sig))
AttributeError
dataset/ETHPy150Open orokusaki/django-jsonrpc-2-0/jsonrpc/signatures.py/name_from_signature
9,044
def params_from_signature(sig):
    """
    Takes a method signature, such as ``sig_example``. Returns a list of
    3-tuples, each with a parameter, it's type, and whether it's optional.

    Usage:

        >>> params_from_signature('spam_eggs(arg=<str>, arg2=<str>) -> <str>')
        [('arg', 'str', False), ('arg2', 'str', False)]
        >>>
    """
    try:
        args = re.match(SIG_RE, sig).group('args')
    except AttributeError:
        raise ValueError(
            u'Method signature syntax "{sig}" is incorrect.'.format(sig=sig))
    try:
        lot = []  # Return value ``[(name, type, optional), ...]``
        if len(args) > 0:
            args = args.split(', ')
            opt_flag = False
            for arg in args:
                match = re.match(ARG_RE, arg)
                if match.group('optional') is not None:
                    optional = True
                    opt_flag = True
                else:
                    if opt_flag:
                        # Optional params already encountered.
                        raise ValueError(
                            u'Required params must come before optional '
                            'params in "{sig}".'.format(sig=sig))
                    optional = False
                lot.append(
                    (match.group('name'), match.group('type'), optional))
        return lot
    except __HOLE__:
        raise ValueError(
            u'Method signature params syntax "{sig}" is incorrect '.format(
                sig=sig))
AttributeError
dataset/ETHPy150Open orokusaki/django-jsonrpc-2-0/jsonrpc/signatures.py/params_from_signature
9,045
def return_type_from_signature(sig):
    """
    Returns the string representation of the JSON type returned by a method
    (for use in ``jsonrpc.types.JSONRPCType``), based on a provided
    signature.

    Usage:

        >>> return_type_from_signature('spam_eggs(arg=<str>, arg2=<str>) -> <str>')
        'str'
        >>>
    """
    try:
        r_type = re.match(SIG_RE, sig).group('rtype')
    except __HOLE__:
        raise ValueError(
            u'Method signature syntax "{sig}" is incorrect.'.format(sig=sig))
    if not r_type in JSONType.json_types:
        raise ValueError(
            u'Invalid return type "{r_type}". Allowed types are: '
            '{allowed}.'.format(
                r_type=r_type, allowed=', '.join(JSONType.json_types)))
    return r_type
AttributeError
dataset/ETHPy150Open orokusaki/django-jsonrpc-2-0/jsonrpc/signatures.py/return_type_from_signature
9,046
@sensitive_post_parameters()
@never_cache
@deprecate_current_app
def password_reset_confirm(request, uidb64=None, token=None,
                           template_name='registration/password_reset_confirm.html',
                           token_generator=default_token_generator,
                           set_password_form=SetPasswordForm,
                           post_reset_redirect=None,
                           extra_context=None):
    """
    View that checks the hash in a password reset link and presents a
    form for entering a new password.
    """
    UserModel = get_user_model()
    assert uidb64 is not None and token is not None  # checked by URLconf
    if post_reset_redirect is None:
        post_reset_redirect = reverse('password_reset_complete')
    else:
        post_reset_redirect = resolve_url(post_reset_redirect)
    try:
        # urlsafe_base64_decode() decodes to bytestring on Python 3
        uid = force_text(urlsafe_base64_decode(uidb64))
        user = UserModel._default_manager.get(pk=uid)
    except (__HOLE__, ValueError, OverflowError, UserModel.DoesNotExist):
        user = None

    if user is not None and token_generator.check_token(user, token):
        validlink = True
        title = _('Enter new password')
        if request.method == 'POST':
            form = set_password_form(user, request.POST)
            if form.is_valid():
                form.save()
                return HttpResponseRedirect(post_reset_redirect)
        else:
            form = set_password_form(user)
    else:
        validlink = False
        form = None
        title = _('Password reset unsuccessful')
    context = {
        'form': form,
        'title': title,
        'validlink': validlink,
    }
    if extra_context is not None:
        context.update(extra_context)
    return TemplateResponse(request, template_name, context)
TypeError
dataset/ETHPy150Open django/django/django/contrib/auth/views.py/password_reset_confirm
9,047
def start(self, *args, **kwargs):
    self.running = True
    LOGGER.info("Starting: Hedwig Worker Service...")
    try:
        while self.running:
            if len(self.workers) < self.num_workers:
                worker = self.worker_cls(*args, **kwargs)
                worker.start()
                self.workers.append(worker)
            else:
                for worker in self.workers:
                    if not worker.is_alive():
                        worker.shutdown()
                        self.workers.remove(worker)
                        worker.terminate()
                time.sleep(5)
    except __HOLE__:
        self.stop()
    except Exception as e:
        LOGGER.exception(str(e))
        traceback.print_exc(file=sys.stdout)
        self.stop()
KeyboardInterrupt
dataset/ETHPy150Open ofpiyush/hedwig-py/hedwig/core/service.py/ServiceManager.start
9,048
def thumbnail_generator(image_dict):
    """Generates a thumbnail. Loads the data slowly."""
    # Why is it a generator-function and not just a function?
    thumb_dir = os.path.dirname(image_dict['data']['thumb'])
    if not os.path.exists(thumb_dir):
        os.mkdir(thumb_dir)
    loader = gtk.gdk.PixbufLoader()
    try:
        fin = open(image_dict['data']['filename'], 'rb')
        while 1:
            data = fin.read(32768)
            if data:
                loader.write(data)
                yield None
            else:
                break
        pixbuf = loader.get_pixbuf()
        if pixbuf is None:
            raise ValueError(_("Invalid picture"))
        scaled = scale_image(pixbuf, image_dict['data']['thumb'])
        loader.close()
        loader = None
        fin.close()
        gc.collect()
        yield (image_dict, scaled)
    except (__HOLE__, ValueError), exc:
        print image_dict['data']['filename'] + ': ' + str(exc)
        loader.close()
        loader = None
        raise
IOError
dataset/ETHPy150Open thesamet/webilder/src/webilder/thumbs.py/thumbnail_generator
9,049
def receive_empty(self, empty, transaction):
    """
    :type empty: Message
    :param empty:
    :type transaction: Transaction
    :param transaction:
    :rtype : Transaction
    """
    if empty.type == defines.Types["RST"]:
        host, port = transaction.request.source
        key_token = hash(str(host) + str(port) + str(transaction.request.token))
        logger.info("Remove Subscriber")
        try:
            del self._relations[key_token]
        except __HOLE__:
            pass
        transaction.completed = True
    return transaction
KeyError
dataset/ETHPy150Open Tanganelli/CoAPthon/coapthon/layers/observelayer.py/ObserveLayer.receive_empty
9,050
def remove_subscriber(self, message):
    logger.debug("Remove Subcriber")
    host, port = message.destination
    key_token = hash(str(host) + str(port) + str(message.token))
    try:
        self._relations[key_token].transaction.completed = True
        del self._relations[key_token]
    except __HOLE__:
        logger.warning("No Subscriber")
KeyError
dataset/ETHPy150Open Tanganelli/CoAPthon/coapthon/layers/observelayer.py/ObserveLayer.remove_subscriber
9,051
@contextlib.contextmanager
def make_layout(layout):
    tempdir = tempfile.mkdtemp()
    for filename, file_content in layout.items():
        real_path = os.path.join(tempdir, filename)
        try:
            os.makedirs(os.path.dirname(real_path))
        except __HOLE__:
            # assume EEXIST
            pass
        with open(real_path, 'w') as fp:
            fp.write(textwrap.dedent(file_content))
    try:
        yield tempdir
    finally:
        shutil.rmtree(tempdir)
OSError
dataset/ETHPy150Open wickman/pystachio/tests/test_config.py/make_layout
9,052
def _get_json(self, fullpage):
    try:
        # extract json from inside the first and last parens
        # from http://codereview.stackexchange.com/questions/2561/converting-jsonp-to-json-is-this-regex-correct
        page = fullpage[fullpage.index("(")+1 : fullpage.rindex(")")]
    except (AttributeError, __HOLE__):
        raise ProviderContentMalformedError()
    data = provider._load_json(page)
    return(data)
ValueError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/scopus.py/Scopus._get_json
9,053
def _extract_metrics_and_provenance_url(self, entries, status_code=200, id=None):
    try:
        max_citation = 0
        for entry in entries:
            citation = int(entry["citedby-count"])
            if citation > max_citation:
                max_citation = citation
                api_url = entry["prism:url"]
        provenance_url = api_url
        # broken for now, getting scopus ID this way
        # just send back api as provenance
        # match = re.findall("scopus_id:([\dA-Z]+)", api_url)
        # scopus_id = match[0]
        # provenance_url = self._get_templated_url(self.provenance_url_template, scopus_id)
    except (__HOLE__, TypeError, ValueError):
        return {}

    if max_citation:
        metrics_dict = {"scopus:citations": (max_citation, provenance_url)}
    else:
        metrics_dict = {}

    return metrics_dict
KeyError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/scopus.py/Scopus._extract_metrics_and_provenance_url
9,054
def _extract_relevant_records(self, fullpage, id):
    data = provider._load_json(fullpage)
    response = None
    try:
        response = data["search-results"]["entry"]
    except (__HOLE__, ValueError):
        # not in Scopus database
        return None
    return response
KeyError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/scopus.py/Scopus._extract_relevant_records
9,055
def _get_relevant_record_with_biblio(self, biblio_dict):
    try:
        url = self._get_scopus_url(biblio_dict)
    except __HOLE__:
        logger.debug("tried _get_relevant_record_with_biblio but leaving because KeyError")
        return None
    if not url:
        return None
    page = self._get_scopus_page(url)
    if not page:
        return None  # empty result set
    relevant_records = self._extract_relevant_records(page, biblio_dict)
    return relevant_records
KeyError
dataset/ETHPy150Open Impactstory/total-impact-webapp/totalimpact/providers/scopus.py/Scopus._get_relevant_record_with_biblio
9,056
def _map_list_types(hit_list, col_type):
    # TODO: handle missing because of VCF.
    try:
        if col_type in ("int", "integer"):
            return [int(h) for h in hit_list if not h in (None, 'nan')]
        elif col_type == "float":
            return [float(h) for h in hit_list if not h in (None, 'nan')]
    except __HOLE__:
        sys.exit('Non-numeric value found in annotation file: %s\n'
                 % (','.join(hit_list)))
ValueError
dataset/ETHPy150Open arq5x/gemini/gemini/gemini_annotate.py/_map_list_types
9,057
def fix_val(val, type):
    if not type in ("int", "float"):
        return val
    if isinstance(val, (int, float)):
        return val

    if type == "int":
        fn = int
    else:
        fn = float

    if not val:
        return None
    try:
        return fn(val)
    except __HOLE__:
        sys.exit('Non %s value found in annotation file: %s\n' % (type, val))
ValueError
dataset/ETHPy150Open arq5x/gemini/gemini/gemini_annotate.py/fix_val
9,058
def get_hit_list(hits, col_idxs, args, _count={}):
    hits = list(hits)
    if len(hits) == 0:
        return []

    hit_list = defaultdict(list)
    for hit in hits:
        if isinstance(hit, basestring):
            hit = hit.split("\t")
        if args.anno_file.endswith(('.vcf', '.vcf.gz')):
            # only makes sense to extract when there is an equal sign
            info = dict((x[0], x[1]) for x in (p.split('=')
                        for p in hit[7].split(';') if '=' in p))
            for idx, col_idx in enumerate(col_idxs):
                if not col_idx in info:
                    hit_list[idx].append('nan')
                    if not col_idx in _count:
                        sys.stderr.write(
                            "WARNING: %s is missing from INFO field in %s "
                            "for at least one record.\n"
                            % (col_idx, args.anno_file))
                        _count[col_idx] = True
                else:
                    hit_list[idx].append(info[col_idx])
                # just append None since in a VCF they are likely
                # to be missing ?
        else:
            try:
                for idx, col_idx in enumerate(col_idxs):
                    hit_list[idx].append(hit[int(col_idx) - 1])
            except __HOLE__:
                sys.exit("EXITING: Column " + args.col_extracts + " exceeds "
                         "the number of columns in your "
                         "annotation file.\n")
    return hit_list
IndexError
dataset/ETHPy150Open arq5x/gemini/gemini/gemini_annotate.py/get_hit_list
9,059
def annotate_variants_extract(args, conn, metadata, col_names, col_types,
                              col_ops, col_idxs):
    """
    Populate a new, user-defined column in the variants table based on
    the value(s) from a specific column in the annotation file.
    """
    def summarize_hits(hits):
        hit_list = get_hit_list(hits, col_idxs, args)
        if hit_list == []:
            return []
        vals = []
        for idx, op in enumerate(col_ops):
            # more than one overlap, must summarize
            try:
                val = op_funcs[op](hit_list[idx], col_types[idx])
            except __HOLE__:
                val = None
            if not 'list' in op:
                vals.append(fix_val(val, col_types[idx]))
            else:
                # already stringed it in list/uniq_list so don't check type
                vals.append(val)
        return vals

    return _annotate_variants(args, conn, metadata, summarize_hits,
                              col_names, col_types, col_ops)
ValueError
dataset/ETHPy150Open arq5x/gemini/gemini/gemini_annotate.py/annotate_variants_extract
9,060
def xilinx7_reader(csv_file):
    '''Extract the pin data from a Xilinx CSV file and return a dictionary of pin data.'''

    # Create a dictionary that uses the unit numbers as keys. Each entry in this dictionary
    # contains another dictionary that uses the side of the symbol as a key. Each entry in
    # that dictionary uses the pin names in that unit and on that side as keys. Each entry
    # in that dictionary is a list of Pin objects with each Pin object having the same name
    # as the dictionary key. So the pins are separated into units at the top level, and then
    # the sides of the symbol, and then the pins with the same name that are on that side
    # of the unit.
    pin_data = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))

    # Read title line of the CSV file and extract the part number.
    title = csv_file.readline()
    try:
        _, part_num, date, time, _ = re.split('\s+', title)
    except:
        return

    # Dump the blank line between the title and the part's pin data.
    _ = csv_file.readline()

    # Create a reader object for the rows of the CSV file and read it row-by-row.
    csv_reader = csv.DictReader(csv_file, skipinitialspace=True)
    for index, row in enumerate(csv_reader):

        # A blank line signals the end of the pin data.
        try:
            if row['Pin'] == '':
                break
        except __HOLE__:
            # Abort if a TXT file is being processed instead of a CSV file.
            return

        # Get the pin attributes from the cells of the row of data.
        pin = copy.copy(DEFAULT_PIN)
        pin.index = index
        pin.name = fix_pin_data(row['Pin Name'], part_num)
        pin.num = fix_pin_data(row['Pin'], part_num)
        pin.unit = fix_pin_data(row['Bank'], part_num)

        # The type of the pin isn't given in the CSV file, so we'll have to infer it
        # from the name of the pin. Pin names starting with the following prefixes
        # are assigned the given pin type.
        DEFAULT_PIN_TYPE = 'input'  # Assign this pin type if name inference can't be made.
        PIN_TYPE_PREFIXES = [
            (r'VCC', 'power_in'),
            (r'GND', 'power_in'),
            (r'IO_', 'bidirectional'),
            (r'DONE', 'output'),
            (r'VREF[PN]_', 'input'),
            (r'TCK', 'input'),
            (r'TDI', 'input'),
            (r'TDO', 'output'),
            (r'TMS', 'input'),
            (r'CCLK', 'input'),
            (r'M0', 'input'),
            (r'M1', 'input'),
            (r'M2', 'input'),
            (r'INIT_B', 'input'),
            (r'PROG', 'input'),
            (r'NC', 'no_connect'),
            (r'VP_', 'input'),
            (r'VN_', 'input'),
            (r'DXP_', 'passive'),
            (r'DXN_', 'passive'),
            (r'CFGBVS_', 'input'),
            (r'MGTZ?REFCLK[0-9]+[NP]_', 'input'),
            (r'MGTZ_OBS_CLK_[PN]_', 'input'),
            (r'MGT[ZPHX]TX[NP][0-9]+_', 'output'),
            (r'MGT[ZPHX]RX[NP][0-9]+_', 'input'),
            (r'MGTAVTTRCAL_', 'passive'),
            (r'MGTRREF_', 'passive'),
            (r'MGTVCCAUX_?', 'power_in'),
            (r'MGTAVTT_?', 'power_in'),
            (r'MGTZ_THERM_IN_', 'input'),
            (r'MGTZ_THERM_OUT_', 'input'),
            (r'MGTZ?A(VCC|GND)_?', 'power_in'),
            (r'MGTZVCC[LH]_', 'power_in'),
            (r'MGTZ_SENSE_(A?VCC|A?GND)[LH]?_', 'power_in'),
            (r'RSVD(VCC[1-3]|GND)', 'power_in'),
            (r'PS_CLK_', 'input'),
            (r'PS_POR_B', 'input'),
            (r'PS_SRST_B', 'input'),
            (r'PS_DDR_CK[PN]_', 'output'),
            (r'PS_DDR_CKE_', 'output'),
            (r'PS_DDR_CS_B_', 'output'),
            (r'PS_DDR_RAS_B_', 'output'),
            (r'PS_DDR_CAS_B_', 'output'),
            (r'PS_DDR_WE_B_', 'output'),
            (r'PS_DDR_BA[0-9]+_', 'output'),
            (r'PS_DDR_A[0-9]+_', 'output'),
            (r'PS_DDR_ODT_', 'output'),
            (r'PS_DDR_DRST_B_', 'output'),
            (r'PS_DDR_DQ[0-9]+_', 'bidirectional'),
            (r'PS_DDR_DM[0-9]+_', 'output'),
            (r'PS_DDR_DQS_[PN][0-9]+_', 'bidirectional'),
            (r'PS_DDR_VR[PN]_', 'power_out'),
            (r'PS_DDR_VREF[0-9]+_', 'power_in'),
            (r'PS_MIO_VREF_', 'power_in'),
            (r'PS_MIO[0-9]+_', 'bidirectional'),
        ]
        for prefix, typ in PIN_TYPE_PREFIXES:
            if re.match(prefix, pin.name, re.IGNORECASE):
                pin.type = typ
                break
        else:
            issue('No match for {} on {}, assigning as {}'.format(
                pin.name, part_num[:4], DEFAULT_PIN_TYPE))
            pin.type = DEFAULT_PIN_TYPE
        pin.type = fix_pin_data(pin.type, part_num)

        # Add the pin from this row of the CVS file to the pin dictionary.
        # Place all the like-named pins into a list under their common name.
        # We'll unbundle them later, if necessary.
        pin_data[pin.unit][pin.side][pin.name].append(pin)

    yield part_num, pin_data  # Return the dictionary of pins extracted from the CVS file.
KeyError
dataset/ETHPy150Open xesscorp/KiPart/kipart/xilinx7_reader.py/xilinx7_reader
9,061
def clean_number(val):
    if val is not None and (isinstance(val, str) or isinstance(val, unicode)):
        try:  # it's an int
            return int(val)
        except __HOLE__:
            pass
        try:  # it's a float
            return float(val)
        except ValueError:
            pass
    # cannot convert to number, returns string or None
    return val
ValueError
dataset/ETHPy150Open ExCiteS/geokey/geokey/contributions/migrations/0010_auto_20150511_1132.py/clean_number
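A minimal standalone sketch of the same int-then-float fallback used in clean_number above; the helper name and test values are illustrative only, and plain Python 3 strings are assumed (the `unicode` check in the record is Python 2 specific):

import __main__  # no external dependencies needed

def to_number(val):
    # Try int first so "3" stays an int, then fall back to float.
    for cast in (int, float):
        try:
            return cast(val)
        except (TypeError, ValueError):
            pass
    return val  # not numeric; hand the value back unchanged

assert to_number("3") == 3
assert to_number("2.5") == 2.5
assert to_number("n/a") == "n/a"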
9,062
def _open(self, devpath):
    # Open i2c device
    try:
        self._fd = os.open(devpath, os.O_RDWR)
    except OSError as e:
        raise I2CError(e.errno, "Opening I2C device: " + e.strerror)

    self._devpath = devpath

    # Query supported functions
    buf = array.array('I', [0])
    try:
        fcntl.ioctl(self._fd, I2C._I2C_IOC_FUNCS, buf, True)
    except __HOLE__ as e:
        self.close()
        raise I2CError(e.errno, "Querying supported functions: " + e.strerror)

    # Check that I2C_RDWR ioctl() is supported on this device
    if (buf[0] & I2C._I2C_FUNC_I2C) == 0:
        self.close()
        raise I2CError(None, "I2C not supported on device %s." % devpath)

# Methods
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/i2c.py/I2C._open
9,063
def transfer(self, address, messages):
    """Transfer `messages` to the specified I2C `address`. Modifies the
    `messages` array with the results of any read transactions.

    Args:
        address (int): I2C address.
        messages (list): list of I2C.Message messages.

    Raises:
        I2CError: if an I/O or OS error occurs.
        TypeError: if `messages` type is not list.
        ValueError: if `messages` length is zero, or if message data is not valid bytes.

    """
    if not isinstance(messages, list):
        raise TypeError("Invalid messages type, should be list of I2C.Message.")
    elif len(messages) == 0:
        raise ValueError("Invalid messages data, should be non-zero length.")

    # Convert I2C.Message messages to _CI2CMessage messages
    cmessages = (_CI2CMessage * len(messages))()
    for i in range(len(messages)):
        # Convert I2C.Message data to bytes
        if isinstance(messages[i].data, bytes):
            data = messages[i].data
        elif isinstance(messages[i].data, bytearray):
            data = bytes(messages[i].data)
        elif isinstance(messages[i].data, list):
            data = bytes(bytearray(messages[i].data))

        cmessages[i].addr = address
        cmessages[i].flags = messages[i].flags | (I2C._I2C_M_RD if messages[i].read else 0)
        cmessages[i].len = len(data)
        cmessages[i].buf = ctypes.cast(ctypes.create_string_buffer(data, len(data)),
                                       ctypes.POINTER(ctypes.c_ubyte))

    # Prepare transfer structure
    i2c_xfer = _CI2CIocTransfer()
    i2c_xfer.nmsgs = len(cmessages)
    i2c_xfer.msgs = cmessages

    # Transfer
    try:
        fcntl.ioctl(self._fd, I2C._I2C_IOC_RDWR, i2c_xfer, False)
    except __HOLE__ as e:
        raise I2CError(e.errno, "I2C transfer: " + e.strerror)

    # Update any read I2C.Message messages
    for i in range(len(messages)):
        if messages[i].read:
            data = [cmessages[i].buf[j] for j in range(cmessages[i].len)]
            # Convert read data to type used in I2C.Message messages
            if isinstance(messages[i].data, list):
                messages[i].data = data
            elif isinstance(messages[i].data, bytearray):
                messages[i].data = bytearray(data)
            elif isinstance(messages[i].data, bytes):
                messages[i].data = bytes(bytearray(data))
IOError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/i2c.py/I2C.transfer
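A hedged usage sketch for the transfer() method above, following python-periphery's documented write-then-read pattern; the bus path and device address 0x50 are placeholders for whatever hardware is attached:

from periphery import I2C

i2c = I2C("/dev/i2c-0")
# Write the register address, then read one byte back from device 0x50.
msgs = [I2C.Message([0x00]), I2C.Message([0x00], read=True)]
i2c.transfer(0x50, msgs)
print("register 0x00 = 0x%02x" % msgs[1].data[0])  # read results land back in msgs
i2c.close()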
9,064
def close(self):
    """Close the i2c-dev I2C device.

    Raises:
        I2CError: if an I/O or OS error occurs.

    """
    if self._fd is None:
        return

    try:
        os.close(self._fd)
    except __HOLE__ as e:
        raise I2CError(e.errno, "Closing I2C device: " + e.strerror)

    self._fd = None

# Immutable properties
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/i2c.py/I2C.close
9,065
@property
def plugin(self):
    try:
        if self._plugin is not None:
            return self._plugin
    except __HOLE__:
        pass
    self._plugin = manager.NeutronManager.get_plugin()
    return self._plugin
AttributeError
dataset/ETHPy150Open openstack/neutron/neutron/db/dvr_mac_db.py/DVRDbMixin.plugin
9,066
def get_job(job_id):
    try:
        f = open(config_dir + "/" + job_id + ".timer", "r")
    except __HOLE__, e:
        return None
    cfg_str = f.read()
    f.close()
    cfg = parse_config(cfg_str)
    cfg["id"] = job_id
    return cfg
OSError
dataset/ETHPy150Open emersion/bups/bups/scheduler/systemd.py/get_job
9,067
@receiver(post_save, sender=Task)
def call_hook(sender, instance, **kwargs):
    if instance.hook:
        f = instance.hook
        if not callable(f):
            try:
                module, func = f.rsplit('.', 1)
                m = importlib.import_module(module)
                f = getattr(m, func)
            except (ValueError, ImportError, __HOLE__):
                logger.error(_('malformed return hook \'{}\' for [{}]').format(instance.hook, instance.name))
                return
        try:
            f(instance)
        except Exception as e:
            logger.error(_('return hook {} failed on [{}] because {}').format(instance.hook, instance.name, e))
AttributeError
dataset/ETHPy150Open Koed00/django-q/django_q/signals.py/call_hook
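The hook-resolution step above is the usual dotted-path import idiom; a self-contained sketch (resolve_hook is an illustrative name, not part of django-q):

import importlib

def resolve_hook(path):
    # Split "package.module.attr" into a module path and an attribute name,
    # import the module, then fetch the callable from it.
    module, func = path.rsplit('.', 1)
    return getattr(importlib.import_module(module), func)

sha1 = resolve_hook('hashlib.sha1')  # same mechanics as the hook lookup above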
9,068
def test_jtheta_issue_79():
    # near the circle of convergence |q| = 1 the convergence slows
    # down; for |q| > Q_LIM the theta functions raise ValueError
    mp.dps = 30
    mp.dps += 30
    q = mpf(6)/10 - one/10**6 - mpf(8)/10 * j
    mp.dps -= 30
    # Mathematica run first
    # N[EllipticTheta[3, 1, 6/10 - 10^-6 - 8/10*I], 2000]
    # then it works:
    # N[EllipticTheta[3, 1, 6/10 - 10^-6 - 8/10*I], 30]
    res = mpf('32.0031009628901652627099524264') + \
          mpf('16.6153027998236087899308935624') * j
    result = jtheta(3, 1, q)
    # check that for abs(q) > Q_LIM a ValueError exception is raised
    mp.dps += 30
    q = mpf(6)/10 - one/10**7 - mpf(8)/10 * j
    mp.dps -= 30
    try:
        result = jtheta(3, 1, q)
    except __HOLE__:
        pass
    else:
        assert(False)
    # bug reported in issue 79
    mp.dps = 100
    z = (1+j)/3
    q = mpf(368983957219251)/10**15 + mpf(636363636363636)/10**15 * j
    # Mathematica N[EllipticTheta[1, z, q], 35]
    res = mpf('2.4439389177990737589761828991467471') + \
          mpf('0.5446453005688226915290954851851490') * j
    mp.dps = 30
    result = jtheta(1, z, q)
    assert(result.ae(res))
    mp.dps = 80
    z = 3 + 4*j
    q = 0.5 + 0.5*j
    r1 = jtheta(1, z, q)
    mp.dps = 15
    r2 = jtheta(1, z, q)
    assert r1.ae(r2)
    mp.dps = 80
    z = 3 + j
    q1 = exp(j*3)
    # longer test
    # for n in range(1, 6)
    for n in range(1, 2):
        mp.dps = 80
        q = q1*(1 - mpf(1)/10**n)
        r1 = jtheta(1, z, q)
        mp.dps = 15
        r2 = jtheta(1, z, q)
        assert r1.ae(r2)
    mp.dps = 15
    # issue 79 about high derivatives
    assert jtheta(3, 4.5, 0.25, 9).ae(1359.04892680683)
    assert jtheta(3, 4.5, 0.25, 50).ae(-6.14832772630905e+33)
    mp.dps = 50
    r = jtheta(3, 4.5, 0.25, 9)
    assert r.ae('1359.048926806828939547859396600218966947753213803')
    r = jtheta(3, 4.5, 0.25, 50)
    assert r.ae('-6148327726309051673317975084654262.4119215720343656')
ValueError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/tests/test_elliptic.py/test_jtheta_issue_79
9,069
def upload(self, filepath):
    """Uploads file from filepath to Redmine and returns an assigned token"""
    if self.ver is not None and LooseVersion(str(self.ver)) < LooseVersion('1.4.0'):
        raise VersionMismatchError('File upload')

    try:
        with open(filepath, 'rb') as stream:
            url = '{0}{1}'.format(self.url, '/uploads.json')
            response = self.request('post', url, data=stream,
                                    headers={'Content-Type': 'application/octet-stream'})
    except __HOLE__:
        raise NoFileError

    return response['upload']['token']
IOError
dataset/ETHPy150Open maxtepkeev/python-redmine/redmine/__init__.py/Redmine.upload
9,070
def download(self, url, savepath=None, filename=None):
    """Downloads file from Redmine and saves it to savepath or returns it as bytes"""
    self.requests['stream'] = True  # We don't want to load the entire file into memory
    response = self.request('get', url, raw_response=True)
    self.requests['stream'] = False  # Return back this setting for all usual requests

    # If a savepath wasn't provided we return an iter_content method
    # so a user can call it with the desired parameters for maximum
    # control and iterate over the response data
    if savepath is None:
        return response.iter_content

    try:
        from urlparse import urlsplit
    except __HOLE__:
        from urllib.parse import urlsplit

    if filename is None:
        filename = urlsplit(url)[2].split('/')[-1]
        if not filename:
            raise FileUrlError

    savepath = os.path.join(savepath, filename)

    with open(savepath, 'wb') as f:
        for chunk in response.iter_content(1024):
            f.write(chunk)

    return savepath
ImportError
dataset/ETHPy150Open maxtepkeev/python-redmine/redmine/__init__.py/Redmine.download
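A usage sketch for download() above, hedged: the server URL, API key, and attachment URL are placeholders.

redmine = Redmine('https://redmine.example.com', key='API_KEY')

# Save to a directory; the filename is taken from the URL when omitted.
path = redmine.download('https://redmine.example.com/attachments/download/1/a.pdf',
                        savepath='/tmp')

# With no savepath, iterate over the raw chunks instead.
iter_content = redmine.download('https://redmine.example.com/attachments/download/1/a.pdf')
for chunk in iter_content(1024):
    pass  # process each 1 KiB chunk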
9,071
def request(self, method, url, headers=None, params=None, data=None, raw_response=False):
    """Makes requests to Redmine and returns result in json format"""
    kwargs = dict(self.requests, **{
        'headers': headers or {},
        'params': params or {},
        'data': data or {},
    })

    if 'Content-Type' not in kwargs['headers'] and method in ('post', 'put'):
        kwargs['data'] = json.dumps(data)
        kwargs['headers']['Content-Type'] = 'application/json'

    if self.impersonate is not None:
        kwargs['headers']['X-Redmine-Switch-User'] = self.impersonate

    # We would like to be authenticated by API key by default
    if 'key' not in kwargs['params'] and self.key is not None:
        kwargs['params']['key'] = self.key
    else:
        kwargs['auth'] = (self.username, self.password)

    response = getattr(requests, method)(url, **kwargs)

    if response.status_code in (200, 201):
        if raw_response:
            return response
        elif not response.content.strip():
            return True
        else:
            try:
                return response.json()
            except (ValueError, __HOLE__):
                raise JSONDecodeError(response)
    elif response.status_code == 401:
        raise AuthError
    elif response.status_code == 403:
        raise ForbiddenError
    elif response.status_code == 404:
        raise ResourceNotFoundError
    elif response.status_code == 409:
        raise ConflictError
    elif response.status_code == 412 and self.impersonate is not None:
        raise ImpersonateError
    elif response.status_code == 413:
        raise RequestEntityTooLargeError
    elif response.status_code == 422:
        errors = response.json()['errors']
        raise ValidationError(to_string(', '.join(e if is_string(e) else ': '.join(e) for e in errors)))
    elif response.status_code == 500:
        raise ServerError

    raise UnknownError(response.status_code)
TypeError
dataset/ETHPy150Open maxtepkeev/python-redmine/redmine/__init__.py/Redmine.request
9,072
def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None,
                                    verbose=False, fixed_X_dims=None):
    """
    This function runs on kernels to check the correctness of their
    implementation. It checks that the covariance function is positive
    definite for a randomly generated data set.

    :param kern: the kernel to be tested.
    :type kern: GPy.kern.Kernpart
    :param X: X input values to test the covariance function.
    :type X: ndarray
    :param X2: X2 input values to test the covariance function.
    :type X2: ndarray

    """
    pass_checks = True
    if X is None:
        X = np.random.randn(10, kern.input_dim)
        if output_ind is not None:
            X[:, output_ind] = np.random.randint(kern.output_dim, X.shape[0])
    if X2 is None:
        X2 = np.random.randn(20, kern.input_dim)
        if output_ind is not None:
            X2[:, output_ind] = np.random.randint(kern.output_dim, X2.shape[0])

    if verbose:
        print("Checking covariance function is positive definite.")
    result = Kern_check_model(kern, X=X).is_positive_semi_definite()
    if result and verbose:
        print("Check passed.")
    if not result:
        print(("Positive definite check failed for " + kern.name + " covariance function."))
        pass_checks = False
        assert(result)
        return False

    if verbose:
        print("Checking gradients of K(X, X) wrt theta.")
    result = Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=verbose)
    if result and verbose:
        print("Check passed.")
    if not result:
        print(("Gradient of K(X, X) wrt theta failed for " + kern.name +
               " covariance function. Gradient values as follows:"))
        Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=True)
        pass_checks = False
        assert(result)
        return False

    if verbose:
        print("Checking gradients of K(X, X2) wrt theta.")
    result = Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=verbose)
    if result and verbose:
        print("Check passed.")
    if not result:
        print(("Gradient of K(X, X2) wrt theta failed for " + kern.name +
               " covariance function. Gradient values as follows:"))
        Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=True)
        pass_checks = False
        assert(result)
        return False

    if verbose:
        print("Checking gradients of Kdiag(X) wrt theta.")
    try:
        result = Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=verbose)
    except NotImplementedError:
        result = True
        if verbose:
            print(("update_gradients_diag not implemented for " + kern.name))
    if result and verbose:
        print("Check passed.")
    if not result:
        print(("Gradient of Kdiag(X) wrt theta failed for " + kern.name +
               " covariance function. Gradient values as follows:"))
        Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=True)
        pass_checks = False
        assert(result)
        return False

    if verbose:
        print("Checking gradients of K(X, X) wrt X.")
    try:
        testmodel = Kern_check_dK_dX(kern, X=X, X2=None)
        if fixed_X_dims is not None:
            testmodel.X[:, fixed_X_dims].fix()
        result = testmodel.checkgrad(verbose=verbose)
    except __HOLE__:
        result = True
        if verbose:
            print(("gradients_X not implemented for " + kern.name))
    if result and verbose:
        print("Check passed.")
    if not result:
        print(("Gradient of K(X, X) wrt X failed for " + kern.name +
               " covariance function. Gradient values as follows:"))
        testmodel.checkgrad(verbose=True)
        assert(result)
        pass_checks = False
        return False

    if verbose:
        print("Checking gradients of K(X, X2) wrt X.")
    try:
        testmodel = Kern_check_dK_dX(kern, X=X, X2=X2)
        if fixed_X_dims is not None:
            testmodel.X[:, fixed_X_dims].fix()
        result = testmodel.checkgrad(verbose=verbose)
    except NotImplementedError:
        result = True
        if verbose:
            print(("gradients_X not implemented for " + kern.name))
    if result and verbose:
        print("Check passed.")
    if not result:
        print(("Gradient of K(X, X2) wrt X failed for " + kern.name +
               " covariance function. Gradient values as follows:"))
        testmodel.checkgrad(verbose=True)
        assert(result)
        pass_checks = False
        return False

    if verbose:
        print("Checking gradients of Kdiag(X) wrt X.")
    try:
        testmodel = Kern_check_dKdiag_dX(kern, X=X)
        if fixed_X_dims is not None:
            testmodel.X[:, fixed_X_dims].fix()
        result = testmodel.checkgrad(verbose=verbose)
    except NotImplementedError:
        result = True
        if verbose:
            print(("gradients_X not implemented for " + kern.name))
    if result and verbose:
        print("Check passed.")
    if not result:
        print(("Gradient of Kdiag(X) wrt X failed for " + kern.name +
               " covariance function. Gradient values as follows:"))
        Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=True)
        pass_checks = False
        assert(result)
        return False

    return pass_checks
NotImplementedError
dataset/ETHPy150Open SheffieldML/GPy/GPy/testing/kernel_tests.py/check_kernel_gradient_functions
9,073
def test_Add_dims(self):
    k = GPy.kern.Matern32(2, active_dims=[2, self.D]) + GPy.kern.RBF(2, active_dims=[0, 4]) + GPy.kern.Linear(self.D)
    k.randomize()
    self.assertRaises(IndexError, k.K, self.X)
    k = GPy.kern.Matern32(2, active_dims=[2, self.D-1]) + GPy.kern.RBF(2, active_dims=[0, 4]) + GPy.kern.Linear(self.D)
    k.randomize()
    # assert it runs:
    try:
        k.K(self.X)
    except __HOLE__:
        raise AssertionError("k.K(X) should run on self.D-1 dimension")
AssertionError
dataset/ETHPy150Open SheffieldML/GPy/GPy/testing/kernel_tests.py/KernelGradientTestsContinuous.test_Add_dims
9,074
@classmethod
def select_feature(cls, instance, attr):
    try:
        val = getattr(instance, attr)
    except __HOLE__:
        try:
            val = getattr(cls, attr)
        except AttributeError:
            raise Exception('Attribute "%s" was not found on either the training instance "%s" or class "%s"' % (attr, instance, cls))
    if callable(val):
        return val(instance)
    else:
        return val
AttributeError
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/miner/classifiers/classifier.py/Classifier.select_feature
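The two-step lookup above (instance first, class-level fallback, calling the result if it is callable) can be sketched standalone; lookup() is an illustrative name, not part of the project:

def lookup(primary, fallback, attr):
    # Try the primary object first, then the fallback object.
    try:
        val = getattr(primary, attr)
    except AttributeError:
        val = getattr(fallback, attr)  # may raise AttributeError again
    # Callables are treated as feature extractors over the primary object.
    return val(primary) if callable(val) else val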
9,075
def obj_change_as_msg(self, obj, msg, meteor_ids=None):
    """Return DDP change message of specified type (msg) for obj."""
    if meteor_ids is None:
        meteor_ids = {}
    try:
        meteor_id = meteor_ids[str(obj.pk)]
    except __HOLE__:
        meteor_id = None
    if meteor_id is None:
        meteor_ids[str(obj.pk)] = meteor_id = get_meteor_id(obj)
    assert meteor_id is not None
    if msg == REMOVED:
        data = {}  # `removed` only needs ID (added below)
    elif msg in (ADDED, CHANGED):
        data = self.serialize(obj, meteor_ids)
    else:
        raise ValueError('Invalid message type: %r' % msg)
    data.update(msg=msg, collection=self.name, id=meteor_id)
    return data
KeyError
dataset/ETHPy150Open django-ddp/django-ddp/dddp/api.py/Collection.obj_change_as_msg
9,076
def user_queries(self, user, *params):
    """Return queries for this publication as seen by `user`."""
    try:
        get_queries = self.get_queries
    except __HOLE__:
        # statically defined queries
        if self.queries is None:
            raise NotImplementedError(
                'Must set either queries or implement get_queries method.',
            )
        if params:
            raise NotImplementedError(
                'Publication params not implemented on %r publication.' % (
                    self.name,
                ),
            )
        return self.queries[:]
    if user is False:
        # no need to play with `this.user_id` or `this.user_ddp_id`.
        return get_queries(*params)
    # stash the old user details
    old_user_id = this.user_id
    old_user_ddp_id = this.user_ddp_id
    # apply the desired user details
    this.user_id = None if user is None else user.pk
    this.user_ddp_id = None if user is None else get_meteor_id(user)
    try:
        return get_queries(*params)
    finally:
        # restore the old user details
        this.user_id = old_user_id
        this.user_ddp_id = old_user_ddp_id
AttributeError
dataset/ETHPy150Open django-ddp/django-ddp/dddp/api.py/Publication.user_queries
9,077
@transaction.atomic
def do_sub(self, id_, name, silent, *params):
    """Subscribe the current thread to the specified publication."""
    try:
        pub = self.get_pub_by_name(name)
    except __HOLE__:
        if not silent:
            raise MeteorError(404, 'Subscription not found')
        return
    sub, created = Subscription.objects.get_or_create(
        connection_id=this.ws.connection.pk,
        sub_id=id_,
        user_id=getattr(this, 'user_id', None),
        defaults={
            'publication': pub.name,
            'params_ejson': ejson.dumps(params),
        },
    )
    this.subs.setdefault(sub.publication, set()).add(sub.pk)
    if not created:
        if not silent:
            this.send({'msg': 'ready', 'subs': [id_]})
        return
    # re-read from DB so we can get transaction ID (xmin)
    sub = Subscription.objects.extra(**XMIN).get(pk=sub.pk)
    for col, qs in self.sub_unique_objects(
            sub, params, pub, xmin__lte=sub.xmin,
    ):
        sub.collections.create(
            model_name=model_name(qs.model),
            collection_name=col.name,
        )
        if isinstance(col.model._meta.pk, AleaIdField):
            meteor_ids = None
        elif len([
            field for field in col.model._meta.local_fields
            if (
                isinstance(field, AleaIdField)
            ) and (
                field.unique
            ) and (
                not field.null
            )
        ]) == 1:
            meteor_ids = None
        else:
            meteor_ids = get_meteor_ids(
                qs.model, qs.values_list('pk', flat=True),
            )
        for obj in qs.select_related():
            payload = col.obj_change_as_msg(obj, ADDED, meteor_ids)
            this.send(payload)
    if not silent:
        this.send({'msg': 'ready', 'subs': [id_]})
KeyError
dataset/ETHPy150Open django-ddp/django-ddp/dddp/api.py/DDP.do_sub
9,078
@api_endpoint(decorate=False)
def method(self, method, params, id_):
    """Invoke a method."""
    try:
        handler = self.api_path_map()[method]
    except __HOLE__:
        raise MeteorError(404, 'Method not found', method)
    try:
        inspect.getcallargs(handler, *params)
    except TypeError as err:
        raise MeteorError(400, '%s' % err)
    result = handler(*params)
    msg = {'msg': 'result', 'id': id_}
    if result is not None:
        msg['result'] = result
    this.send(msg)
KeyError
dataset/ETHPy150Open django-ddp/django-ddp/dddp/api.py/DDP.method
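The inspect.getcallargs() call above validates arity before invoking the handler, turning a bad parameter list into a 400-style error instead of a crash mid-call; a minimal demonstration with an illustrative function:

import inspect

def add(a, b, scale=1):
    return (a + b) * scale

try:
    inspect.getcallargs(add, 1)  # one positional argument too few
except TypeError as err:
    print('rejected before calling add():', err)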
9,079
def valid_subscribers(self, model, obj, using):
    """Calculate valid subscribers (connections) for obj."""
    col_user_ids = {}
    col_connection_ids = collections.defaultdict(set)
    for sub in Subscription.objects.filter(
            collections__model_name=model_name(model),
    ).prefetch_related('collections'):
        pub = self.get_pub_by_name(sub.publication)
        try:
            queries = list(pub.user_queries(sub.user, *sub.params))
        except Exception:
            queries = []
        for qs, col in (
                self.qs_and_collection(qs) for qs in queries
        ):
            # check if obj is an instance of the model for the queryset
            if qs.model is not model:
                continue  # wrong model on queryset
            # check if obj is included in this subscription
            if not qs.filter(pk=obj.pk).exists():
                continue  # subscription doesn't include this obj
            # filter qs using user_rel paths on collection
            # retrieve list of allowed users via collection
            try:
                user_ids = col_user_ids[col.__class__]
            except __HOLE__:
                user_ids = col_user_ids[col.__class__] = \
                    col.user_ids_for_object(obj)
            # check if user is in permitted list of users
            if user_ids is None:
                pass  # unrestricted collection, anyone permitted to see.
            elif sub.user_id not in user_ids:
                continue  # not for this user
            col_connection_ids[col].add(sub.connection_id)
    # result is {collection: set([connection_id])}
    return col_connection_ids
KeyError
dataset/ETHPy150Open django-ddp/django-ddp/dddp/api.py/DDP.valid_subscribers
9,080
def send_notify(self, model, obj, msg, using):
    """Dispatch PostgreSQL async NOTIFY."""
    if model_name(model).split('.', 1)[0] in ('migrations', 'dddp'):
        return  # never send migration or DDP internal models
    new_col_connection_ids = self.valid_subscribers(model, obj, using)
    old_col_connection_ids = self._ddp_subscribers.get(
        using, {},
    ).get(
        model, {},
    ).pop(
        obj.pk, collections.defaultdict(set),
    )
    try:
        my_connection_id = this.ws.connection.pk
    except __HOLE__:
        my_connection_id = None
    meteor_ids = {}
    for col in set(old_col_connection_ids).union(new_col_connection_ids):
        old_connection_ids = old_col_connection_ids[col]
        new_connection_ids = new_col_connection_ids[col]
        for (msg, connection_ids) in (
                (REMOVED, old_connection_ids - new_connection_ids),
                (CHANGED, old_connection_ids & new_connection_ids),
                (ADDED, new_connection_ids - old_connection_ids),
        ):
            if not connection_ids:
                continue  # nobody subscribed
            payload = col.obj_change_as_msg(obj, msg, meteor_ids)
            payload['_connection_ids'] = sorted(connection_ids)
            if my_connection_id is not None:
                payload['_sender'] = my_connection_id
                if my_connection_id in connection_ids:
                    # msg must go to connection that initiated the change
                    payload['_tx_id'] = this.ws.get_tx_id()
            # header is sent in every payload
            header = {
                'uuid': uuid.uuid1().int,  # UUID1 should be unique
                'seq': 1,  # increments for each 8KB chunk
                'fin': 0,  # zero if more chunks expected, 1 if last chunk.
            }
            data = ejson.dumps(payload)
            cursor = connections[using].cursor()
            while data:
                hdr = ejson.dumps(header)
                # use all available payload space for chunk
                max_len = 8000 - len(hdr) - 100
                # take a chunk from data
                chunk, data = data[:max_len], data[max_len:]
                if not data:
                    # last chunk, set fin=1.
                    header['fin'] = 1
                    hdr = ejson.dumps(header)
                # print('NOTIFY: %s' % hdr)
                cursor.execute(
                    'NOTIFY "ddp", %s', [
                        '%s|%s' % (hdr, chunk),  # pipe separates hdr|chunk.
                    ],
                )
                header['seq'] += 1  # increment sequence.
AttributeError
dataset/ETHPy150Open django-ddp/django-ddp/dddp/api.py/DDP.send_notify
9,081
def tearDown(self):
    super(Cleanup, self).tearDown()
    ok = True
    while self._cleanups:
        fn, args, kwargs = self._cleanups.pop(-1)
        try:
            fn(*args, **kwargs)
        except __HOLE__:
            raise
        except:
            ok = False
    if not ok:
        raise
KeyboardInterrupt
dataset/ETHPy150Open docker/docker-py/tests/base.py/Cleanup.tearDown
9,082
def _auth_by_signature(self):
    if self._client_key_loader_func is None:
        raise RuntimeError('Client key loader function was not defined')
    if 'Authorization' not in request.headers:
        raise Unauthorized()
    try:
        mohawk.Receiver(
            credentials_map=self._client_key_loader_func,
            request_header=request.headers['Authorization'],
            url=request.url,
            method=request.method,
            content=request.get_data(),
            content_type=request.mimetype,
            accept_untrusted_content=current_app.config['HAWK_ACCEPT_UNTRUSTED_CONTENT'],
            localtime_offset_in_seconds=current_app.config['HAWK_LOCALTIME_OFFSET_IN_SECONDS'],
            timestamp_skew_in_seconds=current_app.config['HAWK_TIMESTAMP_SKEW_IN_SECONDS']
        )
    except mohawk.exc.MacMismatch:
        # mohawk exception contains computed MAC.
        # We should not expose it in response.
        raise Unauthorized()
    except (
        mohawk.exc.CredentialsLookupError,
        mohawk.exc.AlreadyProcessed,
        mohawk.exc.MisComputedContentHash,
        mohawk.exc.TokenExpired
    ) as e:
        raise Unauthorized(str(e))
    except mohawk.exc.HawkFail as e:
        raise BadRequest(str(e))
    except __HOLE__:
        raise BadRequest()
KeyError
dataset/ETHPy150Open marselester/flask-api-utils/api_utils/auth.py/Hawk._auth_by_signature
9,083
def get_type(atype, prompt="", default=None, input=raw_input, error=default_error): """Get user input of a particular base type.""" while 1: if default is not None: text = input("%s [%s]> " % (prompt, default)) if not text: return default else: text = input("%s> " % (prompt, )) try: val = ast.literal_eval(text) except (SyntaxError, __HOLE__): error("Error in input. Please enter a {} value.".format(atype.__name__)) continue if type(val) is atype: return val else: error("Please enter a {} value.".format(atype.__name__))
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/cliutils.py/get_type
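A hypothetical session with get_type() above (prompt text and defaults are illustrative): typing 7 at the first prompt yields the int 7, an empty reply returns the default, and non-literal input such as abc re-prompts because ast.literal_eval raises ValueError.

count = get_type(int, prompt="How many retries", default=3)
ratio = get_type(float, prompt="Scale factor")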
9,084
def choose(somelist, defidx=0, prompt="choose", input=raw_input, error=default_error):
    """Select an item from a list.

    Returns the object selected from the list index.
    """
    assert len(list(somelist)) > 0, "list to choose from has no elements!"
    print_menu_list(somelist)
    defidx = int(defidx)
    assert defidx >= 0 and defidx < len(somelist), "default index out of range."
    while 1:
        try:
            ri = get_input(prompt, defidx + 1, input)  # menu list starts at one
        except EOFError:
            return None
        try:
            idx = int(ri) - 1
        except __HOLE__:
            error("Bad selection. Type in the number.")
            continue
        else:
            try:
                return somelist[idx]
            except IndexError:
                error("Bad selection. Selection out of range.")
                continue
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/cliutils.py/choose
9,085
def choose_multiple(somelist, chosen=None, prompt="choose multiple", input=raw_input, error=default_error):
    somelist = somelist[:]
    if chosen is None:
        chosen = []
    while 1:
        print("Choose from list. Enter to end, negative index removes from chosen.")
        print_menu_list(somelist)
        if chosen:
            print("You have: ")
            print_menu_list(chosen)
        try:
            ri = get_input(prompt, None, input)
        except EOFError:
            return chosen
        if not ri:
            return chosen
        try:
            idx = int(ri)
        except ValueError:
            error("Bad selection. Type in the number.")
            continue
        else:
            if idx < 0:
                idx = -idx - 1
                try:
                    somelist.append(chosen[idx])
                    del chosen[idx]
                except __HOLE__:
                    error("Selection out of range.")
            elif idx == 0:
                error("No zero.")
            else:
                try:
                    chosen.append(somelist[idx - 1])
                    del somelist[idx - 1]
                except IndexError:
                    error("Selection out of range.")
IndexError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/cliutils.py/choose_multiple
9,086
def choose_value(somemap, default=None, prompt="choose", input=raw_input, error=default_error):
    """Select an item from a mapping. Keys are indexes that are selected.

    Returns the value of the mapping key selected.
    """
    first = print_menu_map(somemap)
    while 1:
        try:
            ri = get_input(prompt, default, input)
        except EOFError:
            return default
        if not ri:
            return default
        try:
            idx = type(first)(ri)
        except __HOLE__:
            error("Not a valid entry. Please try again.")
            continue
        if idx not in somemap:
            error("Not a valid selection. Please try again.")
            continue
        return somemap[idx]
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/cliutils.py/choose_value
9,087
def choose_key(somemap, default=0, prompt="choose", input=raw_input, error=default_error):
    """Select a key from a mapping.

    Returns the key selected.
    """
    keytype = type(print_menu_map(somemap))
    while 1:
        try:
            userinput = get_input(prompt, default, input)
        except EOFError:
            return default
        if not userinput:
            return default
        try:
            idx = keytype(userinput)
        except __HOLE__:
            error("Not a valid entry. Please try again.")
            continue
        if idx not in somemap:
            error("Not a valid selection. Please try again.")
            continue
        return idx
ValueError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/cliutils.py/choose_key
9,088
def choose_multiple_from_map(somemap, chosen=None, prompt="choose multiple", input=raw_input, error=default_error):
    """Choose multiple items from a mapping.

    Returns a mapping of items chosen. Type in the key to select the values.
    """
    somemap = somemap.copy()
    if chosen is None:
        chosen = {}
    while 1:
        print("Choose from list. Enter to end, negative index removes from chosen.")
        if somemap:
            first = print_menu_map(somemap)
        else:
            print("(You have selected all possible choices.)")
            first = 0
        if chosen:
            print("You have: ")
            print_menu_map(chosen)
        try:
            ri = get_input(prompt, None, input)  # menu list starts at one
        except EOFError:
            return chosen
        if not ri:
            return chosen
        try:
            idx = type(first)(ri)
        except ValueError:
            error("Bad selection. Please try again.")
            continue
        else:
            if idx < 0:  # FIXME assumes numeric keys
                idx = -idx  # FIXME handle zero index
                try:
                    somemap[idx] = chosen[idx]
                    del chosen[idx]
                except __HOLE__:
                    error("Selection out of range.")
            else:
                try:
                    chosen[idx] = somemap[idx]
                    del somemap[idx]
                except KeyError:
                    error("Selection out of range.")
KeyError
dataset/ETHPy150Open kdart/pycopia/core/pycopia/cliutils.py/choose_multiple_from_map
9,089
def _GetCN(self, x509cert):
    subject = x509cert.get_subject()
    try:
        cn_id = subject.nid["CN"]
        cn = subject.get_entries_by_nid(cn_id)[0]
    except __HOLE__:
        raise rdfvalue.DecodeError("Cert has no CN")
    self.common_name = rdfvalue.RDFURN(cn.get_data().as_text())
IndexError
dataset/ETHPy150Open google/grr/grr/lib/rdfvalues/crypto.py/RDFX509Cert._GetCN
9,090
def _get_stdout_binary():
    try:
        return sys.stdout.buffer
    except __HOLE__:
        pass
    try:
        fd = sys.stdout.fileno()
        return os.fdopen(fd, 'ab', 0)
    except Exception:
        pass
    try:
        return sys.__stdout__.buffer
    except AttributeError:
        pass
    try:
        fd = sys.__stdout__.fileno()
        return os.fdopen(fd, 'ab', 0)
    except Exception:
        pass
    # fallback
    return sys.stdout
AttributeError
dataset/ETHPy150Open alimanfoo/petl/petl/io/sources.py/_get_stdout_binary
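A usage sketch: the handle returned above accepts bytes on both Python 2 and 3, which matters when piping binary data; the payload here is just an illustrative byte string.

stdout = _get_stdout_binary()
stdout.write(b'\x89PNG\r\n')  # raw bytes, no text-mode encoding applied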
9,091
def _get_stdin_binary():
    try:
        return sys.stdin.buffer
    except AttributeError:
        pass
    try:
        fd = sys.stdin.fileno()
        return os.fdopen(fd, 'rb', 0)
    except Exception:
        pass
    try:
        return sys.__stdin__.buffer
    except __HOLE__:
        pass
    try:
        fd = sys.__stdin__.fileno()
        return os.fdopen(fd, 'rb', 0)
    except Exception:
        pass
    # fallback
    return sys.stdin
AttributeError
dataset/ETHPy150Open alimanfoo/petl/petl/io/sources.py/_get_stdin_binary
9,092
def handle(self, panel_name=None, **options):
    if panel_name is None:
        raise CommandError("You must provide a panel name.")
    if options.get('dashboard') is None:
        raise CommandError("You must specify the name of the dashboard "
                           "this panel will be registered with using the "
                           "-d or --dashboard option.")
    dashboard_path = options.get('dashboard')
    dashboard_mod_path = ".".join([dashboard_path, "dashboard"])

    # Check the dashboard.py file in the dashboard app can be imported.
    # Add the dashboard information to our options to pass along if all
    # goes well.
    try:
        dashboard_mod = import_module(dashboard_mod_path)
        options["dash_path"] = dashboard_path
        options["dash_name"] = dashboard_path.split(".")[-1]
    except ImportError:
        raise CommandError("A dashboard.py module could not be imported "
                           "from the dashboard at %r."
                           % options.get("dashboard"))

    target = options.pop("target", None)
    if target == "auto":
        target = os.path.join(os.path.dirname(dashboard_mod.__file__), panel_name)
        if not os.path.exists(target):
            try:
                os.mkdir(target)
            except __HOLE__ as exc:
                raise CommandError("Unable to create panel directory: %s" % exc)

    # Use our default template if one isn't specified.
    if not options.get("template", None):
        options["template"] = self.template

    # We have html templates as well, so make sure those are included.
    options["extensions"].extend(["tmpl", "html"])

    # Check that the app_name cannot be imported.
    try:
        import_module(panel_name)
    except ImportError:
        pass
    else:
        raise CommandError("%r conflicts with the name of an existing "
                           "Python module and cannot be used as an app "
                           "name. Please try another name." % panel_name)

    super(Command, self).handle('panel', panel_name, target, **options)

    if not target:
        target = os.path.join(os.curdir, panel_name)

    # Rename our python template files.
    file_names = glob.glob(os.path.join(target, "*.py.tmpl"))
    for filename in file_names:
        os.rename(filename, filename[:-5])
OSError
dataset/ETHPy150Open CiscoSystems/avos/horizon/management/commands/startpanel.py/Command.handle
9,093
def get_context_text(pathfile):
    """
    Parse a file and return its context (yaml) and text.
    The context sits between the "{% zorna" tag and the "%}" tag.
    """
    start = re.compile(r'.*?{%\s*zorna\s+(.*?)(%}|$)')
    end = re.compile(r'(.*?)(%})')
    try:
        fin = open(pathfile, 'r')
    except __HOLE__:
        return '', ''
    started = False
    context = ''
    text = ''
    matcher = start
    for line in fin:
        match = matcher.match(line)
        if match:
            context = context + match.group(1)
            if started:
                break
            else:
                matcher = end
                started = True
        elif started:
            context = context + line
        else:
            text = text + line
    for line in fin:
        text = text + line
    # tag_re = re.compile(r'(%s\s*block(.*?)\s*%s)(.*?)(%s\s*endblock.*?\s*%s)' % (re.escape('{%'), re.escape('%}'), re.escape('{%'), re.escape('%}')), re.M | re.DOTALL)
    # tag_re2 = re.compile(r'(%s\s*zorna)(.*?)(\s*%s)' % (re.escape('{%'), re.escape('%}')), re.M | re.DOTALL)
    # for match in tag_re.finditer(text):
    #     g = match.groups()
    return context, text
IOError
dataset/ETHPy150Open zorna/zorna/zorna/utils.py/get_context_text
9,094
def load_obj(load_path):
    """
    Loads a saved on-disk representation to a python data structure. We
    currently support the following file formats:

        * python pickle (.pkl)

    Arguments:
        load_path (str): where to load the serialized object from
                         (full path and file name)
    """
    if isinstance(load_path, str):
        load_path = os.path.expandvars(os.path.expanduser(load_path))
        if load_path.endswith('.gz'):
            import gzip
            load_path = gzip.open(load_path)
        else:
            load_path = open(load_path)
    fname = load_path.name
    logger.debug("deserializing object from: %s", fname)
    try:
        return pickle.load(load_path)
    except __HOLE__:
        msg = ("Problems deserializing: %s. It's possible the interface "
               "for this object has changed since being serialized. You "
               "may need to remove and recreate it." % load_path)
        logger.error(msg)
        raise AttributeError(msg)
AttributeError
dataset/ETHPy150Open NervanaSystems/neon/neon/util/persist.py/load_obj
9,095
def load_class(ctype):
    """
    Helper function to take a string with the neon module and class name,
    then import and return the class object.

    Arguments:
        ctype (str): string with the neon module and class
                     (e.g. 'neon.layers.layer.Linear')

    Returns:
        class
    """
    # extract the class name and import the necessary module.
    class_path = ctype
    parts = class_path.split('.')
    module = '.'.join(parts[:-1])
    try:
        clss = __import__(module)
        for comp in parts[1:]:
            clss = getattr(clss, comp)
        return clss
    except (ValueError, __HOLE__) as err:
        if len(module) == 0:
            # try to find the module inside the neon package
            pkg = sys.modules['neon']
            prfx = pkg.__name__ + '.'
            for imptr, nm, _ in pkgutil.iter_modules(pkg.__path__, prefix=prfx):
                mod = importlib.import_module(nm)
                if hasattr(mod, ctype):
                    return getattr(mod, ctype)
        raise err
ImportError
dataset/ETHPy150Open NervanaSystems/neon/neon/util/persist.py/load_class
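A hedged usage sketch for load_class() above; the dotted path is the example from the docstring and the constructor argument is hypothetical:

Linear = load_class('neon.layers.layer.Linear')  # full dotted path
layer = Linear(nout=100)  # hypothetical constructor arguments

Passing a bare name such as 'Linear' exercises the fallback branch, which scans the neon package's modules for a matching attribute.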
9,096
def _get_data(self):
    adapter = self.data_source['adapter']
    geo_col = self.data_source.get('geo_column', 'geo')

    try:
        loader = getattr(self, '_get_data_%s' % adapter)
    except __HOLE__:
        raise RuntimeError('unknown adapter [%s]' % adapter)
    data = loader(self.data_source, dict(self.request.GET.iteritems()))

    return self._to_geojson(data, geo_col)
AttributeError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/reports/standard/maps.py/GenericMapReport._get_data
9,097
def _to_geojson(self, data, geo_col):
    def _parse_geopoint(raw):
        try:
            latlon = [float(k) for k in re.split(' *,? *', raw)[:2]]
            return [latlon[1], latlon[0]]  # geojson is lon, lat
        except __HOLE__:
            return None

    metadata = {}

    def points():
        for row in data:
            if '_meta' in row:  # not a real data row
                metadata.update(row['_meta'])
                continue

            geo = row[geo_col]
            if geo is None:
                continue

            e = geo
            depth = 0
            while hasattr(e, '__iter__'):
                e = e[0]
                depth += 1

            if depth < 2:
                if depth == 0:
                    geo = _parse_geopoint(geo)
                    if geo is None:
                        continue
                feature_type = 'Point'
            else:
                if depth == 2:
                    geo = [geo]
                    depth += 1
                feature_type = 'MultiPolygon' if depth == 4 else 'Polygon'

            properties = dict((k, v) for k, v in row.iteritems() if k != geo_col)
            # handle 'display value / raw value' fields (for backwards compatibility with
            # existing data sources)
            # note: this is not ideal for the maps report, as we have no idea how to properly
            # format legends; it's better to use a formatter function in the maps report config
            display_props = {}
            for k, v in properties.iteritems():
                if isinstance(v, dict) and set(v.keys()) == set(('html', 'sort_key')):
                    properties[k] = v['sort_key']
                    display_props['__disp_%s' % k] = v['html']
            properties.update(display_props)

            yield {
                'type': 'Feature',
                'geometry': {
                    'type': feature_type,
                    'coordinates': geo,
                },
                'properties': properties,
            }

    features = list(points())
    return {
        'type': 'FeatureCollection',
        'features': features,
        'metadata': metadata,
    }
ValueError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/reports/standard/maps.py/GenericMapReport._to_geojson
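The _parse_geopoint helper above is small enough to test standalone; this sketch reproduces it outside the class (the function name is illustrative):

import re

def parse_geopoint(raw):
    # "30.12, -87.6" -> [-87.6, 30.12]; GeoJSON orders coordinates lon, lat.
    try:
        lat, lon = [float(k) for k in re.split(' *,? *', raw)[:2]]
        return [lon, lat]
    except ValueError:
        return None  # unparseable rows are skipped by the caller

assert parse_geopoint('30.12, -87.6') == [-87.6, 30.12]
assert parse_geopoint('not a point') is None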
9,098
def do_bf():
    i = 0
    try:
        filename = "codes.txt"
        FILE = open(filename, "r")
        codes = FILE.readlines()
        FILE.close()
    except __HOLE__:
        screenLock.acquire()
        print "[+] codes.txt file not found! \n[+] Please put codes into codes.txt and re-run the program\n"
        screenLock.release()
        sys.exit(1)
    for code in codes:
        if code.find("#") >= 0:
            continue
        cmd = 'pilight-send -p raw -c "' + code.strip() + '"'
        screenLock.acquire()
        i = i + 1
        print(str(i) + " | " + cmd)
        screenLock.release()
        os.system(cmd)
        time.sleep(0.2)
IOError
dataset/ETHPy150Open mertsarica/hack4career/codes/pilight-bf.py/do_bf
9,099
@lazy_import
def _win32txf():
    try:
        import esky.fstransact.win32txf
    except __HOLE__:
        return None
    else:
        return esky.fstransact.win32txf
ImportError
dataset/ETHPy150Open cloudmatrix/esky/esky/fstransact/__init__.py/_win32txf