Dataset schema — each record below reads top-to-bottom as (index, function, label, info):

    Unnamed: 0   int64     record index (0 – 10k)
    function     string    Python function source, 79 – 138k chars; the masked
                           exception name is written as __HOLE__
    label        string    categorical, 20 classes; the exception class that
                           belongs in the __HOLE__ slot
    info         string    42 – 261 chars; provenance path of the form
                           dataset/ETHPy150Open <repo>/<file>/<function>
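A minimal sketch of how a record might be rehydrated into ordinary-looking source, assuming a record is available as a plain dict — fill_hole and the sample record here are hypothetical illustrations, not part of the dataset tooling:

    # Substitute the labeled exception class back into the __HOLE__ slot.
    def fill_hole(record):
        """Return the function text with __HOLE__ replaced by the label."""
        return record["function"].replace("__HOLE__", record["label"])

    record = {
        "function": ("def lookup(d, k):\n"
                     "    try:\n"
                     "        return d[k]\n"
                     "    except __HOLE__:\n"
                     "        return None"),
        "label": "KeyError",
        "info": "dataset/ETHPy150Open example/repo/file.py/lookup",
    }
    print(fill_hole(record))  # prints the function with `except KeyError:`

A plain string substitution suffices because each record contains exactly one __HOLE__ token.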
1,800
def addInterface(self, iTag, iType, clsName, addr):
    """ Add an interface to the ROS environment inside the container.

        @param iTag:     Tag which is used to identify the interface in
                         subsequent requests.
        @type  iTag:     str

        @param iType:    Type of the interface. The type has to be of the
                         form: {prefix}Interface
                         whit valid prefixes: ServiceClient, ServiceProvider,
                         Publisher, Subscriber
        @type  iType:    str

        @param clsName:  Message type/Service type consisting of the package
                         and the name of the message/service, i.e.
                         'std_msgs/Int32'.
        @type  clsName:  str

        @param addr:     ROS name/address which the interface should use.
        @type  addr:     str
    """
    try:
        validateName(iTag)
    except IllegalName:
        raise InvalidRequest('Interface tag is not a valid.')

    if iTag in self._interfaces:
        raise InvalidRequest("Can not use the same interface tag '{0}' "
                             'in the same container twice.'.format(iTag))

    try:
        iType = Types.encode(iType)
    except __HOLE__:
        raise InvalidRequest('Interface type is invalid (Unknown prefix).')

    interface = self._obj.createInterface(iType, clsName, addr)
    interface = Interface(interface, iType, clsName)
    self._interfaces[iTag] = interface
    interface.notifyOnDeath(self._interfaceDied)
TypeError
dataset/ETHPy150Open rapyuta/rce/rce-core/rce/core/wrapper.py/Container.addInterface
1,801
def removeInterface(self, iTag):
    """ Remove an interface from the ROS environment inside the container.

        @param iTag:    Tag which is used to identify the interface which
                        should be removed.
        @type  iTag:    str
    """
    try:
        self._interfaces.pop(iTag).destroy()
    except __HOLE__:
        raise InvalidRequest('Can not remove a non existent interface '
                             "'{0}' from the container.".format(iTag))
KeyError
dataset/ETHPy150Open rapyuta/rce/rce-core/rce/core/wrapper.py/Container.removeInterface
1,802
def getInterface(self, iTag):
    """ Return the wrapped interface instance matching the given tag.

        @param iTag:    Tag which is used to identify the interface which
                        should be returned.
        @type  iTag:    str

        @return:        Wrapped interface instance which was requested.
        @rtype:         rce.core.user.Interface
    """
    try:
        return self._interfaces[iTag]
    except __HOLE__:
        raise InvalidRequest('Can not get a non existent interface '
                             "'{0}' from the container.".format(iTag))
KeyError
dataset/ETHPy150Open rapyuta/rce/rce-core/rce/core/wrapper.py/Container.getInterface
1,803
def call(method, api_or_module, url, body='', headers=None, **params):
    """Simulates a round-trip call against the given API / URL"""
    api = API(api_or_module).http.server()
    response = StartResponseMock()
    headers = {} if headers is None else headers
    if not isinstance(body, str) and 'json' in headers.get('content-type', 'application/json'):
        body = output_format.json(body)
        headers.setdefault('content-type', 'application/json')

    result = api(create_environ(path=url, method=method, headers=headers,
                                query_string=urlencode(params, True), body=body), response)
    if result:
        try:
            response.data = result[0].decode('utf8')
        except __HOLE__:
            response.data = []
            for chunk in result:
                response.data.append(chunk.decode('utf8'))
            response.data = "".join(response.data)
        except UnicodeDecodeError:
            response.data = result[0]
    response.content_type = response.headers_dict['content-type']
    if response.content_type == 'application/json':
        response.data = json.loads(response.data)
    return response
TypeError
dataset/ETHPy150Open timothycrosley/hug/hug/test.py/call
1,804
def summary(cvsfile, write_func, *args):
    """Show the column names.
    """
    fsnames = cvsfile.fieldnames
    data_dict = {}
    for f in fsnames:
        data_dict[f] = []
    #print "\n".join(map( lambda x:":".join(map(str,x)) ,enumerate(fsnames)) )
    for l in cvsfile:
        for f in fsnames:
            v = l.setdefault(f, None)
            if v and v != "NA":
                data_dict[f].append(v)
    write_func("colnum:colname\tsum,mean,median,std,cutoff\n")
    for (i, f) in enumerate(fsnames):
        try:
            v_array = map(float, data_dict[f])
            v_sum = "%.2f" % sum(v_array)
            v_mean = "%.2f" % mean(v_array)
            v_median = "%.2f" % median(v_array)
            v_std = "%.2f" % std(v_array, float(v_mean))
            v_cutoff = "%.2f" % (float(v_mean) + float(v_std))
        except __HOLE__:
            (v_sum, v_mean, v_median, v_std, v_cutoff) = ["NA"] * 5
        write_func("%d:%s\t%s,%s,%s,%s,%s\n" % (i, f, v_sum, v_mean, v_median, v_std, v_cutoff))
ValueError
dataset/ETHPy150Open taoliu/taolib/Scripts/ce_histone_matrix.py/summary
1,805
def readPlistFromString(data):
    '''Read a plist data from a string. Return the root object.'''
    try:
        plistData = buffer(data)
    except __HOLE__, err:
        raise NSPropertyListSerializationException(err)
    dataObject, dummy_plistFormat, error = (
        NSPropertyListSerialization.
        propertyListFromData_mutabilityOption_format_errorDescription_(
            plistData, NSPropertyListMutableContainers, None, None))
    if dataObject is None:
        if error:
            error = error.encode('ascii', 'ignore')
        else:
            error = "Unknown error"
        raise NSPropertyListSerializationException(error)
    else:
        return dataObject
TypeError
dataset/ETHPy150Open munki/munki/code/client/munkilib/FoundationPlist.py/readPlistFromString
1,806
def _priority_from_env(self, val):
    """Gets priority pairs from env."""
    for part in val.split(':'):
        try:
            rule, priority = part.split('=')
            yield rule, int(priority)
        except __HOLE__:
            continue
ValueError
dataset/ETHPy150Open nvbn/thefuck/thefuck/conf.py/Settings._priority_from_env
1,807
def handle(self, addrport='', *args, **options):
    import django
    from google.appengine._internal.django.core.servers.basehttp import run, AdminMediaHandler, WSGIServerException
    from google.appengine._internal.django.core.handlers.wsgi import WSGIHandler
    if args:
        raise CommandError('Usage is runserver %s' % self.args)
    if not addrport:
        addr = ''
        port = '8000'
    else:
        try:
            addr, port = addrport.split(':')
        except ValueError:
            addr, port = '', addrport
    if not addr:
        addr = '127.0.0.1'

    if not port.isdigit():
        raise CommandError("%r is not a valid port number." % port)

    use_reloader = options.get('use_reloader', True)
    admin_media_path = options.get('admin_media_path', '')
    shutdown_message = options.get('shutdown_message', '')
    quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C'

    def inner_run():
        from google.appengine._internal.django.conf import settings
        from google.appengine._internal.django.utils import translation
        print "Validating models..."
        self.validate(display_num_errors=True)
        print "\nDjango version %s, using settings %r" % (django.get_version(), settings.SETTINGS_MODULE)
        print "Development server is running at http://%s:%s/" % (addr, port)
        print "Quit the server with %s." % quit_command

        # django.core.management.base forces the locale to en-us. We should
        # set it up correctly for the first request (particularly important
        # in the "--noreload" case).
        translation.activate(settings.LANGUAGE_CODE)

        try:
            handler = AdminMediaHandler(WSGIHandler(), admin_media_path)
            run(addr, int(port), handler)
        except WSGIServerException, e:
            # Use helpful error messages instead of ugly tracebacks.
            ERRORS = {
                13: "You don't have permission to access that port.",
                98: "That port is already in use.",
                99: "That IP address can't be assigned-to.",
            }
            try:
                error_text = ERRORS[e.args[0].args[0]]
            except (AttributeError, __HOLE__):
                error_text = str(e)
            sys.stderr.write(self.style.ERROR("Error: %s" % error_text) + '\n')
            # Need to use an OS exit because sys.exit doesn't work in a thread
            os._exit(1)
        except KeyboardInterrupt:
            if shutdown_message:
                print shutdown_message
            sys.exit(0)

    if use_reloader:
        from google.appengine._internal.django.utils import autoreload
        autoreload.main(inner_run)
    else:
        inner_run()
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/_internal/django/core/management/commands/runserver.py/Command.handle
1,808
def __init__(self, **config):
    _Graph.__init__(self, **config)
    self.add_defaults(NetGraph.defaults)
    if self.interface == "auto":
        try:
            self.interface = self.get_main_iface()
        except __HOLE__:
            logger.warning(
                "NetGraph - Automatic interface detection failed, "
                "falling back to 'eth0'"
            )
            self.interface = "eth0"
    self.filename = '/sys/class/net/{interface}/statistics/{type}'.format(
        interface=self.interface,
        type=self.bandwidth_type == 'down' and 'rx_bytes' or 'tx_bytes'
    )
    self.bytes = 0
    self.bytes = self._getValues()
RuntimeError
dataset/ETHPy150Open qtile/qtile/libqtile/widget/graph.py/NetGraph.__init__
1,809
def _getValues(self):
    try:
        with open(self.filename) as file:
            val = int(file.read())
            rval = val - self.bytes
            self.bytes = val
            return rval
    except __HOLE__:
        return 0
IOError
dataset/ETHPy150Open qtile/qtile/libqtile/widget/graph.py/NetGraph._getValues
1,810
@staticmethod
def get_main_iface():
    def make_route(line):
        return dict(zip(['iface', 'dest'], line.split()))

    with open('/proc/net/route', 'r') as fp:
        lines = fp.readlines()
    routes = [make_route(line) for line in lines[1:]]
    try:
        return next(
            (r for r in routes if not int(r['dest'], 16)),
            routes[0]
        )['iface']
    except (__HOLE__, IndexError, ValueError):
        raise RuntimeError('No valid interfaces available')
KeyError
dataset/ETHPy150Open qtile/qtile/libqtile/widget/graph.py/NetGraph.get_main_iface
1,811
def _getActivity(self):
    try:
        # io_ticks is field number 9
        with open(self.path) as f:
            io_ticks = int(f.read().split()[9])
    except __HOLE__:
        return 0
    activity = io_ticks - self._prev
    self._prev = io_ticks
    return activity
IOError
dataset/ETHPy150Open qtile/qtile/libqtile/widget/graph.py/HDDBusyGraph._getActivity
1,812
def my_import(name):
    try:
        imp = __import__(name)
    except __HOLE__ as err:
        raise JobError("Failed to import %s with error %s" % (name, err))
    mods = name.split('.')
    if len(mods) > 1:
        for mod in mods[1:]:
            imp = getattr(imp, mod)
    return imp
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/jobs.py/my_import
1,813
def find_jobs(jobs_dir):
    try:
        return [f[:-3] for f in os.listdir(jobs_dir)
                if not f.startswith('_') and f.endswith(".py")]
    except __HOLE__:
        return []
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/jobs.py/find_jobs
1,814
def get_jobs(when=None, only_scheduled=False):
    """
    Returns a dictionary mapping of job names together with their respective
    application class.
    """
    # FIXME: HACK: make sure the project dir is on the path when executed as ./manage.py
    import sys
    try:
        cpath = os.path.dirname(os.path.realpath(sys.argv[0]))
        ppath = os.path.dirname(cpath)
        if ppath not in sys.path:
            sys.path.append(ppath)
    except:
        pass
    _jobs = {}
    if True:
        from django.conf import settings
        for app_name in settings.INSTALLED_APPS:
            scandirs = (None, 'minutely', 'quarter_hourly', 'hourly', 'daily',
                        'weekly', 'monthly', 'yearly')
            if when:
                scandirs = None, when
            for subdir in scandirs:
                try:
                    path = find_job_module(app_name, subdir)
                    for name in find_jobs(path):
                        if (app_name, name) in _jobs:
                            raise JobError("Duplicate job %s" % name)
                        job = import_job(app_name, name, subdir)
                        if only_scheduled and job.when is None:
                            # only include jobs which are scheduled
                            continue
                        if when and job.when != when:
                            # generic job not in same schedule
                            continue
                        _jobs[(app_name, name)] = job
                except __HOLE__:
                    # No job module -- continue scanning
                    pass
    return _jobs
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/jobs.py/get_jobs
1,815
def _resolve_device_type(self, device):
    """
    Given a device, determines if it is a CloudServer, a
    CloudLoadBalancer, or an invalid device.
    """
    try:
        from tests.unit import fakes
        server_types = (pyrax.CloudServer, fakes.FakeServer)
        lb_types = (CloudLoadBalancer, fakes.FakeLoadBalancer,
                    fakes.FakeDNSDevice)
    except __HOLE__:
        # Not running with tests
        server_types = (pyrax.CloudServer, )
        lb_types = (CloudLoadBalancer, )
    if isinstance(device, server_types):
        device_type = "server"
    elif isinstance(device, lb_types):
        device_type = "loadbalancer"
    else:
        raise exc.InvalidDeviceType("The device '%s' must be a CloudServer "
                                    "or a CloudLoadBalancer." % device)
    return device_type
ImportError
dataset/ETHPy150Open rackspace/pyrax/pyrax/clouddns.py/CloudDNSManager._resolve_device_type
1,816
def update_ptr_record(self, device, record, domain_name, data=None,
                      ttl=None, comment=None):
    """
    Updates a PTR record with the supplied values.
    """
    device_type = self._resolve_device_type(device)
    href, svc_name = self._get_ptr_details(device, device_type)
    try:
        rec_id = record.id
    except __HOLE__:
        rec_id = record
    rec = {"name": domain_name,
           "id": rec_id,
           "type": "PTR",
           "data": data,
           }
    if ttl is not None:
        # Minimum TTL is 300 seconds
        rec["ttl"] = max(300, ttl)
    if comment is not None:
        # Maximum comment length is 160 chars
        rec["comment"] = comment[:160]
    body = {"recordsList": {
                "records": [rec]},
            "link": {
                "content": "",
                "href": href,
                "rel": svc_name,
            }}
    uri = "/rdns"
    try:
        resp, resp_body = self._async_call(uri, body=body, method="PUT",
                                           has_response=False,
                                           error_class=exc.PTRRecordUpdateFailed)
    except exc.EndpointNotFound as e:
        raise exc.InvalidPTRRecord("The record domain/IP address "
                                   "information is not valid for this device.")
    return resp_body.get("status") == "COMPLETED"
AttributeError
dataset/ETHPy150Open rackspace/pyrax/pyrax/clouddns.py/CloudDNSManager.update_ptr_record
1,817
def next(self):
    """
    Return the next available item. If there are no more items in the
    local 'results' list, check if there is a 'next_uri' value. If so,
    use that to get the next page of results from the API, and return
    the first item from that query.
    """
    try:
        return self.results.pop(0)
    except __HOLE__:
        if self.next_uri is None:
            raise StopIteration()
        else:
            if not self.next_uri:
                if self.domain:
                    self.results = self.list_method(self.domain)
                else:
                    self.results = self.list_method()
            else:
                args = self.extra_args
                self.results = self._list_method(self.next_uri, *args)
            self.next_uri = self.manager._paging.get(
                self.paging_service, {}).get("next_uri")
        # We should have more results.
        try:
            return self.results.pop(0)
        except IndexError:
            raise StopIteration()
IndexError
dataset/ETHPy150Open rackspace/pyrax/pyrax/clouddns.py/ResultsIterator.next
1,818
def delete_cache_file(cache_file):
    """
    Deletes a cache file. Ignores some common, typically harmless errors.

    :param basestring cache_dir: Full path to a cache file to delete.
    """
    try:
        os.unlink(cache_file)
    except (__HOLE__, OSError):
        pass
IOError
dataset/ETHPy150Open gtaylor/EVE-Market-Data-Uploader/emdu/utils.py/delete_cache_file
1,819
@permission_required("core.manage_shop") def manage_shipping(request): """Dispatches to the first shipping method or to the add shipping method form if there is no shipping method. """ try: shipping_method = ShippingMethod.objects.all()[0] except __HOLE__: url = reverse("lfs_manage_no_shipping_methods") else: url = reverse("lfs_manage_shipping_method", kwargs={"shipping_method_id": shipping_method.id}) return HttpResponseRedirect(url)
IndexError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/shipping_methods/views.py/manage_shipping
1,820
@permission_required("core.manage_shop") def shipping_methods(request, template_name="manage/shipping_methods/shipping_methods.html"): """Returns all shipping methods as html. This view is used as a part within the manage shipping view. """ try: current_id = int(request.path.split("/")[-1]) except __HOLE__: current_id = "" return render_to_string(template_name, RequestContext(request, { "current_id": current_id, "shipping_methods": ShippingMethod.objects.all(), }))
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/shipping_methods/views.py/shipping_methods
1,821
@permission_required("core.manage_shop") def add_shipping_price(request, shipping_method_id): """Adds given shipping price (via request body) to shipping method with give id. Returns JSON encoded data. """ try: price = float(request.POST.get("price", 0)) except __HOLE__: price = 0.0 shipping_method = get_object_or_404(ShippingMethod, pk=shipping_method_id) shipping_method.prices.create(price=price) _update_price_positions(shipping_method) message = _(u"Price has been added") html = [["#prices", shipping_method_prices(request, shipping_method_id)]] result = json.dumps({ "html": html, "message": message, }, cls=LazyEncoder) return HttpResponse(result, content_type='application/json')
ValueError
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/shipping_methods/views.py/add_shipping_price
1,822
@permission_required("core.manage_shop") def update_shipping_prices(request, shipping_method_id): """Saves/Deletes shipping prices with passed ids (via request body) dependent on given action (via request body). """ shipping_method = get_object_or_404(ShippingMethod, pk=shipping_method_id) action = request.POST.get("action") if action == "delete": message = _(u"Prices have been deleted") for key in request.POST.keys(): if key.startswith("delete-"): try: id = key.split("-")[1] price = get_object_or_404(ShippingMethodPrice, pk=id) except (IndexError, __HOLE__): continue else: price.delete() elif action == "update": message = _(u"Prices have been updated") for key, value in request.POST.items(): if key.startswith("price-"): try: id = key.split("-")[1] price = get_object_or_404(ShippingMethodPrice, pk=id) except (IndexError, ObjectDoesNotExist): continue else: try: value = float(value) except ValueError: value = 0.0 price.price = value price.priority = request.POST.get("priority-%s" % id, 0) price.save() _update_price_positions(shipping_method) html = [["#prices", shipping_method_prices(request, shipping_method_id)]] result = json.dumps({ "html": html, "message": message, }, cls=LazyEncoder) return HttpResponse(result, content_type='application/json')
ObjectDoesNotExist
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/shipping_methods/views.py/update_shipping_prices
1,823
@permission_required("core.manage_shop") @require_POST def delete_shipping_method(request, shipping_method_id): """Deletes shipping method with passed shipping id. All customers, which have selected this shipping method are getting the default shipping method. """ try: shipping_method = ShippingMethod.objects.get(pk=shipping_method_id) except __HOLE__: pass else: for customer in Customer.objects.filter(selected_shipping_method=shipping_method_id): customer.selected_shipping_method = shipping_utils.get_default_shipping_method(request) customer.save() shipping_method.delete() return lfs.core.utils.set_message_cookie( url=reverse("lfs_manage_shipping"), msg=_(u"Shipping method has been deleted."), )
ObjectDoesNotExist
dataset/ETHPy150Open diefenbach/django-lfs/lfs/manage/shipping_methods/views.py/delete_shipping_method
1,824
def iter_format_modules(lang):
    """
    Does the heavy lifting of finding format modules.
    """
    if check_for_language(lang):
        format_locations = ['django.conf.locale.%s']
        if settings.FORMAT_MODULE_PATH:
            format_locations.append(settings.FORMAT_MODULE_PATH + '.%s')
            format_locations.reverse()
        locale = to_locale(lang)
        locales = [locale]
        if '_' in locale:
            locales.append(locale.split('_')[0])
        for location in format_locations:
            for loc in locales:
                try:
                    yield import_module('.formats', location % loc)
                except __HOLE__:
                    pass
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/utils/formats.py/iter_format_modules
1,825
def get_format(format_type, lang=None, use_l10n=None):
    """
    For a specific format type, returns the format for the current
    language (locale), defaults to the format in the settings.
    format_type is the name of the format, e.g. 'DATE_FORMAT'

    If use_l10n is provided and is not None, that will force the value to
    be localized (or not), overriding the value of settings.USE_L10N.
    """
    format_type = force_str(format_type)
    if use_l10n or (use_l10n is None and settings.USE_L10N):
        if lang is None:
            lang = get_language()
        cache_key = (format_type, lang)
        try:
            cached = _format_cache[cache_key]
            if cached is not None:
                return cached
            else:
                # Return the general setting by default
                return getattr(settings, format_type)
        except KeyError:
            for module in get_format_modules(lang):
                try:
                    val = getattr(module, format_type)
                    for iso_input in ISO_INPUT_FORMATS.get(format_type, ()):
                        if iso_input not in val:
                            if isinstance(val, tuple):
                                val = list(val)
                            val.append(iso_input)
                    _format_cache[cache_key] = val
                    return val
                except __HOLE__:
                    pass
            _format_cache[cache_key] = None
    return getattr(settings, format_type)
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.5/django/utils/formats.py/get_format
1,826
def test_performance(self):
    with self.toggle_run_expected_output():
        start_time = time.time()
        try:
            self.assert_expected_output()
        except __HOLE__:
            pass
        end_time = time.time()
    total_time = end_time - start_time
    # This finishes in under a second on python 2.7
    expected_time = 3
    if sys.version_info[0] == 3:
        expected_time = 5  # Slower on python 3
    error_message = 'Total time: %s; Expected time: %d' % (
        total_time,
        expected_time,
    )
    assert total_time < expected_time, error_message
AssertionError
dataset/ETHPy150Open CenterForOpenScience/pydocx/tests/export/test_xml.py/DeeplyNestedTableTestCase.test_performance
1,827
def test_performance(self):
    with self.toggle_run_expected_output():
        start_time = time.time()
        try:
            self.assert_expected_output()
        except __HOLE__:
            pass
        end_time = time.time()
    total_time = end_time - start_time
    # This finishes in under a second on python 2.7
    expected_time = 3
    if sys.version_info[0] == 3:
        expected_time = 7  # Slower on python 3
    error_message = 'Total time: %s; Expected time: %d' % (
        total_time,
        expected_time,
    )
    assert total_time < expected_time, error_message
AssertionError
dataset/ETHPy150Open CenterForOpenScience/pydocx/tests/export/test_xml.py/LargeCellTestCase.test_performance
1,828
def get(self, request, slug, year, month):
    addon = self.get_addon(request, slug)
    check_stats_permission(request, addon)

    data = []

    path = u'{id}/{year}/{month}/'.format(
        id=addon.id, year=year, month=month)

    try:
        files = storage.listdir(path)
    except __HOLE__:
        return Response({
            'error': 'No archived data for addon "%s" found.' % addon.slug,
        }, status=status.HTTP_404_NOT_FOUND)

    for file_ in files[1]:
        file_name, _ = os.path.splitext(file_)
        date_str, model_name = file_name.rsplit('_', 1)
        day = datetime.strptime(date_str, '%Y_%m_%d').date()

        data.append({
            'addon_id': addon.id,
            'date': day,
            'model_name': model_name
        })

    return Response(data)
OSError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/stats/views.py/ArchiveListView.get
1,829
def get(self, request, slug, year, month, day, model_name):
    addon = self.get_addon(request, slug)
    check_stats_permission(request, addon)

    tm = '{id}/{year}/{month}/{year}_{month}_{day}_{model_name}.json'
    path = tm.format(
        id=addon.id, year=year, month=month, day=day,
        model_name=model_name)

    try:
        with storage.open(path) as fobj:
            data = json.load(fobj)
    except (__HOLE__, IOError):
        return Response({
            'error': 'No archived data for addon "%s" found.' % addon.slug,
        }, status=status.HTTP_404_NOT_FOUND)

    return Response(data)
OSError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/stats/views.py/ArchiveView.get
1,830
def test_fs_configuration(fs_config, hadoop_bin_conf):
    """Test FS configuration. Returns list of (confvar, error)."""
    TEST_FILE = '/tmp/.hue_config_test.%s' % (random.randint(0, 9999999999))

    res = []
    res.extend(validate_port(fs_config.NN_THRIFT_PORT))
    res.extend(validate_port(fs_config.NN_HDFS_PORT))
    if res:
        return res

    # Check thrift plugin
    try:
        fs = HadoopFileSystem.from_config(
            fs_config, hadoop_bin_path=hadoop_bin_conf.get())
        fs.setuser(fs.superuser)
        ls = fs.listdir('/')
    except TTransport.TTransportException:
        msg = 'Failed to contact Namenode plugin at %s:%s.' % \
            (fs_config.NN_HOST.get(), fs_config.NN_THRIFT_PORT.get())
        LOG.exception(msg)
        res.append((fs_config, msg))
        return res
    except (IOError, IOException):
        msg = 'Failed to see HDFS root directory at %s. Please check HDFS configuration.' % (fs.uri,)
        LOG.exception(msg)
        res.append((fs_config, msg))
        return res

    if 'tmp' not in ls:
        return res

    # Check nn port (via upload)
    try:
        w_file = fs.open(TEST_FILE, 'w')
    except __HOLE__, ex:
        msg = 'Failed to execute Hadoop (%s)' % (hadoop_bin_conf.get(),)
        LOG.exception(msg)
        res.append((hadoop_bin_conf, msg))
        return res

    try:
        try:
            w_file.write('hello world')
            w_file.close()
        except IOError:
            msg = 'Failed to upload files using %s' % (fs.uri,)
            LOG.exception(msg)
            res.append((fs_config.NN_HDFS_PORT, msg))
            return res

        # Check dn plugin (via read)
        try:
            r_file = fs.open(TEST_FILE, 'r')
            r_file.read()
        except Exception:
            msg = 'Failed to read file. Are all datanodes configured with the HUE plugin?'
            LOG.exception(msg)
            res.append((fs_config, msg))
    finally:
        # Cleanup. Ignore if file not found.
        try:
            if fs.exists(TEST_FILE):
                fs.remove(TEST_FILE)
        except Exception, ex:
            LOG.error('Failed to cleanup test file "%s:%s": %s' % (fs.uri, TEST_FILE, ex))

    return res
OSError
dataset/ETHPy150Open cloudera/hue/desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py/test_fs_configuration
1,831
def create_home_dir(self, home_path=None):
    if home_path is None:
        home_path = self.get_home_dir()

    mode = int(HOME_DIR_PERMISSIONS.get(), 8)
    if not self.exists(home_path):
        user = self.user
        try:
            try:
                self.setuser(self.superuser)
                self.mkdir(home_path)
                self.chmod(home_path, mode)
                self.chown(home_path, user, user)
            except __HOLE__:
                msg = 'Failed to create home dir ("%s") as superuser %s' % (home_path, self.superuser)
                LOG.exception(msg)
                raise
        finally:
            self.setuser(user)
IOError
dataset/ETHPy150Open cloudera/hue/desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py/Hdfs.create_home_dir
1,832
@require_open
def close(self):
    try:
        (stdout, stderr) = self.putter.communicate()
    except __HOLE__, ioe:
        logging.debug("Saw IOError writing %r" % self.path, exc_info=1)
        if ioe.errno == errno.EPIPE:
            stdout, stderr = self.putter.communicate()

    self.closed = True
    if stderr:
        LOG.warn("HDFS FileUpload (cmd='%s', env='%s') outputted stderr:\n%s" %
                 (repr(self.subprocess_cmd), repr(self.subprocess_env), stderr))
    if stdout:
        LOG.info("HDFS FileUpload (cmd='%s', env='%s') outputted stdout:\n%s" %
                 (repr(self.subprocess_cmd), repr(self.subprocess_env), stdout))
    if self.putter.returncode != 0:
        raise IOError("hdfs put returned bad code: %d\nstderr: %s" %
                      (self.putter.returncode, stderr))
    LOG.info("Completed upload: %s" % repr(self.subprocess_cmd))
IOError
dataset/ETHPy150Open cloudera/hue/desktop/libs/hadoop/src/hadoop/fs/hadoopfs.py/FileUpload.close
1,833
def cd(self, *args, **kwargs):
    pyTerm = kwargs["pyTerm"]
    try:
        sequence = kwargs["sequence"][0]
    except __HOLE__:
        sequence = ""
    if sequence == "..":
        # upward
        currentPath = pyTerm.getPath().split("/")[::-1][1::]
        if currentPath[0] == '':
            pyTerm.setPath('/')
        else:
            pyTerm.setPath("/".join(currentPath[::-1]))
    elif sequence == "" or sequence == "~":
        pyTerm.setPath("/home/" + pyTerm.getUser())
    else:
        # downward
        currentPath = os.path.join(pyTerm.getPath(), sequence)
        if os.path.isdir(currentPath):
            pyTerm.setPath(currentPath)
        else:
            print 'Invalid Directory!'
    os.chdir(pyTerm.currentPath)
IndexError
dataset/ETHPy150Open jeffersonmourak/pyTerm/plugins/pathNavigator.py/pathNavigator.cd
1,834
def mongoengine_validate_wrapper(old_clean, new_clean):
    """
    A wrapper function to validate formdata against mongoengine-field
    validator and raise a proper django.forms ValidationError if there
    are any problems.
    """
    def inner_validate(value):
        value = old_clean(value)
        try:
            new_clean(value)
            return value
        except __HOLE__, e:
            raise forms.ValidationError(e)
    return inner_validate
ValidationError
dataset/ETHPy150Open stephrdev/django-mongoforms/mongoforms/utils.py/mongoengine_validate_wrapper
1,835
def _preprocess_fields(form):
    for field in form.fields:
        name = form.fields[field].widget.__class__.__name__.lower()
        if not name.startswith("radio") and not name.startswith("checkbox"):
            try:
                form.fields[field].widget.attrs["class"] += " form-control"
            except __HOLE__:
                form.fields[field].widget.attrs["class"] = " form-control"
    return form
KeyError
dataset/ETHPy150Open mgaitan/waliki/waliki/templatetags/bootstrap_tags.py/_preprocess_fields
1,836
@click.command(short_help="Transform coordinates.")
@click.argument('INPUT', default='-', required=False)
@click.option('--src-crs', '--src_crs', default='EPSG:4326', help="Source CRS.")
@click.option('--dst-crs', '--dst_crs', default='EPSG:4326', help="Destination CRS.")
@precision_opt
@click.pass_context
def transform(ctx, input, src_crs, dst_crs, precision):
    import rasterio.warp

    verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1
    logger = logging.getLogger('rio')

    # Handle the case of file, stream, or string input.
    try:
        src = click.open_file(input).readlines()
    except __HOLE__:
        src = [input]

    try:
        with Env(CPL_DEBUG=verbosity > 2) as env:
            if src_crs.startswith('EPSG'):
                src_crs = {'init': src_crs}
            elif os.path.exists(src_crs):
                with rasterio.open(src_crs) as f:
                    src_crs = f.crs
            if dst_crs.startswith('EPSG'):
                dst_crs = {'init': dst_crs}
            elif os.path.exists(dst_crs):
                with rasterio.open(dst_crs) as f:
                    dst_crs = f.crs
            for line in src:
                coords = json.loads(line)
                xs = coords[::2]
                ys = coords[1::2]
                xs, ys = rasterio.warp.transform(src_crs, dst_crs, xs, ys)
                if precision >= 0:
                    xs = [round(v, precision) for v in xs]
                    ys = [round(v, precision) for v in ys]
                result = [0] * len(coords)
                result[::2] = xs
                result[1::2] = ys
                print(json.dumps(result))

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
IOError
dataset/ETHPy150Open mapbox/rasterio/rasterio/rio/transform.py/transform
1,837
def mkdir_p(dirname):
    assert dirname is not None
    if dirname == '':
        return
    try:
        os.makedirs(dirname)
    except __HOLE__ as e:
        if e.errno != 17:
            raise e
OSError
dataset/ETHPy150Open ppwwyyxx/tensorpack/tensorpack/utils/fs.py/mkdir_p
1,838
@defer.inlineCallbacks
def test_process_chain(self):
    x = yield process_chain([cb1, cb2, cb3], 'res', 'v1', 'v2')
    self.assertEqual(x, "(cb3 (cb2 (cb1 res v1 v2) v1 v2) v1 v2)")

    gotexc = False
    try:
        yield process_chain([cb1, cb_fail, cb3], 'res', 'v1', 'v2')
    except __HOLE__ as e:
        gotexc = True
    self.assertTrue(gotexc)
TypeError
dataset/ETHPy150Open scrapy/scrapy/tests/test_utils_defer.py/DeferUtilsTest.test_process_chain
1,839
def extract_and_validate(email):
    try:
        stream_name, token = decode_email_address(email)
    except (__HOLE__, ValueError):
        raise ZulipEmailForwardError("Malformed email recipient " + email)
    if not valid_stream(stream_name, token):
        raise ZulipEmailForwardError("Bad stream token from email recipient " + email)

    return Stream.objects.get(email_token=token)
TypeError
dataset/ETHPy150Open zulip/zulip/zerver/lib/email_mirror.py/extract_and_validate
1,840
@defer.inlineCallbacks
def run_as_leader(self):
    logger.info('Running as leader for service [{0}]. pid: [{1}]'.format(self.service_name, os.getpid()))
    try:
        notification_queue = yield self.cancellable(self.listener.listen(self.channels))
        yield self.cancellable(self.process_initial())

        while self.running:
            event = yield self.cancellable(notification_queue.get())

            handler = self.get_handler(event.channel)
            if not handler:
                logger.warn('no handler for event [{0}]'.format(event.channel))
                continue

            try:
                payload = self.get_payload(event)
            except (ValueError, __HOLE__):
                continue

            try:
                result = yield handler(payload)
            except defer.CancelledError:
                raise
            except Exception as e:
                logger.exception('unhandled exception in run_as_leader')
    except defer.CancelledError:
        pass
    finally:
        self.listener.unlisten(notification_queue)
TypeError
dataset/ETHPy150Open foundit/Piped/contrib/database/piped_database/service.py/PostgresListenerService.run_as_leader
1,841
def __init__(self, arguments):
    """ Runs the command specified as an argument with the options specified """
    self.config_file = arguments['--config']
    self.config = configparser.ConfigParser()
    self.client_id = None
    self.client_secret = None
    self.access_token = None

    if arguments['authorize']:
        self.client_id = arguments['--client_id']
        self.client_secret = arguments['--client_secret']
        self.authorize()
    elif not arguments['--version'] and not arguments['--help']:
        try:
            # Fail if config file doesn't exist or is missing information
            self.read_config()
        except (__HOLE__, configparser.NoOptionError,
                configparser.NoSectionError):
            print('Missing config information, please run '
                  '"misfit authorize"')
        else:
            # Everything is good! Get the requested resource(s)
            self.get_resource(arguments)
IOError
dataset/ETHPy150Open orcasgit/python-misfit/misfit/cli.py/MisfitCli.__init__
1,842
def read_config(self):
    """ Read credentials from the config file """
    with open(self.config_file) as cfg:
        try:
            self.config.read_file(cfg)
        except __HOLE__:
            # Not python 3.X fallback
            self.config.readfp(cfg)
    self.client_id = self.config.get('misfit', 'client_id')
    self.client_secret = self.config.get('misfit', 'client_secret')
    self.access_token = self.config.get('misfit', 'access_token')
AttributeError
dataset/ETHPy150Open orcasgit/python-misfit/misfit/cli.py/MisfitCli.read_config
1,843
def main(argv):
    if len(argv) < 2:
        print "Usage: %s places.sqlite" % __program__
        sys.exit(1)

    encoding = locale.getpreferredencoding()
    if encoding.upper() != "UTF-8":
        print "%s requires an UTF-8 capable console/terminal" % __program__
        sys.exit(1)

    files_to_process = []
    for input_glob in argv[1:]:
        files_to_process += glob.glob(input_glob)

    for input_file in files_to_process:
        firefox3 = Firefox3History(open(input_file))
        for timestamp, entry_type, url, title in firefox3.Parse():
            try:
                date_string = datetime.datetime(1970, 1, 1)
                date_string += datetime.timedelta(microseconds=timestamp)
                date_string = u"%s +00:00" % (date_string)
            except __HOLE__:
                date_string = timestamp
            except ValueError:
                date_string = timestamp

            output_string = "%s\t%s\t%s\t%s" % (
                date_string, entry_type, url, title)
            print output_string.encode("UTF-8")
TypeError
dataset/ETHPy150Open google/grr/grr/parsers/firefox3_history.py/main
1,844
def __init__(self, host, port=22, username=None, password=None,
             key_filename=None, cwd=None, label=None, redirect_output=False,
             block=False):
    """
    If username, password, and key_filename are None, the SSH will be use the
    default ssh key loaded into the system and will work if the destination
    host is configured to accept that key.

    Args:
        host: the server address to connect to.
        port: the server port to connect to (default 22)
        username: the username to authenticate as (default local username)
        password: password to authenticate or to unlock the private key
        key_filename: private key for authentication
        cwd: working dir for commands
        label: human readable label to associated with output
        redirect_output: If true remote stdout & stderr are redirected to stdout
        block: if True execute_command will block until the command is complete
    """
    self._host = host
    self._port = port
    self._username = username
    self._password = password
    self._key_filename = key_filename
    self._ssh_client = None
    self._ssh_cls = None
    self.redirect_output = redirect_output
    self.block = block
    self.cwd = cwd
    self.label = label or ""
    if self._ssh_cls is None:
        try:
            import paramiko
        except __HOLE__:
            raise RuntimeError('''Must install paramiko to use ssh: \n'''
                               ''' $ sudo pip install paramiko ''')
        # Suppress normal SSH messages
        logging.getLogger("paramiko").setLevel(logging.WARN)
        self._ssh_cls = paramiko.SSHClient
    self.log = logging.getLogger("SSHEntity")
ImportError
dataset/ETHPy150Open ucb-sts/sts/sts/entities/base.py/SSHEntity.__init__
1,845
@property
def check_key_policy(self):
    """
    Returns the the policy for missing host keys

    Default: accept all keys
    """
    try:
        import paramiko
    except __HOLE__:
        raise RuntimeError('''Must install paramiko to use ssh: \n'''
                           ''' $ sudo pip install paramiko ''')
    return paramiko.AutoAddPolicy()
ImportError
dataset/ETHPy150Open ucb-sts/sts/sts/entities/base.py/SSHEntity.check_key_policy
1,846
def dbid_to_obj(inp, objclass, raise_errors=True):
    """
    Convert a #dbid to a valid object.

    Args:
        inp (str or int): A valid dbref.
        objclass (class): A valid django model to filter against.
        raise_errors (bool, optional): Whether to raise errors
            or return `None` on errors.

    Returns:
        obj (Object or None): An entity loaded from the dbref.

    Raises:
        Exception: If `raise_errors` is `True` and
            `objclass.objects.get(id=dbref)` did not return a valid
            object.
    """
    dbid = dbref(inp)
    if not dbid:
        # we only convert #dbrefs
        return inp
    try:
        if int(inp) < 0:
            return None
    except __HOLE__:
        return None

    # if we get to this point, inp is an integer dbref; get the matching object
    try:
        return objclass.objects.get(id=inp)
    except Exception:
        if raise_errors:
            raise
        return inp
ValueError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/dbid_to_obj
1,847
def to_unicode(obj, encoding='utf-8', force_string=False):
    """
    This decodes a suitable object to the unicode format.

    Args:
        obj (any): Object to decode to unicode.
        encoding (str, optional): The encoding type to use for the dedoding.
        force_string (bool, optional): Always convert to string, no
            matter what type `obj` is initially.

    Returns:
        result (unicode or any): Will return a unicode object if input
            was a string. If input was not a string, the original will be
            returned unchanged unless `force_string` is also set.

    Notes:
        One needs to encode the obj back to utf-8 before writing to disk
        or printing. That non-string objects are let through without
        conversion is important for e.g. Attributes.
    """
    if force_string and not isinstance(obj, basestring):
        # some sort of other object. Try to
        # convert it to a string representation.
        if hasattr(obj, '__str__'):
            obj = obj.__str__()
        elif hasattr(obj, '__unicode__'):
            obj = obj.__unicode__()
        else:
            # last resort
            obj = str(obj)

    if isinstance(obj, basestring) and not isinstance(obj, unicode):
        try:
            obj = unicode(obj, encoding)
            return obj
        except UnicodeDecodeError:
            for alt_encoding in ENCODINGS:
                try:
                    obj = unicode(obj, alt_encoding)
                    return obj
                except __HOLE__:
                    pass
        raise Exception("Error: '%s' contains invalid character(s) not in %s." % (obj, encoding))
    return obj
UnicodeDecodeError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/to_unicode
1,848
def validate_email_address(emailaddress):
    """
    Checks if an email address is syntactically correct.

    Args:
        emailaddress (str): Email address to validate.

    Returns:
        is_valid (bool): If this is a valid email or not.

    Notes.
        (This snippet was adapted from
        http://commandline.org.uk/python/email-syntax-check.)
    """
    emailaddress = r"%s" % emailaddress

    domains = ("aero", "asia", "biz", "cat", "com", "coop",
               "edu", "gov", "info", "int", "jobs", "mil", "mobi", "museum",
               "name", "net", "org", "pro", "tel", "travel")

    # Email address must be more than 7 characters in total.
    if len(emailaddress) < 7:
        return False  # Address too short.

    # Split up email address into parts.
    try:
        localpart, domainname = emailaddress.rsplit('@', 1)
        host, toplevel = domainname.rsplit('.', 1)
    except __HOLE__:
        return False  # Address does not have enough parts.

    # Check for Country code or Generic Domain.
    if len(toplevel) != 2 and toplevel not in domains:
        return False  # Not a domain name.

    for i in '-_.%+.':
        localpart = localpart.replace(i, "")
    for i in '-_.':
        host = host.replace(i, "")

    if localpart.isalnum() and host.isalnum():
        return True  # Email address is fine.
    else:
        return False  # Email address has funny characters.
ValueError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/validate_email_address
1,849
def uses_database(name="sqlite3"):
    """
    Checks if the game is currently using a given database. This is a
    shortcut to having to use the full backend name.

    Args:
        name (str): One of 'sqlite3', 'mysql', 'postgresql_psycopg2' or
            'oracle'.

    Returns:
        uses (bool): If the given database is used or not.
    """
    try:
        engine = settings.DATABASES["default"]["ENGINE"]
    except __HOLE__:
        engine = settings.DATABASE_ENGINE
    return engine == "django.db.backends.%s" % name
KeyError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/uses_database
1,850
def clean_object_caches(obj):
    """
    Clean all object caches on the given object.

    Args:
        obj (Object instace): An object whose caches to clean.

    Notes:
        This is only the contents cache these days.
    """
    global _TYPECLASSMODELS, _OBJECTMODELS
    if not _TYPECLASSMODELS:
        from evennia.typeclasses import models as _TYPECLASSMODELS

    if not obj:
        return

    # contents cache
    try:
        _SA(obj, "_contents_cache", None)
    except __HOLE__:
        pass

    # on-object property cache
    [_DA(obj, cname) for cname in viewkeys(obj.__dict__)
     if cname.startswith("_cached_db_")]
    try:
        hashid = _GA(obj, "hashid")
        _TYPECLASSMODELS._ATTRIBUTE_CACHE[hashid] = {}
    except AttributeError:
        pass
AttributeError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/clean_object_caches
1,851
def check_evennia_dependencies():
    """
    Checks the versions of Evennia's dependencies including making some
    checks for runtime libraries.

    Returns:
        result (bool): `False` if a show-stopping version mismatch is
            found.
    """
    # check main dependencies
    from evennia.server.evennia_launcher import check_main_evennia_dependencies
    not_error = check_main_evennia_dependencies()

    errstring = ""

    # South is no longer used ...
    if 'south' in settings.INSTALLED_APPS:
        errstring += "\n ERROR: 'south' found in settings.INSTALLED_APPS. " \
                     "\n South is no longer used. If this was added manually, remove it."
        not_error = False

    # IRC support
    if settings.IRC_ENABLED:
        try:
            import twisted.words
            twisted.words  # set to avoid debug info about not-used import
        except __HOLE__:
            errstring += "\n ERROR: IRC is enabled, but twisted.words is not installed. Please install it." \
                         "\n Linux Debian/Ubuntu users should install package 'python-twisted-words', others" \
                         "\n can get it from http://twistedmatrix.com/trac/wiki/TwistedWords."
            not_error = False

    errstring = errstring.strip()
    if errstring:
        mlen = max(len(line) for line in errstring.split("\n"))
        logger.log_err("%s\n%s\n%s" % ("-" * mlen, errstring, '-' * mlen))
    return not_error
ImportError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/check_evennia_dependencies
1,852
def has_parent(basepath, obj):
    """
    Checks if `basepath` is somewhere in `obj`s parent tree.

    Args:
        basepath (str): Python dotpath to compare against obj path.
        obj (any): Object whose path is to be checked.

    Returns:
        has_parent (bool): If the check was successful or not.
    """
    try:
        return any(cls for cls in obj.__class__.mro()
                   if basepath == "%s.%s" % (cls.__module__, cls.__name__))
    except (TypeError, __HOLE__):
        # this can occur if we tried to store a class object, not an
        # instance. Not sure if one should defend against this.
        return False
AttributeError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/has_parent
1,853
def mod_import(module):
    """
    A generic Python module loader.

    Args:
        module (str, module): This can be either a Python path
            (dot-notation like `evennia.objects.models`), an absolute path
            (e.g. `/home/eve/evennia/evennia/objects.models.py`) or an
            already imported module object (e.g. `models`)

    Returns:
        module (module or None): An imported module. If the input argument
            was already a module, this is returned as-is, otherwise the
            path is parsed and imported. Returns `None` and logs error if
            import failed.
    """
    if not module:
        return None

    if isinstance(module, types.ModuleType):
        # if this is already a module, we are done
        mod = module
    else:
        # first try to import as a python path
        try:
            mod = __import__(module, fromlist=["None"])
        except ImportError as ex:
            # check just where the ImportError happened (it could have been
            # an erroneous import inside the module as well). This is the
            # trivial way to do it ...
            if str(ex) != "Import by filename is not supported.":
                raise

            # error in this module. Try absolute path import instead
            if not os.path.isabs(module):
                module = os.path.abspath(module)
            path, filename = module.rsplit(os.path.sep, 1)
            modname = re.sub(r"\.py$", "", filename)

            try:
                result = imp.find_module(modname, [path])
            except __HOLE__:
                logger.log_trace("Could not find module '%s' (%s.py) at path '%s'" % (modname, modname, path))
                return
            try:
                mod = imp.load_module(modname, *result)
            except ImportError:
                logger.log_trace("Could not find or import module %s at path '%s'" % (modname, path))
                mod = None
            # we have to close the file handle manually
            result[0].close()
    return mod
ImportError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/mod_import
1,854
def fuzzy_import_from_module(path, variable, default=None, defaultpaths=None):
    """
    Import a variable based on a fuzzy path. First the literal `path`
    will be tried, then all given `defaultpaths` will be prepended to see
    a match is found.

    Args:
        path (str): Full or partial python path.
        variable (str): Name of variable to import from module.
        default (string, optional): Default value to use if a variable fails to
            be extracted. Ignored if `variable` is not given.
        defaultpaths (iterable, options): Python paths to attempt in order if
            importing directly from `path` doesn't work.

    Returns:
        value (any): The variable imported from the module, or `default`, if
            not found.
    """
    paths = [path] + make_iter(defaultpaths)
    for modpath in paths:
        try:
            mod = import_module(path)
        except __HOLE__ as ex:
            if not str(ex).startswith("No module named %s" % path):
                # this means the module was found but it
                # triggers an ImportError on import.
                raise ex
        return getattr(mod, variable, default)
    return default
ImportError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/fuzzy_import_from_module
1,855
def class_from_module(path, defaultpaths=None):
    """
    Return a class from a module, given the module's path. This is
    primarily used to convert db_typeclass_path:s to classes.

    Args:
        path (str): Full Python dot-path to module.
        defaultpaths (iterable, optional): If a direc import from `path`
            fails, try subsequent imports by prepending those paths to
            `path`.

    Returns:
        class (Class): An uninstatiated class recovered from path.

    Raises:
        ImportError: If all loading failed.
    """
    cls = None
    if defaultpaths:
        paths = [path] + ["%s.%s" % (dpath, path)
                          for dpath in make_iter(defaultpaths)] if defaultpaths else []
    else:
        paths = [path]

    for testpath in paths:
        if "." in path:
            testpath, clsname = testpath.rsplit(".", 1)
        else:
            raise ImportError("the path '%s' is not on the form modulepath.Classname." % path)

        try:
            mod = import_module(testpath, package="evennia")
        except ImportError:
            if len(trace()) > 2:
                # this means the error happened within the called module and
                # we must not hide it.
                exc = sys.exc_info()
                raise_(exc[1], None, exc[2])
            else:
                # otherwise, try the next suggested path
                continue

        try:
            cls = getattr(mod, clsname)
            break
        except __HOLE__:
            if len(trace()) > 2:
                # AttributeError within the module, don't hide it
                exc = sys.exc_info()
                raise_(exc[1], None, exc[2])

    if not cls:
        err = "Could not load typeclass '%s'" % path
        if defaultpaths:
            err += "\nPaths searched:\n %s" % "\n ".join(paths)
        else:
            err += "."
        raise ImportError(err)
    return cls

# alias
AttributeError
dataset/ETHPy150Open evennia/evennia/evennia/utils/utils.py/class_from_module
1,856
def parse_command(self, com_arg, com_string):
    """Parse the values given in the command line."""
    try:
        return tuple((int(item.strip()) for item in com_string.split(':')))
    except __HOLE__:
        raise CommandError('You failed to provide "%s" with one or two '
                           'values of type int.\nExample: --%s=2:5'
                           % (com_arg, com_arg))
ValueError
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/reviews/management/commands/fill-database.py/Command.parse_command
1,857
def __digify_args(*args):
    """Covert all arguments to a number"""
    numbers = []
    for arg in args:
        if isinstance(arg, MathString):
            if '.' in arg:
                numtype = float
            else:
                numtype = int
            try:
                arg = numtype(arg)
            except __HOLE__:
                raise MathError("The value '%s' is not a number" % str(arg))
        numbers.append(arg)
    return numbers
ValueError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/util.py/MathString.__digify_args
1,858
def __int__(self):
    try:
        return int(str(self))
    except __HOLE__:
        return int(float(self))
ValueError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/util.py/MathString.__int__
1,859
def __long__(self):
    try:
        return long(str(self))
    except __HOLE__:
        return long(float(self))
ValueError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/util.py/MathString.__long__
1,860
def setup(user=None, group=None, file_limit=None, core_dumps=None):
    """Set the processes user, group, and file limits"""

    if file_limit:
        try:
            resource.setrlimit(resource.RLIMIT_NOFILE,
                               (file_limit, file_limit))
        except ValueError, ex:
            log.error("Failed to set limit on open files: %s" % ex)
            sys.exit(1)

    if group:
        if not group.isdigit():
            try:
                group = grp.getgrnam(group)[2]
            except KeyError:
                log.error("Unknown group '%s'" % group)
                sys.exit(1)
        else:
            group = int(group)

        try:
            os.setregid(group, group)
        except OSError, ex:
            log.error("Failed to set gid: %s" % ex)
            sys.exit(1)

    if user:
        if not user.isdigit():
            try:
                user = pwd.getpwnam(user)[2]
            except KeyError:
                log.error("Unknown user '%s'" % user)
                sys.exit(1)
        else:
            user = int(user)

        try:
            os.setreuid(user, user)
        except OSError, ex:
            log.error("Failed to set uid: %s" % ex)
            sys.exit(1)

    if core_dumps:
        try:
            resource.setrlimit(resource.RLIMIT_CORE, (-1, -1))
        except __HOLE__, ex:
            log.error("Failed to set limit on core dumps: %s" % ex)
            sys.exit(1)

        if not os.path.isdir(core_dumps):
            try:
                os.makedirs(core_dumps)
            except OSError, ex:
                log.error("Failed to create directory %s" % core_dumps)
                sys.exit(1)
        else:
            if not os.access(core_dumps, os.R_OK | os.W_OK | os.X_OK):
                log.error("Insufficient permissions on %s" % core_dumps)
                sys.exit(1)
ValueError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/util.py/setup
1,861
def daemonize(pid_file, cwd="/"):
    """Background the current process"""
    log.debug("daemonizing process")

    # BROKEN: the pid file may have already been created by write_pid
    # however, I'm not even using nagcat in daemon mode right now so
    # I'll just leave this commented out for now...
    # Also, this has a major race condition...
    #try:
    #    # A trivial check to see if we are already running
    #    pidfd = open(pid_file)
    #    pid = int(pidfd.readline().strip())
    #    pidfd.close()
    #    os.kill(pid, 0)
    #except (IOError, OSError):
    #    pass  # Assume all is well if the test raised errors
    #else:
    #    log.error("PID file exits and process %s is running!" % pid)
    #    sys.exit(1)

    try:
        pidfd = open(pid_file, 'w')
    except __HOLE__, ex:
        log.error("Failed to open PID file %s" % pid_file)
        log.error("Error: %s" % (ex,))
        sys.exit(1)

    if os.fork() > 0:
        os._exit(0)

    os.chdir(cwd)
    os.setsid()

    if os.fork() > 0:
        os._exit(0)

    pidfd.write("%s\n" % os.getpid())
    pidfd.close()
IOError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/util.py/daemonize
1,862
def write_pid(pid_file):
    """Write out the current PID"""
    try:
        pidfd = open(pid_file, 'w')
    except __HOLE__, ex:
        log.error("Failed to open PID file %s" % pid_file)
        log.error("Error: %s" % (ex,))
        sys.exit(1)
    pidfd.write("%s\n" % os.getpid())
    pidfd.close()
IOError
dataset/ETHPy150Open marineam/nagcat/python/nagcat/util.py/write_pid
1,863
def _real_extract(self, url):
    mobj = re.match(self._VALID_URL, url)
    show_path = mobj.group('show_path')
    episode_path = mobj.group('episode_path')
    is_playlist = True if mobj.group('is_playlist') else False

    webpage = self._download_webpage(url, episode_path)

    # Extract the value of `bootstrappedData` from the Javascript in the page.
    bootstrappedDataJS = self._search_regex(r'var bootstrappedData = ({.*});', webpage, episode_path)

    try:
        bootstrappedData = json.loads(bootstrappedDataJS)
    except __HOLE__ as ve:
        errmsg = '%s: Failed to parse JSON ' % episode_path
        raise ExtractorError(errmsg, cause=ve)

    # Downloading videos from a /videos/playlist/ URL needs to be handled differently.
    # NOTE: We are only downloading one video (the current one) not the playlist
    if is_playlist:
        collections = bootstrappedData['playlists']['collections']
        collection = self.find_collection_by_linkURL(collections, show_path)
        video_info = self.find_video_info(collection, episode_path)

        show_title = video_info['showTitle']
        segment_ids = [video_info['videoPlaybackID']]
    else:
        collections = bootstrappedData['show']['collections']
        collection, video_info = self.find_collection_containing_video(collections, episode_path)

        show = bootstrappedData['show']
        show_title = show['title']
        segment_ids = [clip['videoPlaybackID'] for clip in video_info['clips']]

    episode_id = video_info['id']
    episode_title = video_info['title']
    episode_description = video_info['description']
    episode_duration = video_info.get('duration')

    entries = []
    for part_num, segment_id in enumerate(segment_ids):
        segment_url = 'http://www.adultswim.com/videos/api/v0/assets?id=%s&platform=mobile' % segment_id

        segment_title = '%s - %s' % (show_title, episode_title)
        if len(segment_ids) > 1:
            segment_title += ' Part %d' % (part_num + 1)

        idoc = self._download_xml(
            segment_url, segment_title,
            'Downloading segment information', 'Unable to download segment information')

        segment_duration = float_or_none(
            xpath_text(idoc, './/trt', 'segment duration').strip())

        formats = []
        file_els = idoc.findall('.//files/file')

        for file_el in file_els:
            bitrate = file_el.attrib.get('bitrate')
            ftype = file_el.attrib.get('type')

            formats.append({
                'format_id': '%s_%s' % (bitrate, ftype),
                'url': file_el.text.strip(),
                # The bitrate may not be a number (for example: 'iphone')
                'tbr': int(bitrate) if bitrate.isdigit() else None,
                'quality': 1 if ftype == 'hd' else -1
            })
        self._sort_formats(formats)

        entries.append({
            'id': segment_id,
            'title': segment_title,
            'formats': formats,
            'duration': segment_duration,
            'description': episode_description
        })

    return {
        '_type': 'playlist',
        'id': episode_id,
        'display_id': episode_path,
        'entries': entries,
        'title': '%s - %s' % (show_title, episode_title),
        'description': episode_description,
        'duration': episode_duration
    }
ValueError
dataset/ETHPy150Open yasoob/youtube-dl-GUI/youtube_dl/extractor/adultswim.py/AdultSwimIE._real_extract
1,864
def get_class_by_model_or_name(cls):
    collection_class = None
    if isinstance(cls, basestring):
        app_label, model_name = cls.split('.')
        try:
            collection_class = get_model(app_label, model_name)
        except __HOLE__:
            # This is a Django internal thing. If all the models are not yet loaded,
            # you can't get one out of the cache so it will try to do an import and then
            # you get circular dependencies. This just prepopulates the cache with all the models
            from django.db.models.loading import cache as app_cache
            app_cache._populate()
            # If it fails here you actually didn't specify a valid model class
            # remember it's "users.User" not just "User"
            collection_class = get_model(app_label, model_name)
    else:
        collection_class = cls
    return collection_class
ImportError
dataset/ETHPy150Open jumoconnect/openjumo/jumodjango/etc/cache.py/get_class_by_model_or_name
1,865
def get_lookup(channel):
    """ find the lookup class for the named channel.  this is used internally """
    try:
        lookup_label = settings.AJAX_LOOKUP_CHANNELS[channel]
    except (__HOLE__, AttributeError):
        raise ImproperlyConfigured("settings.AJAX_LOOKUP_CHANNELS not configured correctly for %r" % channel)

    if isinstance(lookup_label, dict):
        # 'channel' : dict(model='app.model', search_field='title' )
        # generate a simple channel dynamically
        return make_channel(lookup_label['model'], lookup_label['search_field'])
    else:
        # 'channel' : ('app.module','LookupClass')
        # from app.module load LookupClass and instantiate
        lookup_module = __import__(lookup_label[0], {}, {}, [''])
        lookup_class = getattr(lookup_module, lookup_label[1])
        return lookup_class()
KeyError
dataset/ETHPy150Open caseywstark/colab/colab/apps/ajax_select/__init__.py/get_lookup
1,866
def register_argparse_afc(cmdargs):
    import argparse
    import sys
    import afcapplicationdirectory
    import afccrashlogdirectory
    import afcmediadirectory
    import afcroot
    import time
    import posixpath
    import pprint

    def printdir(afc, path, recurse):
        dirlist = []
        rows = []
        colmax = [0, 0, 0, 0]
        print "afc: ", path
        for name in afc.listdir(path):
            isdir = u''
            info = afc.lstat(posixpath.join(path, name))
            if info.st_ifmt == stat.S_IFDIR:
                isdir = u'/'
                dirlist.append(posixpath.join(path, name))
            types = {
                stat.S_IFSOCK: u's',
                stat.S_IFLNK: u'l',
                stat.S_IFREG: u'-',
                stat.S_IFBLK: u'b',
                stat.S_IFDIR: u'd',
                stat.S_IFCHR: u'c',
                stat.S_IFIFO: u'p'
            }
            modtime = long(info.st_mtime)
            if long(time.time()) - modtime > (60*60*24*365):
                # only show year if its over a year old (ls style)
                strtime = time.strftime(u'%d %b %Y', time.gmtime(modtime))
            else:
                strtime = time.strftime(u'%d %b %H:%M', time.gmtime(modtime))
            islink = u''
            if info.st_ifmt == stat.S_IFLNK:
                islink = u' -> ' + afc.readlink(posixpath.join(path, name))
            row = (types[info.st_ifmt], info.st_size, strtime, name + isdir + islink)
            rows.append(row)
            for i in range(len(row)):
                if len(row[i]) > colmax[i]:
                    colmax[i] = len(row[i])
        for row in rows:
            print(row[0].ljust(colmax[0]) + u' ' + row[1].rjust(colmax[1]) + u' ' + row[2].ljust(colmax[2]) + u' ' + row[3])
        if recurse:
            for name in dirlist:
                print(u'\n' + name)
                printdir(afc, name, recurse)

    def get_afc(args, dev):
        retval = None
        if args.path.startswith(u'/var/mobile/Media'):
            retval = afcmediadirectory.AFCMediaDirectory(dev)
            args.path = args.path[len(u'/var/mobile/Media'):]
        elif args.m:
            retval = afcmediadirectory.AFCMediaDirectory(dev)
        elif args.c:
            retval = afccrashlogdirectory.AFCCrashLogDirectory(dev)
        elif args.app is not None:
            retval = afcapplicationdirectory.AFCApplicationDirectory(dev, args.app.decode(u'utf-8'))
        else:
            retval = afcroot.AFCRoot(dev)
        return retval

    def cmd_ls(args, dev):
        afc = get_afc(args, dev)
        printdir(afc, args.path.decode(u'utf-8'), args.r)
        afc.disconnect()

    def cmd_mkdir(args, dev):
        afc = get_afc(args, dev)
        afc.mkdir(args.path)
        afc.disconnect()

    def cmd_rm(args, dev):
        afc = get_afc(args, dev)
        afc.remove(args.path)
        afc.disconnect()

    def cmd_ln(args, dev):
        # XXX unable to make linking work?
        afc = get_afc(args, dev)
        # if we're using the default mediadirectory then adjust the link
        if args.link.startswith(u'/var/mobile/Media'):
            args.link = args.link[len(u'/var/mobile/Media'):]
        if args.s:
            afc.symlink(args.path, args.link)
        else:
            afc.link(args.path, args.link)
        afc.disconnect()

    def cmd_get(args, dev):
        dest = args.dest
        if dest[-1] == os.sep:
            # trailing seperator so dest has same name as src
            dest = posixpath.join(dest, posixpath.basename(args.path))
        afc = get_afc(args, dev)
        s = afc.open(args.path, u'r')
        d = open(dest, u'w+')
        d.write(s.readall())
        d.close()
        s.close()
        afc.disconnect()

    def cmd_put(args, dev):
        if args.path[-1] == os.sep:
            # trailing seperator so dest has same name as src
            args.path = posixpath.join(args.path, posixpath.basename(args.src))
        afc = get_afc(args, dev)
        d = afc.open(args.path, u'w')
        s = open(args.src, u'r')
        d.write(s.read())
        s.close()
        d.close()
        afc.disconnect()

    def preview_file(afc, path):
        s = afc.open(path, u'r')
        d = s.readall()
        s.close()
        p = dict_from_plist_encoding(d)
        if p is not None:
            pprint.pprint(p)
        else:
            print(d)
        # XXX add extra preview code for other common types

    def cmd_view(args, dev):
        afc = get_afc(args, dev)
        path = args.path.decode(u'utf-8')
        files = []
        try:
            # check for directory preview
            for f in afc.listdir(path):
                files.append(posixpath.join(path, f))
        except __HOLE__:
            files = [path]  # its not a directory
        for f in files:
            preview_file(afc, f)
        afc.disconnect()

    # afc command
    afcparser = cmdargs.add_parser(u'afc', help=u'commands to manipulate files via afc')
    afcgroup = afcparser.add_mutually_exclusive_group()
    afcgroup.add_argument(u'-a', metavar=u'app', dest=u'app',
                          help=u'reverse domain name of application; device paths become relative to app container')
    afcgroup.add_argument(u'-c', action=u'store_true',
                          help=u'crashlogs; device paths become relative to crash log container')
    afcgroup.add_argument(u'-m', action=u'store_true',
                          help=u'device paths become relative to /var/mobile/media (saves typing)')
    afccmd = afcparser.add_subparsers()

    # ls command
    lscmd = afccmd.add_parser(u'ls', help=u'lists the contents of the directory')
    lscmd.add_argument(u'-r', action=u'store_true', help=u'if specified listing is recursive')
    lscmd.add_argument(u'path', help=u'path on the device to list')
    lscmd.set_defaults(func=cmd_ls)

    # mkdir command
    mkdircmd = afccmd.add_parser(u'mkdir', help=u'creates a directory')
    mkdircmd.add_argument(u'path', help=u'path of the dir to create')
    mkdircmd.set_defaults(func=cmd_mkdir)

    # rmdir / rm
    rmcmd = afccmd.add_parser(u'rm', help=u'remove directory/file')
    rmcmd.add_argument(u'path', help=u'the path to delete')
    rmcmd.set_defaults(func=cmd_rm)

    rmdircmd = afccmd.add_parser(u'rmdir', help=u'remove directory/file')
    rmdircmd.add_argument(u'path', help=u'the path to delete')
    rmdircmd.set_defaults(func=cmd_rm)

    # ln
    lncmd = afccmd.add_parser(u'ln', help=u'create a link (symbolic or hard)')
    lncmd.add_argument(u'path', help=u'the pre-existing path to link to')
    lncmd.add_argument(u'link', help=u'the path for the link')
    lncmd.add_argument(u'-s', action=u'store_true', help=u'create a symbolic link')
    lncmd.set_defaults(func=cmd_ln)

    # get
    getcmd = afccmd.add_parser(u'get', help=u'retrieve a file from the device')
    getcmd.add_argument(u'path', help=u'path on the device to retrieve')
    getcmd.add_argument(u'dest', help=u'local path to write file to')
    getcmd.set_defaults(func=cmd_get)

    # put
    putcmd = afccmd.add_parser(u'put', help=u'upload a file from the device')
    putcmd.add_argument(u'src', help=u'local path to read file from')
    putcmd.add_argument(u'path', help=u'path on the device to write')
    putcmd.set_defaults(func=cmd_put)

    # view
    viewcmd = afccmd.add_parser(u'view', help=u'retrieve a file from the device and preview as txt')
    viewcmd.add_argument(u'path', help=u'path on the device to retrieve')
    viewcmd.set_defaults(func=cmd_view)
OSError
dataset/ETHPy150Open mountainstorm/MobileDevice/afcmediadirectory.py/register_argparse_afc
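The `register_argparse_afc` entry above drives everything through argparse sub-parsers plus `set_defaults(func=...)` dispatch. A minimal, self-contained sketch of just that wiring (all names below are invented for illustration, not taken from MobileDevice):

import argparse

def cmd_ls(args):
    print(args.path)

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
lscmd = subparsers.add_parser('ls', help='list a directory')
lscmd.add_argument('path')
lscmd.set_defaults(func=cmd_ls)

args = parser.parse_args(['ls', '/tmp'])
args.func(args)  # dispatch to whichever handler the subcommand selected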
1,867
def manage(module, **params):
    """Return a proxy for a DB-API module that automatically pools connections.

    Given a DB-API 2.0 module and pool management parameters, returns
    a proxy for the module that will automatically pool connections,
    creating new connection pools for each distinct set of connection
    arguments sent to the decorated module's connect() function.

    :param module: a DB-API 2.0 database module

    :param poolclass: the class used by the pool module to provide
     pooling.  Defaults to :class:`.QueuePool`.

    :param \*\*params: will be passed through to *poolclass*

    """
    try:
        return proxies[module]
    except __HOLE__:
        return proxies.setdefault(module, _DBProxy(module, **params))
KeyError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/manage
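`manage()` above is SQLAlchemy's module-level pooling proxy: each distinct set of `connect()` arguments gets its own pool. A usage sketch, assuming a SQLAlchemy release that still ships `sqlalchemy.pool.manage` (it was removed in 1.3) and the stdlib `sqlite3` driver:

import sqlite3
from sqlalchemy import pool

sqlite = pool.manage(sqlite3)      # proxy module; one pool per connect() signature
conn = sqlite.connect(':memory:')  # checked out from the underlying pool
conn.close()                       # returns the connection to the pool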
1,868
def _close_connection(self, connection):
    self.logger.debug("Closing connection %r", connection)
    try:
        self._dialect.do_close(connection)
    except (SystemExit, __HOLE__):
        raise
    except:
        self.logger.error("Exception closing connection %r",
                          connection, exc_info=True)
KeyboardInterrupt
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/Pool._close_connection
1,869
def connect(self):
    """Return a DBAPI connection from the pool.

    The connection is instrumented such that when its
    ``close()`` method is called, the connection will be returned to
    the pool.

    """
    if not self._use_threadlocal:
        return _ConnectionFairy._checkout(self)

    try:
        rec = self._threadconns.current()
    except __HOLE__:
        pass
    else:
        if rec is not None:
            return rec._checkout_existing()

    return _ConnectionFairy._checkout(self, self._threadconns)
AttributeError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/Pool.connect
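The fast path in `Pool.connect()` above caches a checkout per thread. The same idea reduced to plain `threading.local` (a sketch with invented names, not SQLAlchemy API):

import threading

_local = threading.local()

def checkout(factory):
    # Reuse this thread's connection if one exists; AttributeError means
    # this thread has not checked anything out yet.
    try:
        return _local.conn
    except AttributeError:
        _local.conn = factory()
        return _local.conn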
1,870
def _return_conn(self, record):
    """Given a _ConnectionRecord, return it to the :class:`.Pool`.

    This method is called when an instrumented DBAPI connection
    has its ``close()`` method called.

    """
    if self._use_threadlocal:
        try:
            del self._threadconns.current
        except __HOLE__:
            pass
    self._do_return_conn(record)
AttributeError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/Pool._return_conn
1,871
def dispose(self):
    """Dispose of this pool."""
    for conn in self._all_conns:
        try:
            conn.close()
        except (SystemExit, __HOLE__):
            raise
        except:
            # pysqlite won't even let you close a conn from a thread
            # that didn't create it
            pass
    self._all_conns.clear()
KeyboardInterrupt
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/SingletonThreadPool.dispose
1,872
def _do_get(self):
    try:
        c = self._conn.current()
        if c:
            return c
    except __HOLE__:
        pass
    c = self._create_connection()
    self._conn.current = weakref.ref(c)
    if len(self._all_conns) >= self.size:
        self._cleanup()
    self._all_conns.add(c)
    return c
AttributeError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/SingletonThreadPool._do_get
1,873
def get_pool(self, *args, **kw):
    key = self._serialize(*args, **kw)
    try:
        return self.pools[key]
    except __HOLE__:
        self._create_pool_mutex.acquire()
        try:
            if key not in self.pools:
                kw.pop('sa_pool_key', None)
                pool = self.poolclass(
                    lambda: self.module.connect(*args, **kw), **self.kw)
                self.pools[key] = pool
                return pool
            else:
                return self.pools[key]
        finally:
            self._create_pool_mutex.release()
KeyError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/_DBProxy.get_pool
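`_DBProxy.get_pool()` above is the classic double-checked locking cache: an EAFP fast path with no lock, then a re-check under a mutex so two racing threads cannot both create a pool for the same key. The pattern in isolation (names invented):

import threading

_cache = {}
_lock = threading.Lock()

def get_or_create(key, factory):
    try:
        return _cache[key]           # fast path, lock-free
    except KeyError:
        with _lock:
            if key not in _cache:    # re-check: another thread may have won
                _cache[key] = factory()
            return _cache[key]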
1,874
def dispose(self, *args, **kw):
    """Dispose the pool referenced by the given connect arguments."""
    key = self._serialize(*args, **kw)
    try:
        del self.pools[key]
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open goFrendiAsgard/kokoropy/kokoropy/packages/sqlalchemy/pool.py/_DBProxy.dispose
1,875
def test_get_live_channels(self):
    # This is a tricky one, not sure how to properly test it..
    test_channel_count = 10
    live_channels = self.twitch.get_live_channels()
    error_count = 0
    test_count = 0
    for ch in live_channels:
        ret_json = requests.get(self.TWITCH_REST + '/streams/' + ch).json()
        try:
            ok_(ret_json['stream']['channel']['display_name'] == ch)
        except __HOLE__:
            error_count += 1
        test_count += 1
        if test_count >= test_channel_count:
            break
    # there is time difference between get live and check live
    # it is possible that channel went offline between these 2 api calls
    # so we just expect 80% of tested channels are really live on the
    # 2nd api call
    ok_((float(error_count) / test_count) < 0.20,
        'test:{}, error:{}'.format(test_count, error_count))
KeyError
dataset/ETHPy150Open KavenC/Linot/tests/services/test_twitch_notifier.py/TestTwitchEngine.test_get_live_channels
1,876
def tearDown(self):
    try:
        os.remove(self.service.SUB_FILE)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open KavenC/Linot/tests/services/test_twitch_notifier.py/TestService.tearDown
1,877
def test_subscribe_one(self):
    try:
        os.remove(self.service.SUB_FILE)
    except __HOLE__:
        pass
    self.service.start()
    threading.Event().wait(.1)
    self.service.stop()
    self.service._twitch.set_exists_channel_list(['testch1', 'testch2'])
    fake_sender = CommandSubmitter('test', 'fake_sender')
    self.service._subscribe(['testch1'], fake_sender)
    ok_('testch1' in [x.lower() for x in self.service._sublist[fake_sender]],
        'sublist = ' + str(self.service._sublist[fake_sender]))
    ok_('not found' not in ' '.join(interfaces.get('test').msg_queue[fake_sender]))
    ok_(self.service._channel_sub_count['testch1'] == 1)
    fake_sender2 = CommandSubmitter('test', 'fake_sender2')
    self.service._subscribe(['testch1'], fake_sender2)
    ok_(self.service._channel_sub_count['testch1'] == 2)
OSError
dataset/ETHPy150Open KavenC/Linot/tests/services/test_twitch_notifier.py/TestService.test_subscribe_one
1,878
def test_subscribe_one_exists(self):
    try:
        os.remove(self.service.SUB_FILE)
    except __HOLE__:
        pass
    self.service.start()
    threading.Event().wait(.1)
    self.service.stop()
    self.service._twitch.set_exists_channel_list(['testch1', 'testch2'])
    fake_sender = CommandSubmitter('test', 'fake_sender')
    self.service._sublist[fake_sender] = ['testch1']
    self.service._subscribe(['testch1'], fake_sender)
    ok_(self.service._sublist[fake_sender].count('testch1') == 1,
        'sublist = ' + str(self.service._sublist[fake_sender]))
    ok_('not found' not in ' '.join(interfaces.get('test').msg_queue[fake_sender]))
OSError
dataset/ETHPy150Open KavenC/Linot/tests/services/test_twitch_notifier.py/TestService.test_subscribe_one_exists
1,879
def test_subscribe_multi(self):
    try:
        os.remove(self.service.SUB_FILE)
    except __HOLE__:
        pass
    self.service.start()
    threading.Event().wait(.1)
    self.service.stop()
    self.service._twitch.set_exists_channel_list(['testch1', 'testch2'])
    fake_sender = CommandSubmitter('test', 'fake_sender')
    self.service._subscribe(['testch1', 'testch2'], fake_sender)
    ok_('testch1' in self.service._sublist[fake_sender],
        'sublist = ' + str(self.service._sublist[fake_sender]))
    ok_('testch2' in self.service._sublist[fake_sender],
        'sublist = ' + str(self.service._sublist[fake_sender]))
    ok_('not found' not in ' '.join(interfaces.get('test').msg_queue[fake_sender]))
OSError
dataset/ETHPy150Open KavenC/Linot/tests/services/test_twitch_notifier.py/TestService.test_subscribe_multi
1,880
def test_list_users_no_user(self):
    # issue #10, list user hit exception while there is no user
    config['interface']['test'] = {'admin_id': 'test_admin'}
    try:
        os.remove(self.service.SUB_FILE)
    except __HOLE__:
        pass
    self.service.start()
    threading.Event().wait(.1)
    self.service.stop()
    fake_sender = CommandSubmitter('test', 'test_admin')
    self.service._list_users([], fake_sender)
    # check there is a response to user
    ok_(len(''.join(interfaces.get('test').msg_queue[fake_sender])) > 0)
OSError
dataset/ETHPy150Open KavenC/Linot/tests/services/test_twitch_notifier.py/TestService.test_list_users_no_user
1,881
def test_assert_no_diff_dict(self):
    dict1 = {'I love': 'you'}
    dict2 = {'I love': 'moo'}
    try:
        self.failIfDiff(dict1, dict2)
    except __HOLE__, e:
        self.failIfDiff(e.message, """\n--- First \n\n+++ Second \n\n@@ -1,1 +1,1 @@\n\n-'I love':'you'\n+'I love':'moo'\n""")
AssertionError
dataset/ETHPy150Open ericholscher/django-test-utils/test_project/test_app/tests/assertions_tests.py/TestAssertions.test_assert_no_diff_dict
1,882
def test_assert_no_diff_list(self):
    list1 = ['I love', 'you']
    list2 = ['I love', 'to moo']
    try:
        self.failIfDiff(list1, list2)
    except __HOLE__, e:
        self.failIfDiff(e.message, """\n--- First \n\n+++ Second \n\n@@ -1,2 +1,2 @@\n\n 'I love'\n-'you'\n+'to moo'\n""")
AssertionError
dataset/ETHPy150Open ericholscher/django-test-utils/test_project/test_app/tests/assertions_tests.py/TestAssertions.test_assert_no_diff_list
1,883
def _convert_float(self, number):
    try:
        return float(number)
    except __HOLE__:
        return None
ValueError
dataset/ETHPy150Open DenisCarriere/geocoder/geocoder/location.py/Location._convert_float
1,884
def emit(self, record):
    """
    Emit a record.
    """
    try:
        eventDict = {
            'category': self.category,
            'logLevel': record.levelname,
            'logName': record.name,
            'filename': record.pathname,
            'lineno': record.lineno,
            'funcName': record.funcName,
        }
        eventDict['isError'] = record.levelno >= logging.ERROR

        if isinstance(record.args, dict):
            eventDict.update(record.args)

        message = record.getMessage()

        if record.exc_info:
            exc_type, exc_value, exc_traceback = record.exc_info
            eventDict['failure'] = Failure(exc_value, exc_type, exc_traceback)
            self.publisher.msg(why=message, **eventDict)
        else:
            self.publisher.msg(message, **eventDict)
    except (KeyboardInterrupt, __HOLE__):
        raise
    except:
        self.handleError(record)
SystemExit
dataset/ETHPy150Open mochi/udplog/udplog/twisted.py/TwistedLogHandler.emit
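This `emit()` (and the pyfluent one further down) follows the stdlib convention for logging handlers: let `KeyboardInterrupt`/`SystemExit` propagate, route every other failure through `handleError()`. A minimal handler showing only that skeleton (illustrative, not part of udplog):

import logging

class SkeletonHandler(logging.Handler):
    def emit(self, record):
        try:
            self.format(record)  # a real handler would ship the result somewhere
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)  # honours logging.raiseExceptions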
1,885
def datagramReceived(self, datagram, addr):
    data = datagram.rstrip()
    try:
        category, event = udplog.unserialize(data)
        event['category'] = category
    except (__HOLE__, TypeError):
        log.err()
        return
    self.callback(event)
ValueError
dataset/ETHPy150Open mochi/udplog/udplog/twisted.py/UDPLogProtocol.datagramReceived
1,886
def unregister(self, consumer):
    try:
        self._consumers.remove(consumer)
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open mochi/udplog/udplog/twisted.py/Dispatcher.unregister
1,887
def check_if_installed(self):
    try:
        find_module("locust")
        self.already_installed = True
    except __HOLE__:
        self.log.error("LocustIO is not installed, see http://docs.locust.io/en/latest/installation.html")
        return False
    return True
ImportError
dataset/ETHPy150Open Blazemeter/taurus/bzt/modules/locustio.py/LocustIO.check_if_installed
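`find_module` here comes from the long-deprecated `imp` module. On Python 3.4+ the same availability probe is normally written with `importlib` instead (a hedged equivalent, not what the bzt source does):

import importlib.util

def is_installed(module_name):
    return importlib.util.find_spec(module_name) is not None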
1,888
def protorpc_to_endpoints_error(self, status, body):
    """Convert a ProtoRPC error to the format expected by Google Endpoints.

    If the body does not contain an ProtoRPC message in state APPLICATION_ERROR
    the status and body will be returned unchanged.

    Args:
      status: HTTP status of the response from the backend
      body: JSON-encoded error in format expected by Endpoints frontend.

    Returns:
      Tuple of (http status, body)
    """
    try:
        rpc_error = protojson.decode_message(remote.RpcStatus, body)
    except (__HOLE__, messages.ValidationError):
        rpc_error = remote.RpcStatus()

    if rpc_error.state == remote.RpcStatus.State.APPLICATION_ERROR:
        error_class = _ERROR_NAME_MAP.get(rpc_error.error_name)
        if error_class:
            status, body = self.__write_error(error_class.http_status,
                                              rpc_error.error_message)
    return status, body
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/ext/endpoints/apiserving.py/_ApiServer.protorpc_to_endpoints_error
1,889
def emit(self, record):
    try:
        data = self.format(record)
        tag = ('%s.%s' % (self.tag, record.levelname.lower())).lstrip('.')
        self.fluent.send(data, tag, record.created)
    except (__HOLE__, SystemExit):
        raise
    except:
        self.handleError(record)
KeyboardInterrupt
dataset/ETHPy150Open yosisa/pyfluent/pyfluent/logging.py/SafeFluentHandler.emit
1,890
def optimize(self, optimizer=None, start=None, **kwargs):
    self._IN_OPTIMIZATION_ = True
    if self.mpi_comm == None:
        super(SSMRD, self).optimize(optimizer, start, **kwargs)
    elif self.mpi_comm.rank == 0:
        super(SSMRD, self).optimize(optimizer, start, **kwargs)
        self.mpi_comm.Bcast(np.int32(-1), root=0)
    elif self.mpi_comm.rank > 0:
        x = self.optimizer_array.copy()
        flag = np.empty(1, dtype=np.int32)
        while True:
            self.mpi_comm.Bcast(flag, root=0)
            if flag == 1:
                try:
                    self.optimizer_array = x
                    self._fail_count = 0
                except (LinAlgError, ZeroDivisionError, __HOLE__):
                    if self._fail_count >= self._allowed_failures:
                        raise
                    self._fail_count += 1
            elif flag == -1:
                break
            else:
                self._IN_OPTIMIZATION_ = False
                raise Exception("Unrecognizable flag for synchronization!")
    self._IN_OPTIMIZATION_ = False
ValueError
dataset/ETHPy150Open SheffieldML/GPy/GPy/models/ss_mrd.py/SSMRD.optimize
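The worker branch above synchronises MPI ranks by broadcasting an int32 flag (1 means re-evaluate, -1 means stop). A stripped-down version of that handshake, assuming mpi4py and execution under mpirun with at least two ranks:

from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
flag = np.empty(1, dtype=np.int32)
if comm.rank == 0:
    flag[0] = -1                  # master: tell the workers to stop
    comm.Bcast(flag, root=0)
else:
    while True:
        comm.Bcast(flag, root=0)  # workers: block until the master broadcasts
        if flag[0] == -1:
            break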
1,891
def _clear_static(self):
    try:
        shutil.rmtree(settings.STATIC_ROOT)
    except (OSError, __HOLE__):
        pass
IOError
dataset/ETHPy150Open sergei-maertens/django-systemjs/tests/tests/test_management.py/ClearStaticMixin._clear_static
1,892
def handle(self, *args, **options):
    try:
        username = args[0]
    except __HOLE__:
        raise CommandError(_("You must provide the username to publish the"
                             " form to."))
    # make sure user exists
    try:
        User.objects.get(username=username)
    except User.DoesNotExist:
        raise CommandError(_("The user '%s' does not exist.") % username)

    try:
        input_file = args[1]
    except IndexError:
        raise CommandError(_("You must provide the path to restore from."))
    else:
        input_file = os.path.realpath(input_file)

    num_instances, num_restored = restore_backup_from_zip(
        input_file, username)
    sys.stdout.write("Restored %d of %d submissions\n" %
                     (num_restored, num_instances))
IndexError
dataset/ETHPy150Open kobotoolbox/kobocat/onadata/apps/logger/management/commands/restore_backup.py/Command.handle
1,893
def saveTrace(self, trace, traceFile, dataFileName = None, dataFileFormat = "rvct",
              frameMarkers = [], initFuncName = "init", uninitFuncName = "uninit",
              playFuncName = "play", playFrameFuncName = "playFrame",
              frameFuncName = "frame", arrayPrefix = "array",
              playerArgument = "context", insertCopyright = True):
    try:
        library = self.analyzer.project.targets["code"].library
        config = self.analyzer.project.config
    except (AttributeError, KeyError):
        raise RuntimeError("API configuration not found.")

    def arrayId(array):
        assert isinstance(array, Trace.Array)
        return (array.__class__, array.id)

    def objectId(obj):
        assert isinstance(obj, Trace.Object)
        return "%s_%x_%x" % (obj.cls.name.lower(), obj.ns, obj.id)

    task = Task.startTask("c-export", "Formatting source", len(trace.events))
    indent = " " * 3

    # Collect all values for all events
    task = Task.startTask("c-export", "Collecting data", len(trace.events))
    values = []
    [values.extend(e.values.values()) for e in trace.events]

    # Collect arrays
    arrays = OrderedDict([(arrayId(v), v) for v in reversed(values) if isinstance(v, Trace.Array)])

    # Check that the external data format is supported
    if dataFileName:
        assert dataFileName.endswith(".s"), "Unsupported external data file type. Use one of: s"
        assert dataFileFormat in ("gcc", "rvct"), "Unsupported external data format. Use one of: gcc, rvct"
        dataFile = open(dataFileName, "w")
    else:
        dataFile = None

    # Calculate maximum sizes of arrays
    arraySizes = dict([(a, 0) for a in arrays.keys()])
    for value in values:
        if isinstance(value, Trace.Array):
            a = arrayId(value)
            arraySizes[a] = max(len(value), arraySizes[a])

    # Figure out the C types of arrays and objects
    classes = {}
    objectTypes = {}
    arrayTypes = {}
    outValueObjects = set()  # Objects that are acquired through a function
    arrayVariants = DefaultDict(list)
    # Use arrays whose contents must be kept up to date
    # even after passing them to the API
    usePersistentArrays = False

    def registerObject(event, name, value):
        if not value.cls in classes:
            classes[value.cls] = {}
        function = self.analyzer.lookupFunction(event)
        # Object has already been registered
        if not name or function.parameters[name].isOut:
            outValueObjects.add(value)
        if objectId(value) in classes[value.cls]:
            return
        classes[value.cls][objectId(value)] = value
        for cType, nativeTypeName in library.typeMap.items():
            if cType.name == value.cls.name:
                objectTypes[value] = cType
                break
        else:
            self.analyzer.reportWarning("Unknown class: <%s>" % value.cls.name)
            # Create a fake type name for this class
            objectTypes[value] = value.cls.name

    def registerArray(event, name, value):
        function = self.analyzer.lookupFunction(event)
        if name:
            cType = function.parameters[name].type
        else:
            cType = function.type
        # Extract an essential type for the array discarding all qualifiers and modifiers
        cType = Library.Type(cType.name)
        # Get the real, non-typedef'd type as well
        realType = library.resolveType(cType)
        # If this is a void type, use the original type instead
        if realType.name == "void":
            cType = arrayTypeMap[value.__class__]
        arrayTypes[arrayId(value)] = cType
        # If this is an object array, register the objects too
        if isinstance(value, Trace.ObjectArrayValue):
            for obj in value:
                registerObject(event, name, obj)

    for event in trace.events:
        # OpenGL needs persistent arrays
        if event.name.startswith("gl"):
            usePersistentArrays = True
        for name, value in event.values.items():
            if isinstance(value, Trace.Object):
                registerObject(event, name, value)
            elif isinstance(value, Trace.Array):
                registerArray(event, name, value)
        # Collect the modified arrays for this event
        for array in event.modifiedArrays:
            a = arrayId(array)
            # Only consider the arrays we know about
            if a in arrayTypes:
                arrayVariants[a].append(array)
        task.step()

    # Count the number of frames
    if frameMarkers:
        frameCount = len(frameMarkers) + 3
    else:
        frameCount = len([1 for event in trace.events if self.analyzer.lookupFunction(event).isFrameMarker]) + 3

    # Add the header
    print >>traceFile, "/**"
    print >>traceFile, " * C source generated from %d events (%d frames). " % (len(trace.events), frameCount)
    if insertCopyright:
        print >>traceFile, copyrightText
    print >>traceFile, " */"
    print >>traceFile, ""
    print >>traceFile, "/** A macro for copying data into an array */"
    print >>traceFile, "#define LOAD_ARRAY(TO, FROM, LENGTH) \\"
    print >>traceFile, indent, "{ \\"
    print >>traceFile, indent * 2, "int i; \\"
    print >>traceFile, indent * 2, "for (i = 0; i < (LENGTH); i++) \\"
    print >>traceFile, indent * 3, "(TO)[i] = (FROM)[i]; \\"
    print >>traceFile, indent, "}"
    print >>traceFile, ""

    # Insert any additional code specified in the configuration
    if "c_player_code" in config:
        for fileName in config["c_player_code"]:
            f = open(config.getRelativePath(fileName))
            print >>traceFile, f.read()
            f.close()

    # Add the header to the data file if we have one
    if dataFile:
        if dataFileFormat == "gcc":
            print >>dataFile, "#"
            print >>dataFile, "# GNU Assembler data file generated from %d events (%d frames). " % (len(trace.events), frameCount)
            print >>dataFile, "#"
            print >>dataFile, ""
            print >>dataFile, ".section .data"
        else:  # rvct
            print >>dataFile, ";"
            print >>dataFile, "; RVCT Assembler data file generated from %d events (%d frames). " % (len(trace.events), frameCount)
            print >>dataFile, ";"
            print >>dataFile, ""
            print >>dataFile, " AREA ||.constdata||, DATA, READONLY, ALIGN=2"

    # Introduce objects
    print >>traceFile, "/* Objects */ "
    for objects in classes.values():
        for obj in objects.values():
            print >>traceFile, "static %s %s = (%s)0x%x;" % (objectTypes[obj], objectId(obj), objectTypes[obj], obj.id)
    print >>traceFile, ""
    task.step()

    # Introduce arrays
    print >>traceFile, "/* %d arrays */ " % len(arrays)
    for i, array in enumerate(arrays.values()):
        a = arrayId(array)
        if usePersistentArrays:
            l = arraySizes[a]
            if not l:
                self.analyzer.reportWarning("Empty array %s" % str(a))
                l = 1
            print >>traceFile, "static %s %s_%s%d[%d];" % (arrayTypes[a], str(arrayTypes[a]).lower(), arrayPrefix, i, l)
        else:
            print >>traceFile, "static %s* %s_%s%d;" % (arrayTypes[a], str(arrayTypes[a]).lower(), arrayPrefix, i)
    print >>traceFile, ""

    # Introduce unique array data
    print >>traceFile, "/* Array data */ "
    arrayData = []
    arrayMap = {}
    for variants in arrayVariants.values():
        for array in variants:
            # See if an equivalent array is already created
            for j, existingArray in enumerate(arrayData):
                if existingArray == array and \
                   existingArray.__class__ == array.__class__:
                    arrayMap[id(array)] = j
                    break
            else:
                arrayMap[id(array)] = len(arrayData)
                arrayData.append(array)

    if not dataFile:
        # Inline data
        for i, array in enumerate(arrayData):
            if not len(array):
                continue
            # Object arrays can't be initialized inline
            if isinstance(array, Trace.ObjectArrayValue):
                print >>traceFile, "static %s %sData%d[%d];" % (arrayTypes[arrayId(array)], arrayPrefix, i, len(array))
                print >>traceFile, ""
                continue
            elif usePersistentArrays:
                print >>traceFile, "static const %s %sData%d[%d] = {" % (arrayTypes[arrayId(array)], arrayPrefix, i, len(array))
            else:
                print >>traceFile, "static %s %sData%d[%d] = {" % (arrayTypes[arrayId(array)], arrayPrefix, i, len(array))
            print >>traceFile, indent,
            # Figure out the proper qualifier for the array elements
            qualifier = ""
            format = "s"
            if len(array):
                if isinstance(array, Trace.FloatArrayValue):
                    format = qualifier = "f"
                elif isinstance(array, Trace.DoubleArrayValue):
                    format = qualifier = "d"
                elif isinstance(array, Trace.LongArrayValue):
                    format = qualifier = "l"
            for k, value in enumerate(array):
                value = ("%%%s%s" % (format, qualifier)) % value
                if k != len(array) - 1:
                    print >>traceFile, "%s," % value,
                    if not (k + 1) % 8:
                        print >>traceFile, ""
                        print >>traceFile, indent,
                else:
                    print >>traceFile, value
            print >>traceFile, "};"
            print >>traceFile, ""
    else:
        # External data
        for i, array in enumerate(arrayData):
            if not len(array):
                continue
            if usePersistentArrays and not isinstance(array, Trace.ObjectArrayValue):
                print >>traceFile, "extern const %s %sData%d[%d];" % (arrayTypes[arrayId(array)], arrayPrefix, i, len(array))
            else:
                print >>traceFile, "extern %s %sData%d[%d];" % (arrayTypes[arrayId(array)], arrayPrefix, i, len(array))
            # Object arrays can't be initialized inline
            if isinstance(array, Trace.ObjectArrayValue):
                continue
            # Figure out the proper type code for the array elements
            if dataFileFormat == "gcc":
                print >>dataFile, ".global %sData%d" % (arrayPrefix, i)
                print >>dataFile, "%sData%d:" % (arrayPrefix, i)
                if isinstance(array, Trace.FloatArrayValue):
                    typeCode = ".float"
                elif isinstance(array, Trace.DoubleArrayValue):
                    typeCode = ".double"
                elif isinstance(array, Trace.LongArrayValue):
                    typeCode = ".quad"
                elif isinstance(array, Trace.ShortArrayValue):
                    typeCode = ".short"
                elif isinstance(array, Trace.ByteArrayValue):
                    typeCode = ".byte"
                elif isinstance(array, Trace.IntegerArrayValue):
                    typeCode = ".int"
                else:
                    raise RuntimeError("Unknown array type")
                # Write out the data
                print >>dataFile, "%s %s" % (typeCode, ", ".join(map(str, array)))
            else:  # rvct
                print >>dataFile, "GLOBAL %sData%d" % (arrayPrefix, i)
                print >>dataFile, "%sData%d" % (arrayPrefix, i)
                if isinstance(array, Trace.FloatArrayValue):
                    typeCode = "DCFS"
                elif isinstance(array, Trace.DoubleArrayValue):
                    typeCode = "DCFD"
                elif isinstance(array, Trace.LongArrayValue):
                    typeCode = "DCQ"
                elif isinstance(array, Trace.ShortArrayValue):
                    typeCode = "DCW"
                elif isinstance(array, Trace.ByteArrayValue):
                    typeCode = "DCB"
                elif isinstance(array, Trace.IntegerArrayValue):
                    typeCode = "DCD"
                else:
                    raise RuntimeError("Unknown array type")
                # Write out the data
                prefix = " %s " % typeCode
                for j in xrange(0, len(array), 8):
                    values = array[j:j + 8]
                    print >>dataFile, prefix, ",".join(map(str, values))

    # Initialize the objects
    print >>traceFile, "static void %s(void* %s)" % (initFuncName, playerArgument)
    print >>traceFile, "{"

    def getObjectAttributeValue(attr):
        if isinstance(attr, Trace.Array):
            # Only strings are supported so far
            assert isinstance(attr, Trace.ByteArrayValue)
            s = "".join((chr(c) for c in attr))
            s = s.replace("\r", "\\r")
            s = s.replace("\t", "\\t")
            s = s.rstrip("\x00")
            lines = s.split("\n")
            return "\n".join(('"%s\\n"' % l for l in lines))
        return str(attr)

    for objects in classes.values():
        for obj in objects.values():
            # If the object has attributes or it wasn't created from a return value, ask the user to create it
            cClass = library.classes.get(obj.cls.name)
            if obj.attrs or (not obj in outValueObjects and cClass and cClass.overridable):
                print >>traceFile, indent, "/* %s attributes: %s */" % (obj.cls.name, ", ".join(obj.attrs.keys()))
                if obj.attrs:
                    attrs = ", ".join(map(getObjectAttributeValue, obj.attrs.values()))
                    print >>traceFile, indent, "%s = create%s%d(%s, %s);" % (objectId(obj), obj.cls.name, len(obj.attrs) + 1, playerArgument, attrs)
                else:
                    print >>traceFile, indent, "%s = create%s1(%s);" % (objectId(obj), obj.cls.name, playerArgument)
    print >>traceFile, "}"
    print >>traceFile, ""

    # Uninitialize the objects
    print >>traceFile, "static void %s(void* %s)" % (uninitFuncName, playerArgument)
    print >>traceFile, "{"
    for objects in classes.values():
        for obj in objects.values():
            # If the object has attributes or it wasn't created from a return value, ask the user to destroy it
            cClass = library.classes.get(obj.cls.name)
            if obj.attrs or (not obj in outValueObjects and cClass and cClass.overridable):
                print >>traceFile, indent, "destroy%s2(%s, %s);" % (obj.cls.name, playerArgument, objectId(obj))
    print >>traceFile, "}"
    print >>traceFile, ""

    # Add the events
    task.finish()
    task = Task.startTask("c-export", "Generating source", len(trace.events))
    frameNumber = 0
    frameFunctions = ["%s0" % frameFuncName]
    activeArrays = dict([(a, None) for a in arrays.keys()])

    # Open the frame function
    print >>traceFile, "static void %s0(void* %s)" % (frameFuncName, playerArgument)
    print >>traceFile, "{"

    for event in trace.events:
        function = self.analyzer.lookupFunction(event)

        # Modify objects
        for obj in event.modifiedObjects:
            # Check the the object was really modified
            if obj.attrs and obj.attrs != classes[obj.cls][objectId(obj)].attrs:
                attrs = ", ".join(map(getObjectAttributeValue, obj.attrs.values()))
                print >>traceFile, indent, "/* %s attributes: %s */" % (obj.cls.name, ", ".join(obj.attrs.keys()))
                print >>traceFile, indent, "%s = modify%s%d(%s, %s, %s);" % \
                    (objectId(obj), obj.cls.name, len(obj.attrs) + 2, playerArgument, objectId(obj), attrs)
                classes[obj.cls][objectId(obj)].attrs = obj.attrs

        # Modify arrays
        for array in event.modifiedArrays:
            # Load the correct data into the array
            a = arrayId(array)
            # If this array is not used anywhere, skip it
            if not id(array) in arrayMap:
                continue
            toArray = arrays.index(a)
            fromArray = arrayMap[id(array)]
            # Don't reload the same data
            if activeArrays[a] == fromArray:
                continue
            # Ignore empty arrays
            if not len(array):
                continue
            activeArrays[a] = fromArray
            # Insert new objects directly into the array
            if isinstance(array, Trace.ObjectArrayValue):
                for i, obj in enumerate(array):
                    print >>traceFile, indent, "%s_%s%d[%d] = %s;" % \
                        (str(arrayTypes[a]).lower(), arrayPrefix, toArray, i, objectId(obj))
            elif usePersistentArrays:
                print >>traceFile, indent, "LOAD_ARRAY(%s_%s%d, %sData%d, %d);" % \
                    (str(arrayTypes[a]).lower(), arrayPrefix, toArray, arrayPrefix, fromArray, len(array))
            else:
                print >>traceFile, indent, "%s_%s%d = %sData%d;" % \
                    (str(arrayTypes[a]).lower(), arrayPrefix, toArray, arrayPrefix, fromArray)

        # Collect the arguments
        args = []
        returnValue = None
        for name, value in event.values.items():
            valueType = name and function.parameters[name].type or function.type
            if value is None:
                value = "(%s)0" % valueType
            elif isinstance(value, Trace.Array):
                # If this array can be modified by the function, mark it as lost
                if not valueType.isConstant() and value in event.modifiedArrays:
                    a = arrayId(value)
                    activeArrays[a] = None
                if not value.id:
                    value = "(%s)0" % valueType
                else:
                    a = arrayId(value)
                    value = "(%s)%s_%s%d" % (valueType, str(arrayTypes[a]).lower(), arrayPrefix, arrays.index(a))
            elif isinstance(value, Trace.Object):
                value = str(objectId(value))
            elif isinstance(value, Trace.UnknownPhrase):
                value = "(%s)NULL" % valueType
            else:
                value = StringUtils.decorateValue(library, function, name, value)
                if isinstance(value, Trace.FloatValue):
                    value = str(value) + "f"
                elif isinstance(value, Trace.DoubleValue):
                    value = str(value) + "d"
                elif isinstance(value, Trace.LongValue):
                    value = str(value) + "l"
                # Do a cast if this is actually a pointer parameter (e.g. 'ptr' in glVertexAttribPointer)
                if name and library.isPointerType(function.parameters[name].type):
                    value = "(%s)%s" % (valueType, value)
                # If the real C type is unsigned and we have a negative value, do a cast
                try:
                    if name and "unsigned" in str(library.resolveType(function.parameters[name].type)) and int(value) < 0:
                        value = "(%s)%s" % (function.parameters[name].type, value)
                except __HOLE__:
                    # Not an integer
                    pass
                # HACK: eglGetDisplay(0) -> eglGetDisplay(EGL_DEFAULT_DISPLAY)
                if event.name == "eglGetDisplay" and name and str(value) == "0":
                    value = "EGL_DEFAULT_DISPLAY"
            # Make sure we have a meaningful parameter value
            assert len(str(value))
            if name:
                args.append(str(value))
            else:
                returnValue = value

        # Truncated event stream?
        if not len(args) == len(function.parameters):
            self.analyzer.reportWarning("Truncated call to %s(%s)" % (event.name, ", ".join(args)))
            print >>traceFile, indent, "/* truncated call to %s(%s) */" % (event.name, ", ".join(args))
            continue

        # Save the return value if needed
        returnObject = event.values.get(None, None)
        if isinstance(returnObject, Trace.Object):
            print >>traceFile, indent, "%s =" % objectId(returnObject),
        else:
            print >>traceFile, indent,

        args = ", ".join(args)
        print >>traceFile, "%s(%s);" % (event.name, args)

        # Apply modifications to object arrays
        for array in event.modifiedArrays:
            if isinstance(array, Trace.ObjectArrayValue):
                for i, obj in enumerate(array):
                    a = arrayId(array)
                    fromArray = arrays.index(a)
                    print >>traceFile, indent, "%s = %s_%s%d[%d];" % \
                        (objectId(obj), str(arrayTypes[a]).lower(), arrayPrefix, fromArray, i)

        if (not frameMarkers and function.isFrameMarker) or event in frameMarkers:
            frameNumber += 1
            name = "%s%d" % (frameFuncName, frameNumber)
            print >>traceFile, "}"
            print >>traceFile, ""
            print >>traceFile, "/**"
            print >>traceFile, " * Frame #%d" % frameNumber
            print >>traceFile, " */"
            print >>traceFile, "static void %s(void* %s)" % (name, playerArgument)
            print >>traceFile, "{"
            frameFunctions.append(name)
        task.step()

    print >>traceFile, "}"
    print >>traceFile, ""

    # Create the playback function
    print >>traceFile, "/**"
    print >>traceFile, " * Play back all trace frames."
    print >>traceFile, " * @param %s Optional user data pointer" % (playerArgument)
    print >>traceFile, " */"
    print >>traceFile, "static void %s(void* %s)" % (playFuncName, playerArgument)
    print >>traceFile, "{"
    print >>traceFile, indent, "%s(%s);" % (initFuncName, playerArgument)
    for name in frameFunctions:
        print >>traceFile, indent, "%s(%s);" % (name, playerArgument)
    print >>traceFile, indent, "%s(%s);" % (uninitFuncName, playerArgument)
    print >>traceFile, "}"
    print >>traceFile, ""

    # Create the playback function for single frame playback
    print >>traceFile, "/**"
    print >>traceFile, " * Play back a single frame of the trace."
    print >>traceFile, " * @param %s Optional user data pointer" % (playerArgument)
    print >>traceFile, " * @param frame Zero-based number of frame to play"
    print >>traceFile, " * @returns 1 if the frame number was valid, 0 otherwise"
    print >>traceFile, " */"
    print >>traceFile, "static int %s(void* %s, int frame)" % (playFrameFuncName, playerArgument)
    print >>traceFile, "{"
    print >>traceFile, indent, "switch (frame)"
    print >>traceFile, indent, "{"
    print >>traceFile, indent * 2, "case %6d: %s(%s); break;" % (0, initFuncName, playerArgument)
    for i, name in enumerate(frameFunctions):
        print >>traceFile, indent * 2, "case %6d: %s(%s); break;" % (i + 1, name, playerArgument)
    print >>traceFile, indent * 2, "case %6d: %s(%s); break;" % (len(frameFunctions) + 1, uninitFuncName, playerArgument)
    print >>traceFile, indent * 2, "default: return 0;"
    print >>traceFile, indent, "}"
    print >>traceFile, indent, "return 1;"
    print >>traceFile, "}"

    # Close the data file
    if dataFile:
        dataFile.close()

    # All done
    task.finish()
ValueError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/plugins/core/CSourceFormat.py/CSourceExporterPlugin.saveTrace
1,894
def handle(self, *args, **options):
    try:
        do_adduser(args[0], args[1])
        msg = 'Adding {user} to {group}\n'.format(user=args[0], group=args[1])
        self.log.info(msg)
        self.stdout.write(msg)
    except __HOLE__:
        raise CommandError(self.help)
IndexError
dataset/ETHPy150Open mozilla/addons-server/src/olympia/zadmin/management/commands/addusertogroup.py/Command.handle
1,895
def _preprocess_kwargs(self, initial_kwargs):
    """ Replace generic key related attribute with filters by object_id and content_type fields """
    kwargs = initial_kwargs.copy()
    generic_key_related_kwargs = self._get_generic_key_related_kwargs(initial_kwargs)
    for key, value in generic_key_related_kwargs.items():
        # delete old kwarg that was related to generic key
        del kwargs[key]
        try:
            suffix = key.split('__')[1]
        except __HOLE__:
            suffix = None
        # add new kwargs that related to object_id and content_type fields
        new_kwargs = self._get_filter_object_id_and_content_type_filter_kwargs(value, suffix)
        kwargs.update(new_kwargs)
    return kwargs
IndexError
dataset/ETHPy150Open opennode/nodeconductor/nodeconductor/core/managers.py/GenericKeyMixin._preprocess_kwargs
1,896
def save(data, name=None):
    if name is None:
        name = data.filename
    data = data.read()
    datafile = StringIO(data)
    if ext.resizer:
        try:
            images = ext.resizer.resize_image(datafile)
        except __HOLE__:
            # Not an image.
            return save_file(name, data)
        save_images(name, data, images)
    else:
        return save_file(name, data)
IOError
dataset/ETHPy150Open FelixLoether/flask-image-upload-thing/flask_uploads/functions.py/save
1,897
def next(self):
    # check if all iterators exhausted
    if not any(self.it):
        raise StopIteration

    # yield all that correspond to target key and update
    result = []
    targetkey = self.targetkey
    updated = 0
    for x, y in enumerate(self.current):
        key, val = y
        if key and key == targetkey:
            # save result (instantiate to prevent overwriting)
            # in next method
            result.append(list(val))
            # advance
            try:
                self.current[x] = next(self.it[x])
            except __HOLE__:
                self.it[x] = None
                self.current[x] = (None, None)
            updated += 1
        else:
            # return empty result
            result.append([])

    assert updated > 0, "no updates - infinite loop"

    # decide which is target key
    try:
        self.targetkey = min([x[0] for x in self.current if x[0] is not None])
    except ValueError:
        # if all are None, sequence is empty
        self.targetkey = None

    return targetkey, result
StopIteration
dataset/ETHPy150Open CGATOxford/cgat/scripts/diff_bam.py/multiway_groupby.next
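`multiway_groupby.next()` above merges several key-sorted streams and emits one bucket per input stream for each key. When per-stream buckets are not needed, the stdlib gets close in a few lines; this simplified variant pools all values for a key into a single list:

import heapq
from itertools import groupby
from operator import itemgetter

def multiway_groupby(*streams):
    # each stream is an iterable of (key, value) pairs, already sorted by key
    for key, rows in groupby(heapq.merge(*streams), key=itemgetter(0)):
        yield key, [value for _, value in rows]

print(list(multiway_groupby([(1, 'a'), (3, 'b')], [(1, 'c'), (2, 'd')])))
# [(1, ['a', 'c']), (2, ['d']), (3, ['b'])]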
1,898
def get_template_sources(template_name, template_dirs=None):
    if not hasattr(gsettings.threadlocal, "theme"):
        print "get_template_sources no threadlocal"
        return
    theme = gsettings.threadlocal.theme + "/"
    if not template_dirs:
        template_dirs = settings.TEMPLATE_DIRS
    for template_dir in template_dirs:
        try:
            yield safe_join(template_dir, theme + template_name)
        except __HOLE__:
            # The joined path was located outside of template_dir.
            pass
# }}}

# load_template_source
# {{{
ValueError
dataset/ETHPy150Open amitu/gitology/src/gitology/d/themed_template_loader.py/get_template_sources
1,899
def load_template_source(template_name, template_dirs=None):
    tried = []
    for filepath in get_template_sources(template_name, template_dirs):
        try:
            return (open(filepath).read().decode(settings.FILE_CHARSET), filepath)
        except __HOLE__:
            tried.append(filepath)
    if tried:
        error_msg = "Tried %s" % tried
    else:
        error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
    raise TemplateDoesNotExist, error_msg
# }}}
IOError
dataset/ETHPy150Open amitu/gitology/src/gitology/d/themed_template_loader.py/load_template_source
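Function-based loaders such as `load_template_source` above were registered through settings in pre-1.8 Django. A sketch of that wiring; the module path is taken from the dataset entry, the rest is assumed:

TEMPLATE_LOADERS = (
    'gitology.d.themed_template_loader.load_template_source',
    'django.template.loaders.filesystem.load_template_source',
)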